/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
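/* Illustrative example of the encoding: COMPCODE_LE is
   COMPCODE_LT | COMPCODE_EQ, so ANDing the codes for "a <= b" and
   "a >= b" leaves just COMPCODE_EQ, which is how combine_comparisons
   below folds (a <= b) && (a >= b) into a == b.  */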
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
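/* Example for div_if_zero_remainder: with CODE == EXACT_DIV_EXPR,
   ARG1 == 12 and ARG2 == 4 the remainder is zero and the constant 3 is
   returned, while ARG1 == 13 and ARG2 == 4 leaves a nonzero remainder
   and yields NULL_TREE instead.  */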
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
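/* Example: for a 32-bit signed type the only constant rejected above is
   INT_MIN (0x80000000), whose VAL equals the sign-bit pattern
   1 << (prec - 1); -INT_MIN is not representable, so negating it would
   overflow.  Every other value negates safely.  */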
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
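/* As an illustration, negate_expr applied to the tree for (a - b) in a
   type without signed zeros yields the tree for (b - a) through the
   MINUS_EXPR case of fold_negate_expr above; an expression that cannot
   be simplified simply comes back wrapped in a NEGATE_EXPR node.  */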
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
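/* Example for split_tree: splitting IN = (x - 5) with CODE == PLUS_EXPR
   returns the variable part x, stores the literal 5 in *MINUS_LITP
   (because it was subtracted), and leaves *CONP and *LITP null.  */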
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
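/* Example for associate_trees: combining T1 = x and T2 = -y with
   CODE == PLUS_EXPR builds x - y through the NEGATE_EXPR handling above
   rather than constructing x + (-y).  */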
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
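/* For instance, int_const_binop (PLUS_EXPR, 2, 3) yields the constant 5
   in the type of the operands; if a signed addition wraps, the result
   carries TREE_OVERFLOW so callers can decide whether to keep it.  */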
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);
  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;

          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler emits VEC_RSHIFT_EXPR always;
             for !BYTES_BIG_ENDIAN it picks the first vector element, but
             for BYTES_BIG_ENDIAN the last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;

          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
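/* Worked example for the complex path of const_binop: folding
   (1 + 2i) * (3 + 4i) with the scalar MULT_EXPR code above computes
   real = 1*3 - 2*4 = -5 and imag = 1*4 + 2*3 = 10, giving the
   constant -5 + 10i.  */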
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
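/* E.g. size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds directly
   to size_int (12); the fast paths above also make 0 + x, x + 0, x - 0
   and 1 * x collapse to x without building a new node.  */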
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
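/* Example for size_diffop: with sizetype (unsigned) constants ARG0 == 4
   and ARG1 == 8, the difference cannot be taken directly in the unsigned
   type; the final branch above computes 8 - 4 = 4 in ssizetype and
   subtracts it from zero, yielding the signed constant -4.  */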
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
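/* Under these rules, folding (int) 1e30 for 32-bit int saturates to
   INT_MAX with TREE_OVERFLOW set, and (int) NaN folds to 0, likewise
   marked as overflowed.  */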
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;

  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;
/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
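/* Example: inverting a < b when NaNs must be honored yields UNGE_EXPR,
   i.e. "a >= b or unordered", which is exactly !(a < b) under IEEE
   semantics.  With flag_trapping_math set, such inversions are refused
   above (ERROR_MARK) except for EQ/NE/ORDERED/UNORDERED, which never
   trap on unordered operands.  */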
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
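
/* Worked example (a sketch, assuming the usual encoding in which the
   compcode bits stand for "less", "equal", "greater" and "unordered"):

	COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE
	COMPCODE_LT & COMPCODE_LE == COMPCODE_LT

   so ORing (ANDing) two compcodes yields the compcode of the ORed (ANDed)
   comparison, which is what combine_comparisons below relies on.  */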
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
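
/* Example of the folding performed above (a sketch):

	(x < y) && (x == y)  ->  LT & EQ  ->  COMPCODE_FALSE  ->  false
	(x < y) || (x == y)  ->  LT | EQ  ->  COMPCODE_LE     ->  x <= y

   For floating point, (x < y) || (x > y) combines to COMPCODE_LTGT, which
   survives as LTGT_EXPR only when the mode honors NaNs and is rewritten
   to NE otherwise, as done above.  */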
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				  VECTOR_CST_ELT (arg1, i), flags))
	      return 0;
	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
		  && alias_ptr_types_compatible_p
		       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
			TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
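
/* Usage sketch for operand_equal_p (illustrative only):

	operand_equal_p (a, b, 0)		-- structural equality
	operand_equal_p (a, b, OEP_ONLY_CONST)	-- equal constants only

   Note the deliberate difference from C's ==: -0.0 and 0.0 are not
   operand_equal_p unless signed zeros are ignored, while two identical
   NaN constants are, even though NaN != NaN at run time.  */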
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
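
/* Example (a sketch): folding `f () * 0 + g () * 0' to 0 must still
   evaluate the calls, so omit_two_operands_loc produces

	(f (), (g (), 0))

   i.e. nested COMPOUND_EXPRs with OMITTED1 evaluated before OMITTED2,
   matching the order in which the two build2_loc calls above wrap T.  */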
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
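
/* Worked example for the second transformation above (a sketch; it is
   only enabled by unsafe FP optimizations since it reassociates
   floating-point arithmetic):

	a / 2.0 + a / 4.0  ->  a * (1.0/2.0 + 1.0/4.0)  ->  a * 0.75

   real_arithmetic evaluates the reciprocals and their sum at compile
   time, so a division and an addition collapse into one multiply.  */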
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
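
/* Example (a sketch): for a 32-bit `int i', a reference to all 32 bits
   at position 0 degenerates to a plain conversion of I, while a request
   for 8 bits at bit position 8 becomes BIT_FIELD_REF <i, 8, 8>, built in
   an 8-bit integer type and then converted to TYPE if necessary.  */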
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
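
/* Worked example (a sketch):

	struct S { unsigned f : 3; } s;		s.f == 5

   becomes a mask-and-compare of the containing word W, roughly

	(W & (7 << pos)) == (5 << pos)

   so the shift implied by extracting the bit-field disappears.  A
   constant that cannot fit, such as s.f == 9, is caught above and
   folded to false with the "comparison is always 0" warning.  */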
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
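
/* Example (a sketch, 32-bit type): all_ones_mask_p (0x0000000f, 4) is
   nonzero and all_ones_mask_p (0x0000000e, 4) is zero, since shifting
   the all-ones constant left and then right by 32 - 4 bits leaves
   exactly the low four bits set.  */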
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
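
/* Example (a sketch): for a 32-bit int X, VAL == 0x80000000 makes
   sign_bit_p (x, val) return X.  For (int) c with a signed char C, the
   NOP_EXPR case recurses into the narrower operand, so VAL == 0x80, the
   sign bit of the unextended type, is accepted as well.  */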
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
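
/* Example (a sketch): comparing an omitted lower bound with an omitted
   upper bound,

	range_binop (LT_EXPR, type, 0, 0, 0, 1)

   treats ARG0 as -infinity (sgn0 == -1) and ARG1 as +infinity
   (sgn1 == 1), so it folds to true without materializing either
   unrepresentable bound.  */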
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
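
/* Illustrative sketch, not part of the original source: the kind of
   range make_range extracts.  In unsigned arithmetic, subtraction just
   shifts the range bounds, so a single comparison encodes a range.  */

static inline int
make_range_example (unsigned int x)
{
  /* make_range applied to (x - 5 <= 10) yields in_p = 1 and the range
     [5, 15], i.e. the same predicate as (x >= 5 && x <= 15).  */
  return x - 5 <= 10;
}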
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = HOST_WIDE_INT_M1U;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
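
/* Illustrative sketch, not part of the original source: the single
   unsigned comparison that build_range_check produces for a two-sided
   test, here for the range ['0', '9'].  */

static inline int
range_check_example (int c)
{
  /* (c >= '0' && c <= '9') becomes one subtract-and-compare in the
     unsigned type, relying on wrap-around arithmetic.  */
  return (unsigned int) c - '0' <= 9u;
}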
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
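
/* Illustrative sketch, not part of the original source: a merge that
   exercises the adjacency case above.  The two excluded ranges
   [48, 48] and [49, 57] touch, so their union is the single range
   [48, 57].  */

static inline int
merge_ranges_example (unsigned int c)
{
  /* (c == '0') || (c >= '1' && c <= '9')
     merges into the one range test (c >= '0' && c <= '9').  */
  return c >= '0' && c <= '9';
}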
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
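
/* Illustrative sketch, not part of the original source: the
   A op 0 ? A : -A transformations tabulated above, for integers
   (where signed zeros are not an issue).  */

static inline int
cond_abs_example (int a)
{
  /* a >= 0 ? a : -a  folds to  abs (a).  */
  return a >= 0 ? a : -a;
}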
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
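
/* Illustrative sketch, not part of the original source: two range
   tests of the same object that fold_range_test can merge, since the
   ranges [10, 99] and [100, 999] are adjacent.  */

static inline int
range_test_example (int x)
{
  /* (x >= 10 && x <= 99) || (x >= 100 && x <= 999)
     folds as if written  (x >= 10 && x <= 999).  */
  return x >= 10 && x <= 999;
}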
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
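
/* Illustrative worked example, not part of the original source: for
   p = 3 in a 32-bit mode, the field value 0b101 sign-extends to
   0xfffffffd.  unextend XORs the constant with the sign bit propagated
   into the extra bits (0xfffffff8, bits 3..31), so a properly
   sign-extended constant comes back with the extra bits clear while a
   zero-extended one does not.  */

static inline int
unextend_example (void)
{
  unsigned int sign_extended = 0xfffffffdu; /* -3 in 3 bits, extended */
  unsigned int zero_extended = 0x00000005u;
  unsigned int sign_copies = 0xfffffff8u;   /* the extra bits */

  return ((sign_extended ^ sign_copies) == 5u	   /* extra bits clear */
	  && (zero_extended ^ sign_copies) != 5u); /* extra bits set */
}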
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
   (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
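
/* Illustrative sketch, not part of the original source: the merge
   fold_truth_andor_1 performs.  With two adjacent byte fields, */

struct truth_andor_example_s { unsigned char a; unsigned char b; };

/* "p->a == 2 && p->b == 4" can be done as one wider load, mask and
   compare, as if written below.  The 0x0402 constant assumes a
   little-endian layout with no padding.  */

static inline int
truth_andor_example (const struct truth_andor_example_s *p)
{
  unsigned short word;
  memcpy (&word, p, sizeof word);
  return word == 0x0402;
}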
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
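
/* Illustrative sketch, not part of the original source: one row of the
   table above, at the C level.  */

static inline int
minmax_example (int x)
{
  /* MAX (x, 0) > 5  ->  x > 5 : the zero arm can never exceed 5.  */
  return (x > 0 ? x : 0) > 5;
}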
5696 /* T is an integer expression that is being multiplied, divided, or taken a
5697 modulus (CODE says which and what kind of divide or modulus) by a
5698 constant C. See if we can eliminate that operation by folding it with
5699 other operations already in T. WIDE_TYPE, if non-null, is a type that
5700 should be used for the computation if wider than our type.
5702 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5703 (X * 2) + (Y * 4). We must, however, be assured that either the original
5704 expression would not overflow or that overflow is undefined for the type
5705 in the language in question.
5707 If we return a non-null expression, it is an equivalent form of the
5708 original computation, but need not be in the original type.
5710 We set *STRICT_OVERFLOW_P to true if the return values depends on
5711 signed overflow being undefined. Otherwise we do not change
5712 *STRICT_OVERFLOW_P. */
5715 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5716 bool *strict_overflow_p
)
5718 /* To avoid exponential search depth, refuse to allow recursion past
5719 three levels. Beyond that (1) it's highly unlikely that we'll find
5720 something interesting and (2) we've probably processed it before
5721 when we built the inner expression. */
5730 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5737 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5738 bool *strict_overflow_p
)
5740 tree type
= TREE_TYPE (t
);
5741 enum tree_code tcode
= TREE_CODE (t
);
5742 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5743 > GET_MODE_SIZE (TYPE_MODE (type
)))
5744 ? wide_type
: type
);
5746 int same_p
= tcode
== code
;
5747 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5748 bool sub_strict_overflow_p
;
5750 /* Don't deal with constants of zero here; they confuse the code below. */
5751 if (integer_zerop (c
))
5754 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5755 op0
= TREE_OPERAND (t
, 0);
5757 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5758 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5760 /* Note that we need not handle conditional operations here since fold
5761 already handles those cases. So just do arithmetic here. */
5765 /* For a constant, we can always simplify if we are a multiply
5766 or (for divide and modulus) if it is a multiple of our constant. */
5767 if (code
== MULT_EXPR
5768 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5769 return const_binop (code
, fold_convert (ctype
, t
),
5770 fold_convert (ctype
, c
));
5773 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5774 /* If op0 is an expression ... */
5775 if ((COMPARISON_CLASS_P (op0
)
5776 || UNARY_CLASS_P (op0
)
5777 || BINARY_CLASS_P (op0
)
5778 || VL_EXP_CLASS_P (op0
)
5779 || EXPRESSION_CLASS_P (op0
))
5780 /* ... and has wrapping overflow, and its type is smaller
5781 than ctype, then we cannot pass through as widening. */
5782 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5783 && (TYPE_PRECISION (ctype
)
5784 > TYPE_PRECISION (TREE_TYPE (op0
))))
5785 /* ... or this is a truncation (t is narrower than op0),
5786 then we cannot pass through this narrowing. */
5787 || (TYPE_PRECISION (type
)
5788 < TYPE_PRECISION (TREE_TYPE (op0
)))
5789 /* ... or signedness changes for division or modulus,
5790 then we cannot pass through this conversion. */
5791 || (code
!= MULT_EXPR
5792 && (TYPE_UNSIGNED (ctype
)
5793 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5794 /* ... or has undefined overflow while the converted to
5795 type has not, we cannot do the operation in the inner type
5796 as that would introduce undefined overflow. */
5797 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5798 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5801 /* Pass the constant down and see if we can make a simplification. If
5802 we can, replace this expression with the inner simplification for
5803 possible later conversion to our or some other type. */
5804 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5805 && TREE_CODE (t2
) == INTEGER_CST
5806 && !TREE_OVERFLOW (t2
)
5807 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5809 ? ctype
: NULL_TREE
,
5810 strict_overflow_p
))))
5815 /* If widening the type changes it from signed to unsigned, then we
5816 must avoid building ABS_EXPR itself as unsigned. */
5817 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5819 tree cstype
= (*signed_type_for
) (ctype
);
5820 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5823 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5824 return fold_convert (ctype
, t1
);
5828 /* If the constant is negative, we cannot simplify this. */
5829 if (tree_int_cst_sgn (c
) == -1)
5833 /* For division and modulus, type can't be unsigned, as e.g.
5834 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5835 For signed types, even with wrapping overflow, this is fine. */
5836 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
5838 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5840 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5843 case MIN_EXPR
: case MAX_EXPR
:
5844 /* If widening the type changes the signedness, then we can't perform
5845 this optimization as that changes the result. */
5846 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5849 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5850 sub_strict_overflow_p
= false;
5851 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5852 &sub_strict_overflow_p
)) != 0
5853 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5854 &sub_strict_overflow_p
)) != 0)
5856 if (tree_int_cst_sgn (c
) < 0)
5857 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5858 if (sub_strict_overflow_p
)
5859 *strict_overflow_p
= true;
5860 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5861 fold_convert (ctype
, t2
));
5865 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5866 /* If the second operand is constant, this is a multiplication
5867 or floor division, by a power of two, so we can treat it that
5868 way unless the multiplier or divisor overflows. Signed
5869 left-shift overflow is implementation-defined rather than
5870 undefined in C90, so do not convert signed left shift into
5872 if (TREE_CODE (op1
) == INTEGER_CST
5873 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5874 /* const_binop may not detect overflow correctly,
5875 so check for it explicitly here. */
5876 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5877 && TREE_INT_CST_HIGH (op1
) == 0
5878 && 0 != (t1
= fold_convert (ctype
,
5879 const_binop (LSHIFT_EXPR
,
5882 && !TREE_OVERFLOW (t1
))
5883 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5884 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5886 fold_convert (ctype
, op0
),
5888 c
, code
, wide_type
, strict_overflow_p
);
5891 case PLUS_EXPR
: case MINUS_EXPR
:
5892 /* See if we can eliminate the operation on both sides. If we can, we
5893 can return a new PLUS or MINUS. If we can't, the only remaining
5894 cases where we can do anything are if the second operand is a
5896 sub_strict_overflow_p
= false;
5897 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5898 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5899 if (t1
!= 0 && t2
!= 0
5900 && (code
== MULT_EXPR
5901 /* If not multiplication, we can only do this if both operands
5902 are divisible by c. */
5903 || (multiple_of_p (ctype
, op0
, c
)
5904 && multiple_of_p (ctype
, op1
, c
))))
5906 if (sub_strict_overflow_p
)
5907 *strict_overflow_p
= true;
5908 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5909 fold_convert (ctype
, t2
));
5912 /* If this was a subtraction, negate OP1 and set it to be an addition.
5913 This simplifies the logic below. */
5914 if (tcode
== MINUS_EXPR
)
5916 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5917 /* If OP1 was not easily negatable, the constant may be OP0. */
5918 if (TREE_CODE (op0
) == INTEGER_CST
)
5929 if (TREE_CODE (op1
) != INTEGER_CST
)
5932 /* If either OP1 or C are negative, this optimization is not safe for
5933 some of the division and remainder types while for others we need
5934 to change the code. */
5935 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}

      break;

    default:
      break;
    }

  return 0;
}

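/* A worked example of the cancellation above (illustrative only):
   with undefined signed overflow, extracting the division in
   (X * 8) / 4 gives X * 2 because 8 is a multiple of 4, while
   (X * 4) / 8 gives X / 2 because 8 is a multiple of 4 the other way
   around; both paths set *STRICT_OVERFLOW_P, since they are only
   valid when the intermediate multiplication cannot wrap.  */
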
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}

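/* An illustrative sketch of how a fold might use the helper above;
   the function name is invented for illustration and is not part of
   the surrounding sources.  Folding a tautological comparison such
   as `x == x' on integral X just materializes TYPE's "true", which
   is boolean_true_node, integer_one_node or an all-ones vector as
   TYPE dictates.  */

static tree
example_fold_tautology (tree type)
{
  /* `x == x' is unconditionally true; build TYPE's true value.  */
  return constant_boolean_node (true, type);
}
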
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}

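/* A worked example (illustrative only): folding `(b ? x : y) + 4'
   pushes the addition inside to give `b ? x + 4 : y + 4'.  The guard
   above accepts it because ARG (the 4) is constant, so no SAVE_EXPR
   is needed and neither branch grows an unsimplifiable subtree.  */
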
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}

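/* A worked example (illustrative only): under the default rounding
   mode, `x - 0.0' may be folded to `x' even with signed zeros honored
   (0.0 - 0.0 is +0.0 and -0.0 - 0.0 is -0.0), but `x + 0.0' may not,
   since -0.0 + 0.0 yields +0.0 and would lose x's sign; hence only
   the NEGATE path above can succeed when signed zeros matter.  */
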
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}

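/* Worked examples (illustrative only): `sqrt (x) > 2.0' folds to
   `x > 4.0' since squaring the bound is exact here, while
   `sqrt (x) > -1.0' folds to constant true when NaNs are ignored and
   to `x >= 0.0' when they are honored, because sqrt of a valid
   argument is never negative.  */
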
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}

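/* A worked example (illustrative only): for double, `x < +Inf'
   becomes `x <= DBL_MAX' and `x >= +Inf' becomes `x > DBL_MAX'; for
   `x != +Inf' with NaNs honored, the fold keeps a negation,
   !(x > DBL_MAX), so a NaN operand still compares the same way it
   did before the fold.  */
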
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}

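/* A worked example (illustrative only): for signed X, `X / 4 == 3'
   holds exactly for X in [3*4, 3*4 + 3] = [12, 15], so the comparison
   becomes the range check built above; a negative divisor first swaps
   the comparison code, so `X / -4 < 3' is rewritten before the bounds
   are computed.  */
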
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
	  && bitnum < TYPE_PRECISION (type)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < (unsigned) (TYPE_PRECISION (type) - bitnum)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}

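/* A worked example (illustrative only): `(x & 8) != 0' becomes
   `(int) (((unsigned) x >> 3) & 1)', and `(x & 8) == 0' additionally
   XORs the shifted bit with 1; when the mask is the sign bit, the
   sign-test variant above yields `x < 0' instead.  */
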
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}

/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}

/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}

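/* A worked example (illustrative only): for `int a[10][10]' with
   4-byte int, the address `&a[1][2] p+ 40' (40 bytes being one row)
   is rewritten to `&a[2][2]': the inner index would become 12 and is
   rejected by the TYPE_MAX_VALUE check, but the outer ARRAY_REF has
   step 40, so delta is 1 and the outer index 1 becomes 2.  */
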
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}

/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}

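/* Worked examples (illustrative only): `x * 3 + x * 5' shares the
   multiplicand X and folds to `x * (3 + 5)'; `i * 4 + j * 2' shares
   only the power-of-two factor 2 and folds to `(i * 2 + j) * 2',
   which the INTEGER_CST guard above would reject if J were a
   constant, since that would trade one multiplication for two.  */
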
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}

/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}

/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}

/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}

/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}

/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}

/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}

/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}

/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}

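/* An illustrative sketch (the function name is invented for
   illustration): the encode/interpret pair above is designed to
   round-trip, which is what fold_view_convert_expr below relies on.
   Encoding a scalar or vector constant into target bytes and
   interpreting the bytes back at the same type should reproduce the
   value.  */

static tree
example_native_round_trip (tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf));

  if (len == 0)
    return NULL_TREE;	/* Constant not representable as raw bytes.  */
  return native_interpret_expr (TREE_TYPE (cst), buf, len);
}
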
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}

/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}

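/* An illustrative sketch of the routine above; the helper name is
   invented and a 32-bit scalar float constant is assumed.  Bit-casting
   0.5f this way yields the INTEGER_CST 0x3f000000 at compile time.  */

static tree
example_bitcast_float_to_int (tree float_cst)
{
  /* Build a 32-bit unsigned integer type and reinterpret the bytes.
     A NULL_TREE result means the constant could not be encoded.  */
  tree itype = build_nonstandard_integer_type (32, 1);
  return fold_view_convert_expr (itype, float_cst);
}
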
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);

7820 /* Fold a unary expression of code CODE and type TYPE with operand
7821 OP0. Return the folded expression if folding is successful.
7822 Otherwise, return NULL_TREE. */
7825 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7829 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7831 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7832 && TREE_CODE_LENGTH (code
) == 1);
7837 if (CONVERT_EXPR_CODE_P (code
)
7838 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7840 /* Don't use STRIP_NOPS, because signedness of argument type
7842 STRIP_SIGN_NOPS (arg0
);
7846 /* Strip any conversions that don't change the mode. This
7847 is safe for every expression, except for a comparison
7848 expression because its signedness is derived from its
7851 Note that this is done as an internal manipulation within
7852 the constant folder, in order to find the simplest
7853 representation of the arguments so that their form can be
7854 studied. In any cases, the appropriate type conversions
7855 should be put back in the tree that will get out of the
7861 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7863 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7864 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7865 fold_build1_loc (loc
, code
, type
,
7866 fold_convert_loc (loc
, TREE_TYPE (op0
),
7867 TREE_OPERAND (arg0
, 1))));
7868 else if (TREE_CODE (arg0
) == COND_EXPR
)
7870 tree arg01
= TREE_OPERAND (arg0
, 1);
7871 tree arg02
= TREE_OPERAND (arg0
, 2);
7872 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7873 arg01
= fold_build1_loc (loc
, code
, type
,
7874 fold_convert_loc (loc
,
7875 TREE_TYPE (op0
), arg01
));
7876 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7877 arg02
= fold_build1_loc (loc
, code
, type
,
7878 fold_convert_loc (loc
,
7879 TREE_TYPE (op0
), arg02
));
7880 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7883 /* If this was a conversion, and all we did was to move into
7884 inside the COND_EXPR, bring it back out. But leave it if
7885 it is a conversion from integer to integer and the
7886 result precision is no wider than a word since such a
7887 conversion is cheap and may be optimized away by combine,
7888 while it couldn't if it were outside the COND_EXPR. Then return
7889 so we don't get into an infinite recursion loop taking the
7890 conversion out and then back in. */
7892 if ((CONVERT_EXPR_CODE_P (code
)
7893 || code
== NON_LVALUE_EXPR
)
7894 && TREE_CODE (tem
) == COND_EXPR
7895 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7896 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7897 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7898 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7899 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7900 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7901 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7903 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7904 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7905 || flag_syntax_only
))
7906 tem
= build1_loc (loc
, code
, type
,
7908 TREE_TYPE (TREE_OPERAND
7909 (TREE_OPERAND (tem
, 1), 0)),
7910 TREE_OPERAND (tem
, 0),
7911 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7912 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7921 /* Re-association barriers around constants and other re-association
7922 barriers can be removed. */
7923 if (CONSTANT_CLASS_P (op0
)
7924 || TREE_CODE (op0
) == PAREN_EXPR
)
7925 return fold_convert_loc (loc
, type
, op0
);
7930 case FIX_TRUNC_EXPR
:
7931 if (TREE_TYPE (op0
) == type
)
7934 if (COMPARISON_CLASS_P (op0
))
7936 /* If we have (type) (a CMP b) and type is an integral type, return
7937 new expression involving the new type. Canonicalize
7938 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7940 Do not fold the result as that would not simplify further, also
7941 folding again results in recursions. */
7942 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7943 return build2_loc (loc
, TREE_CODE (op0
), type
,
7944 TREE_OPERAND (op0
, 0),
7945 TREE_OPERAND (op0
, 1));
7946 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7947 && TREE_CODE (type
) != VECTOR_TYPE
)
7948 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7949 constant_boolean_node (true, type
),
7950 constant_boolean_node (false, type
));
7953 /* Handle cases of two conversions in a row. */
7954 if (CONVERT_EXPR_P (op0
))
7956 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7957 tree inter_type
= TREE_TYPE (op0
);
7958 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7959 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7960 int inside_float
= FLOAT_TYPE_P (inside_type
);
7961 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7962 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7963 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7964 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7965 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7966 int inter_float
= FLOAT_TYPE_P (inter_type
);
7967 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7968 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7969 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7970 int final_int
= INTEGRAL_TYPE_P (type
);
7971 int final_ptr
= POINTER_TYPE_P (type
);
7972 int final_float
= FLOAT_TYPE_P (type
);
7973 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7974 unsigned int final_prec
= TYPE_PRECISION (type
);
7975 int final_unsignedp
= TYPE_UNSIGNED (type
);
7977 /* In addition to the cases of two conversions in a row
7978 handled below, if we are converting something to its own
7979 type via an object of identical or wider precision, neither
7980 conversion is needed. */
7981 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7982 && (((inter_int
|| inter_ptr
) && final_int
)
7983 || (inter_float
&& final_float
))
7984 && inter_prec
>= final_prec
)
7985 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7987 /* Likewise, if the intermediate and initial types are either both
7988 float or both integer, we don't need the middle conversion if the
7989 former is wider than the latter and doesn't change the signedness
7990 (for integers). Avoid this if the final type is a pointer since
7991 then we sometimes need the middle conversion. Likewise if the
7992 final type has a precision not equal to the size of its mode. */
7993 if (((inter_int
&& inside_int
)
7994 || (inter_float
&& inside_float
)
7995 || (inter_vec
&& inside_vec
))
7996 && inter_prec
>= inside_prec
7997 && (inter_float
|| inter_vec
7998 || inter_unsignedp
== inside_unsignedp
)
7999 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
8000 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
8002 && (! final_vec
|| inter_prec
== inside_prec
))
8003 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8005 /* If we have a sign-extension of a zero-extended value, we can
8006 replace that by a single zero-extension. Likewise if the
8007 final conversion does not change precision we can drop the
8008 intermediate conversion. */
8009 if (inside_int
&& inter_int
&& final_int
8010 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
8011 && inside_unsignedp
&& !inter_unsignedp
)
8012 || final_prec
== inter_prec
))
8013 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8015 /* Two conversions in a row are not needed unless:
8016 - some conversion is floating-point (overstrict for now), or
8017 - some conversion is a vector (overstrict for now), or
8018 - the intermediate type is narrower than both initial and
8020 - the intermediate type and innermost type differ in signedness,
8021 and the outermost type is wider than the intermediate, or
8022 - the initial type is a pointer type and the precisions of the
8023 intermediate and final types differ, or
8024 - the final type is a pointer type and the precisions of the
8025 initial and intermediate types differ. */
8026 if (! inside_float
&& ! inter_float
&& ! final_float
8027 && ! inside_vec
&& ! inter_vec
&& ! final_vec
8028 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
8029 && ! (inside_int
&& inter_int
8030 && inter_unsignedp
!= inside_unsignedp
8031 && inter_prec
< final_prec
)
8032 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
8033 == (final_unsignedp
&& final_prec
> inter_prec
))
8034 && ! (inside_ptr
&& inter_prec
!= final_prec
)
8035 && ! (final_ptr
&& inside_prec
!= inter_prec
)
8036 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
8037 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
8038 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}
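      /* Illustrative example (editorial addition): given

	     struct Base { int x; };
	     struct Derived { struct Base b; } d;

	 the cast "(struct Derived *) &d.b" references a component at
	 offset zero of an object whose type matches the pointed-to type,
	 so it folds to the address of "d" itself.  */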
      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
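      /* Illustrative example (editorial addition): a sketch of the shape
	 this fold handles — with a narrower signed "x" whose mask constant
	 has a clear sign bit,

	     (unsigned int) (x & 0x7f)

	 can become "(unsigned int) x & 0x7f", folding the extension into
	 the BIT_AND_EXPR because the mask already zeroes the bits that
	 sign-extension could have set.  */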
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away. Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
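      /* Illustrative example (editorial addition): with "char *p" and a
	 constant offset, a cast of a POINTER_PLUS_EXPR such as

	     (int *) (p p+ 4)

	 is rewritten as "((int *) p) p+ 4"; the INTEGER_CST offset is the
	 conservative signal that one of the rebuilt casts will itself
	 fold away.  */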
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
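      /* Illustrative example (editorial addition): with "int x" and the
	 same-precision pair T1 == unsigned int, T2 == int,

	     (unsigned int) ~(int) x

	 becomes "~(unsigned int) x" — valid because the inner cast is not
	 a widening conversion, so complementing before or after the cast
	 yields the same bits.  */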
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}
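      /* Illustrative example (editorial addition): with "long a, b" wider
	 than int,

	     (int) (a * b)

	 is narrowed to "(int) a * (int) b", performed in an unsigned type
	 when int overflow is undefined; only the low-order bits of the
	 product contribute to the narrower result, so no information the
	 final conversion keeps is lost.  */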
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
							   HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}

      return NULL_TREE;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
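/* Illustrative example (editorial addition): folding a conversion such
   as (int) 0x80000000u produces an INTEGER_CST that fold_unary_loc may
   mark with TREE_OVERFLOW even though the behavior is merely
   implementation defined; this wrapper copies the flag back from the
   original constant so later passes such as VRP do not treat the
   conversion as a real overflow.  */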
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPped results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;
  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }
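  /* Illustrative example (editorial addition):

	 (a || b) && (a || c)   ->   a || (b && c)

     the leading operands match, so the shared "a" is hoisted out; this
     is only attempted when the rhs of arg0 has no side effects, because
     evaluating "a" first must preserve the short-circuit semantics of
     both forms.  */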
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }
  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;
  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leaves to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
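/* Illustrative example (editorial addition):

       MIN_EXPR <MAX_EXPR <a, b>, b>   ->   b

   op0 carries the complementary code and its second operand equals op1,
   so the first pattern above fires; "a" is dropped through
   omit_one_operand_loc, which keeps it only for its side effects.  */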
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
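/* Illustrative example (editorial addition): for signed "x",

       x + 1 <= y   ->   x + 0 < y

   switches LE_EXPR to LT_EXPR so the constant's magnitude shrinks by
   one (and the now-trivial addition folds away separately);
   *strict_overflow_p is set because this is only valid when signed
   overflow is undefined.  */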
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  double_int d = tree_to_double_int (t);
  return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
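  /* Illustrative example (editorial addition): with integer "x",

	 x / 3 == 2

     holds exactly when x is in [6, 8], so fold_div_compare can replace
     the run-time division with a range test on x.  */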
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
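/* Illustrative derivation (editorial addition): writing z = a + b*i,

       z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   which is exactly the COMPLEX_EXPR built above: a real part of
   rpart*rpart + ipart*ipart and a zero imaginary part.  */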
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
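/* For illustration (added note): with arg0 = {0,1,2,3}, arg1 = {4,5,6,7}
   and sel = {0,5,2,7}, the scratch array holds all eight input lanes and
   the folded result is the VECTOR_CST {0,5,2,7}; a CONSTRUCTOR is built
   instead only when some selected element is non-constant.  */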
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
                                             TREE_OPERAND (base0, 0),
                                             TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
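/* For illustration (added note): for &a[i] - &a[j] over an int array the
   bases compare equal, so the result is 0 + (i - j) * 4, i.e. the index
   difference scaled by the element size.  */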
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
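/* For illustration (added note): 4.0 has the exact binary inverse 0.25,
   so x / 4.0 can be rewritten as x * 0.25; 1.0/3.0 is not exactly
   representable, so for 3.0 this returns NULL_TREE and the division is
   left alone.  */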
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static double_int
mask_with_tz (tree type, double_int x, double_int y)
{
  int tz = y.trailing_zeros ();

  if (tz > 0)
    {
      double_int mask;

      mask = ~double_int::mask (tz);
      mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
      return mask & x;
    }

  return x;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (!fndecl) return false;
        if (flag_delete_null_pointer_checks && !flag_check_new
            && DECL_IS_OPERATOR_NEW (fndecl)
            && !TREE_NOTHROW (fndecl))
          return true;
        if (flag_delete_null_pointer_checks
            && lookup_attribute ("returns_nonnull",
                                 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
          return true;
        return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);
  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expr.c.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }

      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  Loop optimizer sometimes produce this type of
         expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          ssizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));
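          /* For illustration (added note): this is the two's-complement
             identity -A == ~A + 1, collapsing the BIT_NOT plus the
             constant 1 into a single negation.  */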
          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
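      /* For illustration (added note): the last fold above rewrites, e.g.,
         "x + (x / 4) * -4" as "x % 4" when CST = 4, since the two
         constants sum to zero.  */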
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == element_precision (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
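      /* For illustration (added note): for a 32-bit unsigned x,
         "(x << 3) + (x >> 29)" matches the constant form above and
         becomes a rotate of x by 3 bits, while
         "(x << b) + (x >> (32 - b))" is handled by the MINUS_EXPR
         arms.  */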
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;

    case MINUS_EXPR:
10825 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10827 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10828 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10829 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10831 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10832 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10833 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10834 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10835 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10836 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10838 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10841 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10842 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10844 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10845 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10846 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10847 fold_convert_loc (loc
, type
, arg1
));
10849 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
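      /* For illustration (added note): this is the truncating-division
         identity x % y == x - (x / y) * y, so e.g. "n - (n / 8) * 8"
         folds to "n % 8".  */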
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }
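          /* For illustration (added note): A & B keeps only the bits of A
             that are also in B, so A - (A & B) keeps exactly the bits of
             A outside B; e.g. A = 13 (0b1101), B = 6 (0b0110) gives
             13 - 4 = 9 = 0b1001 = ~B & A.  */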
          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));

          /* ((T) (X /[ex] C)) * C cancels out if the conversion is
             sign-changing only.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg0) == EXACT_DIV_EXPR
              && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
            return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }
          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int c1, c2, c3, msk;
          int width = TYPE_PRECISION (type), w;
          bool try_simplify = true;

          c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
          c2 = tree_to_double_int (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          msk = double_int::mask (width);

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2).is_zero ())
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
              if (((c1.low | c2.low) & mask) == mask
                  && (c1.low & ~mask) == 0 && c1.high == 0)
                {
                  c3 = double_int::from_uhwi (mask);
                  break;
                }
            }

          /* If X is a tree of the form (Y * K1) & K2, this might conflict
             with that optimization from the BIT_AND_EXPR optimizations.
             This could end up in an infinite recursion.  */
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
                 == INTEGER_CST)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
              double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));

              try_simplify = (masked != c1);
            }

          if (try_simplify && c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     double_int_to_tree (type,
                                                                         c3)),
                                    arg1);
        }
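      /* For illustration (added note): for an 8-bit type,
         "(x & 0x0f) | 0xff" folds to 0xff since C1 is contained in C2,
         and "(x & 0xf0) | 0x0f" folds to "x | 0x0f" since C1 | C2
         covers every bit of the type.  */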
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11491 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11492 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11494 tree a0
, a1
, l0
, l1
, n0
, n1
;
11496 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11497 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11499 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11500 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11502 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11503 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11505 if ((operand_equal_p (n0
, a0
, 0)
11506 && operand_equal_p (n1
, a1
, 0))
11507 || (operand_equal_p (n0
, a1
, 0)
11508 && operand_equal_p (n1
, a0
, 0)))
11509 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11512 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11513 if (t1
!= NULL_TREE
)
11516 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11518 This results in more efficient code for machines without a NAND
11519 instruction. Combine will canonicalize to the first form
11520 which will allow use of NAND instructions provided by the
11521 backend if they exist. */
11522 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11523 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11526 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11527 build2 (BIT_AND_EXPR
, type
,
11528 fold_convert_loc (loc
, type
,
11529 TREE_OPERAND (arg0
, 0)),
11530 fold_convert_loc (loc
, type
,
11531 TREE_OPERAND (arg1
, 0))));
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
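      /* This is safe because for any A and B with A & B == 0,
	 A ^ B == A | B; e.g. (X & 0x0f) ^ (Y & 0xf0) masks the two
	 operands into disjoint bit ranges, so the XOR is an IOR.  */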
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));
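      /* E.g. (X & 1) ^ 1 flips the low bit of X and yields 1 exactly
	 when X is even, which is what (X & 1) == 0 tests; the
	 comparison form is more likely to combine with other tests.  */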
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}
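      /* Worked example: (X | 3) & 5 becomes (X & 5) | (3 & 5)
	 = (X & 5) | 1, pushing the AND inward so the constant
	 sub-expression folds away.  */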
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1,
				     TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1,
				     TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0,
				     TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0,
				     TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  double_int cst1 = tree_to_double_int (arg1);
	  double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
					  TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				double_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
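      /* E.g. (X * 12) & -4 folds to X * 12: ncst1 is 4 (the low set bit
	 of -4 extended to the type), 12 is a multiple of 4, so the two
	 bits cleared by the mask are already zero in the product.  */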
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int masked
	    = mask_with_tz (type, tree_to_double_int (arg1),
			    tree_to_double_int (TREE_OPERAND (arg0, 1)));

	  if (masked.is_zero ())
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != tree_to_double_int (arg1))
	    return fold_build2_loc (loc, code, type, op0,
				    double_int_to_tree (type, masked));
	}
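      /* E.g. (X * 8) & 4 folds to zero: the multiplier guarantees three
	 trailing zero bits, and masking 4 against those known zeros
	 leaves no bits, so only the side effects survive.  */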
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      unsigned HOST_WIDE_INT cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		  || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		      & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    /* tree_to_[su]hwi not used, because we don't care about
		       the upper bits.  */
		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
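      /* Worked example for the PMOP logic above: in
	 ((A & 0xff) + B) & 0x0f the inner mask N = 0xff covers
	 M = 0x0f completely (N & M == M), and the low four bits of a
	 sum depend only on the low four bits of its operands, so the
	 expression folds to (A + B) & 0x0f.  */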
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
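      /* E.g. if arg0 is the address of a variable with 8-byte alignment,
	 modulus is 8 and residue is 0, so ANDing the address with 7
	 folds to the constant 0.  */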
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}

      goto associate;
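      /* Worked example: for 32-bit unsigned X, (X << 16) & 0xff00 has
	 zerobits = 0xffff, which covers the whole mask, so the result
	 folds to (X, 0) above; with mask 0xffff0000 the widened newmask
	 becomes all ones, letting later folding drop the AND entirely.  */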
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }
	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }
	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }
	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }
12343 if (BUILTIN_ROOT_P (fcode1
))
12345 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
12347 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
12349 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
12350 tree b
= TREE_OPERAND (rootarg
, 0);
12351 tree c
= TREE_OPERAND (rootarg
, 1);
12353 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
12355 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
12356 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (arg1))
		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		       + HOST_BITS_PER_WIDE_INT;

	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      build_int_cst (integer_type_node, pow2));
	    }
	}

      /* Fall through */
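      /* E.g. (X & -8) / 8 becomes X >> 3: sum = 8 + (-8) is zero, so the
	 AND already cleared the three low bits and the truncating
	 division is an exact arithmetic shift, even for negative X
	 (e.g. X = -29 gives (-32) / 8 = -4 = -32 >> 3).  */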
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
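      /* E.g. with A unsigned, A / (4 << N) becomes A >> (N + 2), folding
	 log2 of the power-of-two factor into the shift count.  */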
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
= false;
12577 if (TREE_CODE (arg1
) == INTEGER_CST
12578 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12579 &strict_overflow_p
)))
12581 if (strict_overflow_p
)
12582 fold_overflow_warning (("assuming signed overflow does not occur "
12583 "when simplifying modulus"),
12584 WARN_STRICT_OVERFLOW_MISC
);
12585 return fold_convert_loc (loc
, type
, tem
);
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
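      /* E.g. unsigned X % 8 becomes X & 7 (29 % 8 == 29 & 7 == 5), and
	 X % (2 << N) becomes X & ((2 << N) - 1).  */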
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && TREE_INT_CST_LOW (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			      + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_shwi_p (arg1)
	  && TREE_INT_CST_LOW (arg1) < prec
	  && tree_fits_shwi_p (TREE_OPERAND (arg0, 1))
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == prec))
	return TREE_OPERAND (arg0, 0);
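      /* E.g. on a 32-bit type, rotating right by 10 and then by 22 moves
	 every bit through a full 32 positions, so the pair of rotates
	 above cancels to the original operand.  */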
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     test.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	return omit_two_operands_loc (loc, type,
				      code == NE_EXPR
				      ? boolean_true_node : boolean_false_node,
				      TREE_OPERAND (arg0, 1), arg1);
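      /* The oddness of C is what makes this safe: C - X == X would
	 require 2*X == C, which no integer X satisfies when C is odd
	 (even with wrapping arithmetic, 2*X is always even), so the
	 comparison folds to false for == and true for !=.  */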
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR ? integer_one_node
							     : integer_zero_node,
					     arg000);
	    }
	}
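      /* E.g. "((x >> 2) & 4) != 0" becomes "(x & 16) != 0" while the
	 shifted mask still fits in the type, and "((x >> 31) & 2) != 0"
	 for 32-bit signed "x" becomes the plain sign test "x < 0".  */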
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));
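      /* E.g. "(flags & 8) == 8" becomes "(flags & 8) != 0", the canonical
	 form of a single-bit test.  */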
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
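      /* E.g. "(x & 3) == 4" folds to 0 outright, since 4 & ~3 is nonzero.  */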
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
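      /* E.g. for 32-bit "unsigned x", "(x >> 31) != 0" becomes
	 "(int) x < 0", a plain sign test.  */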
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
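      /* E.g. "(x ^ 3) == 5" becomes "x == 6", because 3 ^ 5 == 6.  */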
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
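      /* E.g. "(x ^ z) == (y ^ z)" becomes "x == y", and
	 "(x ^ 1) == (y ^ 2)" becomes "(x ^ 3) == y".  */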
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
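      /* E.g. for signed "int x", "x + 1 > x" folds to 1 here (with a
	 -Wstrict-overflow diagnostic available), since signed overflow is
	 assumed not to occur; the unsigned variant never reaches this
	 block because wrap-around makes it false for x == UINT_MAX.  */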
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = (HOST_WIDE_INT_M1U << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = (HOST_WIDE_INT_M1U << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
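      /* E.g. for 32-bit "unsigned x", "x > 0xffffffffU" folds to 0,
	 "x <= 0xfffffffeU" becomes "x != 0xffffffffU", and
	 "x > 0x7fffffffU" becomes "(int) x < 0".  */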
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
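      /* E.g. "abs (x) <= 5" becomes "x >= -5 && x <= 5", which the range
	 folders can in turn collapse to a single unsigned comparison,
	 "(unsigned) (x + 5) <= 10".  */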
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}
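      /* E.g. "x < (1U << y)" for unsigned "x" becomes "(x >> y) == 0",
	 which tests the discarded high bits of X directly instead of
	 materializing 1 << y and comparing.  */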
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;
    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
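/* E.g. VEC_WIDEN_MULT_EVEN_EXPR on the narrow constant vectors {1,2,3,4}
   and {5,6,7,8} constant-folds to the wide two-element vector {5, 21},
   the products 1*5 and 3*7 of the even-numbered lanes.  */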
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ... */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc, arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = (HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = (HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~(HOST_WIDE_INT_M1U
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~(HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
						   TREE_TYPE (tem), arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0,
									0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2)
				 : integer_onep (op2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1)
				 : integer_onep (arg1))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }
		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
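      /* E.g. FMA_EXPR <3, 4, x> becomes "12 + x", and FMA_EXPR <a, b, 0>
	 becomes "a * b".  */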
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);
  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }
    /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
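/* Illustrative sketch, not part of the original file: the hypothetical
   helper below (hence ATTRIBUTE_UNUSED) shows fold_build1_loc folding a
   unary operation on a constant down to a single node instead of
   allocating a NEGATE_EXPR.  */

static tree ATTRIBUTE_UNUSED
example_fold_build1 (void)
{
  tree five = build_int_cst (integer_type_node, 5);

  /* Returns an INTEGER_CST of value -5; no NEGATE_EXPR node is built.  */
  return fold_build1_loc (UNKNOWN_LOCATION, NEGATE_EXPR,
			  integer_type_node, five);
}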
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
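/* Illustrative sketch, not part of the original file: a hypothetical
   caller of fold_build2_loc.  Feeding two INTEGER_CSTs through it
   yields the folded constant directly.  */

static tree ATTRIBUTE_UNUSED
example_fold_build2 (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Returns the INTEGER_CST 5 rather than a PLUS_EXPR node.  */
  return fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
			  integer_type_node, two, three);
}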
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
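/* Illustrative sketch, not part of the original file: a hypothetical
   caller showing why the initializer variants exist.  With
   -ftrapping-math, const_binop refuses to fold 1.0/0.0 because the
   division could trap at run time; in an initializer context the
   macros above temporarily clear flag_trapping_math, so the division
   is expected to fold to +Inf when the mode has infinities.  */

static tree ATTRIBUTE_UNUSED
example_fold_initializer (void)
{
  tree one = build_real (double_type_node, dconst1);
  tree zero = build_real (double_type_node, dconst0);

  return fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
				      double_type_node, one, zero);
}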
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total number of bits is shorter than the
	 result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
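/* Worked example for the width checks above (added commentary, not in
   the original sources): with inner types of 8 and 16 bits widened into
   a 32-bit signed int, a sum needs at most MAX (8, 16) + 1 = 17 bits
   and a product at most 8 + 16 = 24 bits.  Since 17 < 32 and 24 < 32,
   neither result can ever reach the sign bit, so both expressions are
   known non-negative.  */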
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
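/* Example of the BUILT_IN_POW logic above (added commentary, not in the
   original sources): pow (x, 2.0) is known non-negative for any x
   because 2.0 is an even integer-valued REAL_CST, whereas pow (x, 3.0)
   is known non-negative only when x itself can be shown non-negative.  */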
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
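/* Example of the transformation above (added commentary, not in the
   original sources): given the C expression "abc"[1], EXP is an
   ARRAY_REF of a STRING_CST with index 1, and the routine returns the
   character 'b' as an INTEGER_CST of the element type.  */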
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
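/* Worked example of the canonicalization above (added commentary, not
   in the original sources): to fold 3 > 2, GT is first turned into
   2 < 3 by swapping the operands, which yields 1; to fold 3 >= 2, GE
   is rewritten as !(3 < 2), i.e. the LT result 0 is inverted to 1.  */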
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return,
     or the right hand side of the modify expression inside the return.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left hand side of
     the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
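/* Example of the simplifications above (added commentary, not in the
   original sources): for "int a[4];", the C expression *(int *)&a
   folds to a[0]; for "_Complex double c;", *(double *)&c folds to
   __real__ c, and *((double *)&c + 1) folds to __imag__ c because the
   byte offset equals TYPE_SIZE_UNIT (double).  */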
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
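/* Worked example of the power-of-two path above (added commentary, not
   in the original sources): rounding VALUE up to a multiple of 8
   computes (VALUE + 7) & -8, so 13 becomes 20 & ~7 = 16, while a value
   that is already a multiple, such as 16, is left unchanged.  */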
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
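/* Example usage (added commentary, not in the original sources): for
   "int a[10];", the addresses &a[3] and &a[1] share the core &a, so
   the routine stores 2 * sizeof (int) = 8 in *DIFF and returns true;
   &a[i] and &a[1] differ by a non-constant offset, so it returns
   false.  */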
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
				arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);