/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
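/* As a rough usage sketch of these entry points (the trees X and Y here
   are placeholders for operands built elsewhere):

     tree sum  = size_binop (PLUS_EXPR, size_int (4), size_int (12));
     tree expr = fold (build2 (PLUS_EXPR, integer_type_node, x, y));

   size_binop folds the sizetype addition down to the constant 16, and
   fold returns either a simplified tree or its argument unchanged.  */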
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
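/* The encoding uses one bit per primitive outcome: bit 0 stands for
   "less than", bit 1 for "equal", bit 2 for "greater than" and bit 3
   for "unordered".  A compound code is the inclusive OR of its
   outcomes, e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, so ANDing
   or ORing two comparisons reduces to a bitwise AND or OR of these
   masks.  */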
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
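/* A typical calling pattern for this deferral machinery, sketched with a
   placeholder expression EXPR and statement STMT:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != expr, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   so a deferred warning is only emitted when the folded result was
   actually used.  */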
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
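/* For the functions above the identity -f(x) == f(-x) holds
   unconditionally, e.g. sin(-x) == -sin(x).  The rint family is only
   odd when the rounding mode is symmetric about zero: under upward
   rounding, rint(0.5) is 1.0 while rint(-0.5) is -0.0, so the
   transformation must be disabled when -frounding-math is in
   effect.  */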
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
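/* For example, in a signed 8-bit type only the value -128 (bit pattern
   0x80) has no representable negation, so with PREC == 8 the test
   above amounts to VAL != 0x80: every other value can be safely
   negated.  */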
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
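/* The RSHIFT_EXPR case above relies on the usual arithmetic-shift
   behaviour: for 32-bit int, (int) x >> 31 is 0 or -1 depending on the
   sign bit, so its negation, 0 or 1, equals the logical shift
   (unsigned) x >> 31, saving the explicit negation.  */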
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
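/* For instance, splitting IN = a - 5 with CODE == PLUS_EXPR returns the
   variable part a, stores nothing in *CONP and, because the literal 5
   was subtracted, stores it in *MINUS_LITP rather than negating it into
   *LITP.  */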
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever oveflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
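/* A small usage sketch: folding the constant addition 2 + 3 as

     tree t = int_const_binop (PLUS_EXPR,
                               build_int_cst (integer_type_node, 2),
                               build_int_cst (integer_type_node, 3));

   yields an INTEGER_CST of value 5; had the addition overflowed, the
   overflow would have been recorded on the result by
   force_fit_type_double.  */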
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;

          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;

          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and return NULL_TREE */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
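/* As a worked example of the straightforward (flag_complex_method == 0)
   division above: (3 + 2i) / (1 + 1i) gives magsquared = 1*1 + 1*1 = 2,
   t1 = 3*1 + 2*1 = 5 and t2 = 2*1 - 3*1 = -1, hence the quotient
   5/2 + (-1/2)i = 2.5 - 0.5i.  */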
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
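/* For example, with sizetype constants ARG0 == 4 and ARG1 == 12 the
   unsigned subtraction would wrap, so the code above computes
   12 - 4 == 8 in sizetype and negates the converted result in
   ssizetype, returning a signed -8.  */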
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
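/* For example, converting the REAL_CST 1e30 to a 32-bit int saturates
   to INT_MAX (2147483647) and converting a NaN yields 0; in both cases
   TREE_OVERFLOW is set on the result so callers can diagnose the
   out-of-range conversion.  */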
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);

  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_convert_loc (loc, TREE_TYPE (type), arg),
                              fold_convert_loc (loc, TREE_TYPE (type),
                                            integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                      TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                      TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
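/* For example, the inverse of LT_EXPR is UNGE_EXPR when NaNs are
   honored: if either operand is a NaN, x < y is false, so the inverted
   comparison must be true, which x >= y is not.  */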
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
2306 /* Convert a compcode bit-based encoding of a comparison operator back
2307    to GCC's enum tree_code representation.  This function is the
2308    inverse of comparison_to_compcode.  */
2310 static enum tree_code
2311 compcode_to_comparison (enum comparison_code code)
2328       return ORDERED_EXPR;
2329     case COMPCODE_UNORD:
2330       return UNORDERED_EXPR;
2348 /* Return a tree for the comparison which is the combination of
2349    doing the AND or OR (depending on CODE) of the two operations LCODE
2350    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2351    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2352    if this makes the transformation invalid.  */
2355 combine_comparisons (location_t loc,
2356                      enum tree_code code, enum tree_code lcode,
2357                      enum tree_code rcode, tree truth_type,
2358                      tree ll_arg, tree lr_arg)
2360   bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2361   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2362   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2367     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2368       compcode = lcompcode & rcompcode;
2371     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2372       compcode = lcompcode | rcompcode;
2381       /* Eliminate unordered comparisons, as well as LTGT and ORD
2382          which are not used unless the mode has NaNs.  */
2383       compcode &= ~COMPCODE_UNORD;
2384       if (compcode == COMPCODE_LTGT)
2385         compcode = COMPCODE_NE;
2386       else if (compcode == COMPCODE_ORD)
2387         compcode = COMPCODE_TRUE;
2389   else if (flag_trapping_math)
2391       /* Check that the original operation and the optimized ones will trap
2392          under the same condition.  */
2393       bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2394                    && (lcompcode != COMPCODE_EQ)
2395                    && (lcompcode != COMPCODE_ORD);
2396       bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2397                    && (rcompcode != COMPCODE_EQ)
2398                    && (rcompcode != COMPCODE_ORD);
2399       bool trap = (compcode & COMPCODE_UNORD) == 0
2400                   && (compcode != COMPCODE_EQ)
2401                   && (compcode != COMPCODE_ORD);
2403       /* In a short-circuited boolean expression the LHS might be
2404          such that the RHS, if evaluated, will never trap.  For
2405          example, in ORD (x, y) && (x < y), we evaluate the RHS only
2406          if neither x nor y is NaN.  (This is a mixed blessing: for
2407          example, the expression above will never trap, hence
2408          optimizing it to x < y would be invalid).  */
2409       if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2410           || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2413       /* If the comparison was short-circuited, and only the RHS
2414          trapped, we may now generate a spurious trap.  */
2416           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2419       /* If we changed the conditions that cause a trap, we lose.  */
2420       if ((ltrap || rtrap) != trap)
2424   if (compcode == COMPCODE_TRUE)
2425     return constant_boolean_node (true, truth_type);
2426   else if (compcode == COMPCODE_FALSE)
2427     return constant_boolean_node (false, truth_type);
2430       enum tree_code tcode;
2432       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2433       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
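/* Illustrative sketch of the compcode arithmetic above, assuming the
   bit-based encoding implied by the comparison_code names (one bit
   each for the LT, EQ, GT and UNORD outcomes):

     (a < b) || (a == b)  -->  COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE
                          -->  a <= b
     (a < b) && (a == b)  -->  COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE
                          -->  constant false

   subject to the NaN and trapping-math checks performed above.  */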
2437 /* Return nonzero if two operands (typically of the same tree node)
2438    are necessarily equal.  If either argument has side-effects this
2439    function returns zero.  FLAGS modifies behavior as follows:
2441    If OEP_ONLY_CONST is set, only return nonzero for constants.
2442    This function tests whether the operands are indistinguishable;
2443    it does not test whether they are equal using C's == operation.
2444    The distinction is important for IEEE floating point, because
2445    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2446    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2448    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2449    even though it may hold multiple values during a function.
2450    This is because a GCC tree node guarantees that nothing else is
2451    executed between the evaluation of its "operands" (which may often
2452    be evaluated in arbitrary order).  Hence if the operands themselves
2453    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2454    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2455    unset means assuming isochronic (or instantaneous) tree equivalence.
2456    Unless comparing arbitrary expression trees, such as from different
2457    statements, this flag can usually be left unset.
2459    If OEP_PURE_SAME is set, then pure functions with identical arguments
2460    are considered the same.  It is used when the caller has other ways
2461    to ensure that global memory is unchanged in between.  */
2464 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2466   /* If either is ERROR_MARK, they aren't equal.  */
2467   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2468       || TREE_TYPE (arg0) == error_mark_node
2469       || TREE_TYPE (arg1) == error_mark_node)
2472   /* Similar, if either does not have a type (like a released SSA name),
2473      they aren't equal.  */
2474   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2477   /* Check equality of integer constants before bailing out due to
2478      precision differences.  */
2479   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2480     return tree_int_cst_equal (arg0, arg1);
2482   /* If both types don't have the same signedness, then we can't consider
2483      them equal.  We must check this before the STRIP_NOPS calls
2484      because they may change the signedness of the arguments.  As pointers
2485      strictly don't have a signedness, require either two pointers or
2486      two non-pointers as well.  */
2487   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2488       || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2491   /* We cannot consider pointers to different address spaces equal.  */
2492   if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2493       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2494           != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2497   /* If both types don't have the same precision, then it is not safe
2499   if (element_precision (TREE_TYPE (arg0))
2500       != element_precision (TREE_TYPE (arg1)))
2506   /* In case both args are comparisons but with different comparison
2507      code, try to swap the comparison operands of one arg to produce
2508      a match and compare that variant.  */
2509   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2510       && COMPARISON_CLASS_P (arg0)
2511       && COMPARISON_CLASS_P (arg1))
2513       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2515       if (TREE_CODE (arg0) == swap_code)
2516         return operand_equal_p (TREE_OPERAND (arg0, 0),
2517                                 TREE_OPERAND (arg1, 1), flags)
2518                && operand_equal_p (TREE_OPERAND (arg0, 1),
2519                                    TREE_OPERAND (arg1, 0), flags);
2522   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2523       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
2524       && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2527   /* This is needed for conversions and for COMPONENT_REF.
2528      Might as well play it safe and always test this.  */
2529   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2530       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2531       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2534   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2535      We don't care about side effects in that case because the SAVE_EXPR
2536      takes care of that for us.  In all other cases, two expressions are
2537      equal if they have no side effects.  If we have two identical
2538      expressions with side effects that should be treated the same due
2539      to the only side effects being identical SAVE_EXPR's, that will
2540      be detected in the recursive calls below.
2541      If we are taking an invariant address of two identical objects
2542      they are necessarily equal as well.  */
2543   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2544       && (TREE_CODE (arg0) == SAVE_EXPR
2545           || (flags & OEP_CONSTANT_ADDRESS_OF)
2546           || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2549   /* Next handle constant cases, those for which we can return 1 even
2550      if ONLY_CONST is set.  */
2551   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2552     switch (TREE_CODE (arg0))
2555         return tree_int_cst_equal (arg0, arg1);
2558         return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2559                                        TREE_FIXED_CST (arg1));
2562         if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2563                                    TREE_REAL_CST (arg1)))
2567         if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2569             /* If we do not distinguish between signed and unsigned zero,
2570                consider them equal.  */
2571             if (real_zerop (arg0) && real_zerop (arg1))
2580         if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2583         for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2585             if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2586                                   VECTOR_CST_ELT (arg1, i), flags))
2593         return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2595                 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2599         return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2600                 && ! memcmp (TREE_STRING_POINTER (arg0),
2601                              TREE_STRING_POINTER (arg1),
2602                              TREE_STRING_LENGTH (arg0)));
2605         return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2606                                 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2607                                 ? OEP_CONSTANT_ADDRESS_OF : 0);
2612   if (flags & OEP_ONLY_CONST)
2615 /* Define macros to test an operand from arg0 and arg1 for equality and a
2616    variant that allows null and views null as being different from any
2617    non-null value.  In the latter case, if either is null, they both
2618    must be; otherwise, do the normal comparison.  */
2619 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2620                                     TREE_OPERAND (arg1, N), flags)
2622 #define OP_SAME_WITH_NULL(N)                            \
2623   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2624    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2626   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2629       /* Two conversions are equal only if signedness and modes match.  */
2630       switch (TREE_CODE (arg0))
2633         case FIX_TRUNC_EXPR:
2634           if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2635               != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2645     case tcc_comparison:
2647       if (OP_SAME (0) && OP_SAME (1))
2650       /* For commutative ops, allow the other order.  */
2651       return (commutative_tree_code (TREE_CODE (arg0))
2652               && operand_equal_p (TREE_OPERAND (arg0, 0),
2653                                   TREE_OPERAND (arg1, 1), flags)
2654               && operand_equal_p (TREE_OPERAND (arg0, 1),
2655                                   TREE_OPERAND (arg1, 0), flags));
2658       /* If either of the pointer (or reference) expressions we are
2659          dereferencing contain a side effect, these cannot be equal,
2660          but their addresses can be.  */
2661       if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2662           && (TREE_SIDE_EFFECTS (arg0)
2663               || TREE_SIDE_EFFECTS (arg1)))
2666       switch (TREE_CODE (arg0))
2669           flags &= ~OEP_CONSTANT_ADDRESS_OF;
2676         case TARGET_MEM_REF:
2677           flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678           /* Require equal extra operands and then fall through to MEM_REF
2679              handling of the two common operands.  */
2680           if (!OP_SAME_WITH_NULL (2)
2681               || !OP_SAME_WITH_NULL (3)
2682               || !OP_SAME_WITH_NULL (4))
2686           flags &= ~OEP_CONSTANT_ADDRESS_OF;
2687           /* Require equal access sizes, and similar pointer types.
2688              We can have incomplete types for array references of
2689              variable-sized arrays from the Fortran frontend
2690              though.  Also verify the types are compatible.  */
2691           return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2692                    || (TYPE_SIZE (TREE_TYPE (arg0))
2693                        && TYPE_SIZE (TREE_TYPE (arg1))
2694                        && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2695                                            TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2696                   && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2697                   && alias_ptr_types_compatible_p
2698                        (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2699                         TREE_TYPE (TREE_OPERAND (arg1, 1)))
2700                   && OP_SAME (0) && OP_SAME (1));
2703         case ARRAY_RANGE_REF:
2704           /* Operands 2 and 3 may be null.
2705              Compare the array index by value if it is constant first as we
2706              may have different types but same value here.  */
2709           flags &= ~OEP_CONSTANT_ADDRESS_OF;
2710           return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2711                                        TREE_OPERAND (arg1, 1))
2713                   && OP_SAME_WITH_NULL (2)
2714                   && OP_SAME_WITH_NULL (3));
2717           /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2718              may be NULL when we're called to compare MEM_EXPRs.  */
2719           if (!OP_SAME_WITH_NULL (0)
2722           flags &= ~OEP_CONSTANT_ADDRESS_OF;
2723           return OP_SAME_WITH_NULL (2);
2728           flags &= ~OEP_CONSTANT_ADDRESS_OF;
2729           return OP_SAME (1) && OP_SAME (2);
2735     case tcc_expression:
2736       switch (TREE_CODE (arg0))
2739         case TRUTH_NOT_EXPR:
2742         case TRUTH_ANDIF_EXPR:
2743         case TRUTH_ORIF_EXPR:
2744           return OP_SAME (0) && OP_SAME (1);
2747         case WIDEN_MULT_PLUS_EXPR:
2748         case WIDEN_MULT_MINUS_EXPR:
2751           /* The multiplication operands are commutative.  */
2754         case TRUTH_AND_EXPR:
2756         case TRUTH_XOR_EXPR:
2757           if (OP_SAME (0) && OP_SAME (1))
2760           /* Otherwise take into account this is a commutative operation.  */
2761           return (operand_equal_p (TREE_OPERAND (arg0, 0),
2762                                    TREE_OPERAND (arg1, 1), flags)
2763                   && operand_equal_p (TREE_OPERAND (arg0, 1),
2764                                       TREE_OPERAND (arg1, 0), flags));
2769           return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2776       switch (TREE_CODE (arg0))
2779           /* If the CALL_EXPRs call different functions, then they
2780              clearly cannot be equal.  */
2781           if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2786             unsigned int cef = call_expr_flags (arg0);
2787             if (flags & OEP_PURE_SAME)
2788               cef &= ECF_CONST | ECF_PURE;
2795           /* Now see if all the arguments are the same.  */
2797             const_call_expr_arg_iterator iter0, iter1;
2799             for (a0 = first_const_call_expr_arg (arg0, &iter0),
2800                  a1 = first_const_call_expr_arg (arg1, &iter1);
2802                  a0 = next_const_call_expr_arg (&iter0),
2803                  a1 = next_const_call_expr_arg (&iter1))
2804               if (! operand_equal_p (a0, a1, flags))
2807             /* If we get here and both argument lists are exhausted
2808                then the CALL_EXPRs are equal.  */
2809             return ! (a0 || a1);
2815     case tcc_declaration:
2816       /* Consider __builtin_sqrt equal to sqrt.  */
2817       return (TREE_CODE (arg0) == FUNCTION_DECL
2818               && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2819               && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2820               && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2827 #undef OP_SAME_WITH_NULL
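/* Illustrative example of the constant cases above: operand_equal_p
   tests for indistinguishable trees, not C `==' equality, so for two
   REAL_CSTs

     operand_equal_p (-0.0, 0.0, 0)  -->  0

   because REAL_VALUES_IDENTICAL distinguishes the zeros (unless the
   mode does not honor signed zeros, in which case the real_zerop
   special case above treats them as equal), even though the C
   expression -0.0 == 0.0 is true.  */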
2830 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2831    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2833    When in doubt, return 0.  */
2836 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2838   int unsignedp1, unsignedpo;
2839   tree primarg0, primarg1, primother;
2840   unsigned int correct_width;
2842   if (operand_equal_p (arg0, arg1, 0))
2845   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2846       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2849   /* Discard any conversions that don't change the modes of ARG0 and ARG1
2850      and see if the inner values are the same.  This removes any
2851      signedness comparison, which doesn't matter here.  */
2852   primarg0 = arg0, primarg1 = arg1;
2853   STRIP_NOPS (primarg0);
2854   STRIP_NOPS (primarg1);
2855   if (operand_equal_p (primarg0, primarg1, 0))
2858   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2859      actual comparison operand, ARG0.
2861      First throw away any conversions to wider types
2862      already present in the operands.  */
2864   primarg1 = get_narrower (arg1, &unsignedp1);
2865   primother = get_narrower (other, &unsignedpo);
2867   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2868   if (unsignedp1 == unsignedpo
2869       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2870       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2872       tree type = TREE_TYPE (arg0);
2874       /* Make sure shorter operand is extended the right way
2875          to match the longer operand.  */
2876       primarg1 = fold_convert (signed_or_unsigned_type_for
2877                                (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2879       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
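/* Illustrative (hypothetical) use of the predicate above: if the front
   end's shorten_compare rewrote the comparison `(int) sc == 12' (SC a
   signed char) to operate directly on SC, then

     operand_equal_for_comparison_p (sc, (int) sc, 12)

   recognizes SC as the shortened form of `(int) sc' by re-narrowing
   and re-extending the operand and comparing the results.  The names
   SC and the constant 12 are made up for illustration.  */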
2886 /* See if ARG is an expression that is either a comparison or is performing
2887    arithmetic on comparisons.  The comparisons must only be comparing
2888    two different values, which will be stored in *CVAL1 and *CVAL2; if
2889    they are nonzero it means that some operands have already been found.
2890    No variables may be used anywhere else in the expression except in the
2891    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2892    the expression and save_expr needs to be called with CVAL1 and CVAL2.
2894    If this is true, return 1.  Otherwise, return zero.  */
2897 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2899   enum tree_code code = TREE_CODE (arg);
2900   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2902   /* We can handle some of the tcc_expression cases here.  */
2903   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2905   else if (tclass == tcc_expression
2906            && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2907                || code == COMPOUND_EXPR))
2908     tclass = tcc_binary;
2910   else if (tclass == tcc_expression && code == SAVE_EXPR
2911            && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2913       /* If we've already found a CVAL1 or CVAL2, this expression is
2914          too complex to handle.  */
2915       if (*cval1 || *cval2)
2925       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2928       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2929               && twoval_comparison_p (TREE_OPERAND (arg, 1),
2930                                       cval1, cval2, save_p));
2935     case tcc_expression:
2936       if (code == COND_EXPR)
2937         return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2938                                      cval1, cval2, save_p)
2939                 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2940                                         cval1, cval2, save_p)
2941                 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2942                                         cval1, cval2, save_p));
2945     case tcc_comparison:
2946       /* First see if we can handle the first operand, then the second.  For
2947          the second operand, we know *CVAL1 can't be zero.  It must be that
2948          one side of the comparison is each of the values; test for the
2949          case where this isn't true by failing if the two operands
2952       if (operand_equal_p (TREE_OPERAND (arg, 0),
2953                            TREE_OPERAND (arg, 1), 0))
2957         *cval1 = TREE_OPERAND (arg, 0);
2958       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2960       else if (*cval2 == 0)
2961         *cval2 = TREE_OPERAND (arg, 0);
2962       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2967       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2969       else if (*cval2 == 0)
2970         *cval2 = TREE_OPERAND (arg, 1);
2971       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2983 /* ARG is a tree that is known to contain just arithmetic operations and
2984    comparisons.  Evaluate the operations in the tree substituting NEW0 for
2985    any occurrence of OLD0 as an operand of a comparison and likewise for
2989 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2990             tree old1, tree new1)
2992   tree type = TREE_TYPE (arg);
2993   enum tree_code code = TREE_CODE (arg);
2994   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2996   /* We can handle some of the tcc_expression cases here.  */
2997   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2999   else if (tclass == tcc_expression
3000            && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3001     tclass = tcc_binary;
3006       return fold_build1_loc (loc, code, type,
3007                               eval_subst (loc, TREE_OPERAND (arg, 0),
3008                                           old0, new0, old1, new1));
3011       return fold_build2_loc (loc, code, type,
3012                               eval_subst (loc, TREE_OPERAND (arg, 0),
3013                                           old0, new0, old1, new1),
3014                               eval_subst (loc, TREE_OPERAND (arg, 1),
3015                                           old0, new0, old1, new1));
3017     case tcc_expression:
3021           return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3025           return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3029           return fold_build3_loc (loc, code, type,
3030                                   eval_subst (loc, TREE_OPERAND (arg, 0),
3031                                               old0, new0, old1, new1),
3032                                   eval_subst (loc, TREE_OPERAND (arg, 1),
3033                                               old0, new0, old1, new1),
3034                                   eval_subst (loc, TREE_OPERAND (arg, 2),
3035                                               old0, new0, old1, new1));
3039       /* Fall through - ???  */
3041     case tcc_comparison:
3043         tree arg0 = TREE_OPERAND (arg, 0);
3044         tree arg1 = TREE_OPERAND (arg, 1);
3046         /* We need to check both for exact equality and tree equality.  The
3047            former will be true if the operand has a side-effect.  In that
3048            case, we know the operand occurred exactly once.  */
3050         if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3052         else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3055         if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3057         else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3060         return fold_build2_loc (loc, code, type, arg0, arg1);
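/* Illustrative sketch of how twoval_comparison_p and eval_subst work
   together: for ARG = `(a < b) && (a != b)', twoval_comparison_p
   records CVAL1 = a and CVAL2 = b, and a caller can then evaluate the
   whole expression for particular values of the two operands, e.g.

     eval_subst (loc, arg, a, integer_zero_node, b, integer_one_node)

   folds `(0 < 1) && (0 != 1)' down to a constant.  The operand names
   here are made up for illustration.  */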
3068 /* Return a tree for the case when the result of an expression is RESULT
3069    converted to TYPE and OMITTED was previously an operand of the expression
3070    but is now not needed (e.g., we folded OMITTED * 0).
3072    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3073    the conversion of RESULT to TYPE.  */
3076 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3078   tree t = fold_convert_loc (loc, type, result);
3080   /* If the resulting operand is an empty statement, just return the omitted
3081      statement cast to void.  */
3082   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3083     return build1_loc (loc, NOP_EXPR, void_type_node,
3084                        fold_ignored_result (omitted));
3086   if (TREE_SIDE_EFFECTS (omitted))
3087     return build2_loc (loc, COMPOUND_EXPR, type,
3088                        fold_ignored_result (omitted), t);
3090   return non_lvalue_loc (loc, t);
3093 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3096 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3099   tree t = fold_convert_loc (loc, type, result);
3101   /* If the resulting operand is an empty statement, just return the omitted
3102      statement cast to void.  */
3103   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3104     return build1_loc (loc, NOP_EXPR, void_type_node,
3105                        fold_ignored_result (omitted));
3107   if (TREE_SIDE_EFFECTS (omitted))
3108     return build2_loc (loc, COMPOUND_EXPR, type,
3109                        fold_ignored_result (omitted), t);
3111   return pedantic_non_lvalue_loc (loc, t);
3114 /* Return a tree for the case when the result of an expression is RESULT
3115    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3116    of the expression but are now not needed.
3118    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3119    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3120    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3121    just do the conversion of RESULT to TYPE.  */
3124 omit_two_operands_loc (location_t loc, tree type, tree result,
3125                        tree omitted1, tree omitted2)
3127   tree t = fold_convert_loc (loc, type, result);
3129   if (TREE_SIDE_EFFECTS (omitted2))
3130     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3131   if (TREE_SIDE_EFFECTS (omitted1))
3132     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3134   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
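/* Illustrative example: when folding `f () * 0', the result is zero
   but the call cannot simply be dropped if it has side effects;
   omit_one_operand_loc then builds

     (f (), 0)

   i.e. a COMPOUND_EXPR that still evaluates the omitted operand but
   yields the folded result.  */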
3138 /* Return a simplified tree node for the truth-negation of ARG.  This
3139    never alters ARG itself.  We assume that ARG is an operation that
3140    returns a truth value (0 or 1).
3142    FIXME: one would think we would fold the result, but it causes
3143    problems with the dominator optimizer.  */
3146 fold_truth_not_expr (location_t loc, tree arg)
3148   tree type = TREE_TYPE (arg);
3149   enum tree_code code = TREE_CODE (arg);
3150   location_t loc1, loc2;
3152   /* If this is a comparison, we can simply invert it, except for
3153      floating-point non-equality comparisons, in which case we just
3154      enclose a TRUTH_NOT_EXPR around what we have.  */
3156   if (TREE_CODE_CLASS (code) == tcc_comparison)
3158       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3159       if (FLOAT_TYPE_P (op_type)
3160           && flag_trapping_math
3161           && code != ORDERED_EXPR && code != UNORDERED_EXPR
3162           && code != NE_EXPR && code != EQ_EXPR)
3165       code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3166       if (code == ERROR_MARK)
3169       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3170                          TREE_OPERAND (arg, 1));
3176       return constant_boolean_node (integer_zerop (arg), type);
3178     case TRUTH_AND_EXPR:
3179       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3180       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3181       return build2_loc (loc, TRUTH_OR_EXPR, type,
3182                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3183                          invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3186       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3187       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3188       return build2_loc (loc, TRUTH_AND_EXPR, type,
3189                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3190                          invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3192     case TRUTH_XOR_EXPR:
3193       /* Here we can invert either operand.  We invert the first operand
3194          unless the second operand is a TRUTH_NOT_EXPR in which case our
3195          result is the XOR of the first operand with the inside of the
3196          negation of the second operand.  */
3198       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3199         return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3200                            TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3202       return build2_loc (loc, TRUTH_XOR_EXPR, type,
3203                          invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3204                          TREE_OPERAND (arg, 1));
3206     case TRUTH_ANDIF_EXPR:
3207       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3208       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3209       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3210                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3211                          invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3213     case TRUTH_ORIF_EXPR:
3214       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3215       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3216       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3217                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3218                          invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3220     case TRUTH_NOT_EXPR:
3221       return TREE_OPERAND (arg, 0);
3225         tree arg1 = TREE_OPERAND (arg, 1);
3226         tree arg2 = TREE_OPERAND (arg, 2);
3228         loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3229         loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3231         /* A COND_EXPR may have a throw as one operand, which
3232            then has void type.  Just leave void operands
3234         return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3235                            VOID_TYPE_P (TREE_TYPE (arg1))
3236                            ? arg1 : invert_truthvalue_loc (loc1, arg1),
3237                            VOID_TYPE_P (TREE_TYPE (arg2))
3238                            ? arg2 : invert_truthvalue_loc (loc2, arg2));
3242       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3243       return build2_loc (loc, COMPOUND_EXPR, type,
3244                          TREE_OPERAND (arg, 0),
3245                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3247     case NON_LVALUE_EXPR:
3248       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3249       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3252       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3253         return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3255       /* ... fall through ...  */
3258       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3259       return build1_loc (loc, TREE_CODE (arg), type,
3260                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3263       if (!integer_onep (TREE_OPERAND (arg, 1)))
3265       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3268       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3270     case CLEANUP_POINT_EXPR:
3271       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3272       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3273                          invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
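/* Illustrative examples: the TRUTH_AND_EXPR case above is De Morgan's
   law at tree level,

     !(a && b)  -->  !a || !b

   and the COND_EXPR case pushes the negation into both arms,

     !(c ? x : y)  -->  c ? !x : !y

   leaving any void-typed arm (e.g. a throw) untouched.  */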
3280 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3281    assume that ARG is an operation that returns a truth value (0 or 1
3282    for scalars, 0 or -1 for vectors).  Return the folded expression if
3283    folding is successful.  Otherwise, return NULL_TREE.  */
3286 fold_invert_truthvalue (location_t loc, tree arg)
3288   tree type = TREE_TYPE (arg);
3289   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3295 /* Return a simplified tree node for the truth-negation of ARG.  This
3296    never alters ARG itself.  We assume that ARG is an operation that
3297    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3300 invert_truthvalue_loc (location_t loc, tree arg)
3302   if (TREE_CODE (arg) == ERROR_MARK)
3305   tree type = TREE_TYPE (arg);
3306   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3312 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3313    operands are another bit-wise operation with a common input.  If so,
3314    distribute the bit operations to save an operation and possibly two if
3315    constants are involved.  For example, convert
3316      (A | B) & (A | C) into A | (B & C)
3317    Further simplification will occur if B and C are constants.
3319    If this optimization cannot be done, 0 will be returned.  */
3322 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3323                      tree arg0, tree arg1)
3328   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3329       || TREE_CODE (arg0) == code
3330       || (TREE_CODE (arg0) != BIT_AND_EXPR
3331           && TREE_CODE (arg0) != BIT_IOR_EXPR))
3334   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3336       common = TREE_OPERAND (arg0, 0);
3337       left = TREE_OPERAND (arg0, 1);
3338       right = TREE_OPERAND (arg1, 1);
3340   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3342       common = TREE_OPERAND (arg0, 0);
3343       left = TREE_OPERAND (arg0, 1);
3344       right = TREE_OPERAND (arg1, 0);
3346   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3348       common = TREE_OPERAND (arg0, 1);
3349       left = TREE_OPERAND (arg0, 0);
3350       right = TREE_OPERAND (arg1, 1);
3352   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3354       common = TREE_OPERAND (arg0, 1);
3355       left = TREE_OPERAND (arg0, 0);
3356       right = TREE_OPERAND (arg1, 0);
3361   common = fold_convert_loc (loc, type, common);
3362   left = fold_convert_loc (loc, type, left);
3363   right = fold_convert_loc (loc, type, right);
3364   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3365                           fold_build2_loc (loc, code, type, left, right));
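/* Illustrative worked example: with the common operand A = x,

     (x | 3) & (x | 5)  -->  x | (3 & 5)  -->  x | 1

   one bitwise operation remains instead of three once the constant
   subexpression folds.  */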
3368 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3369    with code CODE.  This optimization is unsafe.  */
3371 distribute_real_division (location_t loc, enum tree_code code, tree type,
3372                           tree arg0, tree arg1)
3374   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3375   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3377   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3379       && operand_equal_p (TREE_OPERAND (arg0, 1),
3380                           TREE_OPERAND (arg1, 1), 0))
3381     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3382                             fold_build2_loc (loc, code, type,
3383                                              TREE_OPERAND (arg0, 0),
3384                                              TREE_OPERAND (arg1, 0)),
3385                             TREE_OPERAND (arg0, 1));
3387   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3388   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3389                        TREE_OPERAND (arg1, 0), 0)
3390       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3391       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3393       REAL_VALUE_TYPE r0, r1;
3394       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3395       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3397       real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3399       real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3400       real_arithmetic (&r0, code, &r0, &r1);
3401       return fold_build2_loc (loc, MULT_EXPR, type,
3402                               TREE_OPERAND (arg0, 0),
3403                               build_real (type, r0));
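/* Illustrative worked examples (valid only under the unsafe FP-math
   assumptions noted above):

     a/c + b/c      -->  (a + b) / c
     a/4.0 - a/8.0  -->  a * (1/4.0 - 1/8.0)  -->  a * 0.125

   The second form trades two divisions for one multiplication by a
   compile-time constant.  */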
3409 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3410    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3413 make_bit_field_ref (location_t loc, tree inner, tree type,
3414                     HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3416   tree result, bftype;
3420   tree size = TYPE_SIZE (TREE_TYPE (inner));
3421   if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3422        || POINTER_TYPE_P (TREE_TYPE (inner)))
3423       && tree_fits_shwi_p (size)
3424       && tree_to_shwi (size) == bitsize)
3425     return fold_convert_loc (loc, type, inner);
3429   if (TYPE_PRECISION (bftype) != bitsize
3430       || TYPE_UNSIGNED (bftype) == !unsignedp)
3431     bftype = build_nonstandard_integer_type (bitsize, 0);
3433   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3434                        size_int (bitsize), bitsize_int (bitpos));
3437   result = fold_convert_loc (loc, type, result);
3442 /* Optimize a bit-field compare.
3444    There are two cases:  First is a compare against a constant and the
3445    second is a comparison of two items where the fields are at the same
3446    bit position relative to the start of a chunk (byte, halfword, word)
3447    large enough to contain it.  In these cases we can avoid the shift
3448    implicit in bitfield extractions.
3450    For constants, we emit a compare of the shifted constant with the
3451    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3452    compared.  For two fields at the same position, we do the ANDs with the
3453    similar mask and compare the result of the ANDs.
3455    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3456    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3457    are the left and right operands of the comparison, respectively.
3459    If the optimization described above can be done, we return the resulting
3460    tree.  Otherwise we return zero.  */
3463 optimize_bit_field_compare (location_t loc, enum tree_code code,
3464                             tree compare_type, tree lhs, tree rhs)
3466   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3467   tree type = TREE_TYPE (lhs);
3468   tree signed_type, unsigned_type;
3469   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3470   enum machine_mode lmode, rmode, nmode;
3471   int lunsignedp, runsignedp;
3472   int lvolatilep = 0, rvolatilep = 0;
3473   tree linner, rinner = NULL_TREE;
3477   /* Get all the information about the extractions being done.  If the bit size
3478      is the same as the size of the underlying object, we aren't doing an
3479      extraction at all and so can do nothing.  We also don't want to
3480      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3481      then will no longer be able to replace it.  */
3482   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3483                                 &lunsignedp, &lvolatilep, false);
3484   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3485       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3490   /* If this is not a constant, we can only do something if bit positions,
3491      sizes, and signedness are the same.  */
3492   rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3493                                 &runsignedp, &rvolatilep, false);
3495   if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3496       || lunsignedp != runsignedp || offset != 0
3497       || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3501   /* See if we can find a mode to refer to this field.  We should be able to,
3502      but fail if we can't.  */
3503   nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3504                          const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3505                          : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3506                                 TYPE_ALIGN (TREE_TYPE (rinner))),
3508   if (nmode == VOIDmode)
3511   /* Set signed and unsigned types of the precision of this mode for the
3513   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3514   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3516   /* Compute the bit position and size for the new reference and our offset
3517      within it.  If the new reference is the same size as the original, we
3518      won't optimize anything, so return zero.  */
3519   nbitsize = GET_MODE_BITSIZE (nmode);
3520   nbitpos = lbitpos & ~ (nbitsize - 1);
3522   if (nbitsize == lbitsize)
3525   if (BYTES_BIG_ENDIAN)
3526     lbitpos = nbitsize - lbitsize - lbitpos;
3528   /* Make the mask to be used against the extracted field.  */
3529   mask = build_int_cst_type (unsigned_type, -1);
3530   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3531   mask = const_binop (RSHIFT_EXPR, mask,
3532                       size_int (nbitsize - lbitsize - lbitpos));
3535   /* If not comparing with constant, just rework the comparison
3537     return fold_build2_loc (loc, code, compare_type,
3538                             fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3539                                              make_bit_field_ref (loc, linner,
3544                             fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3545                                              make_bit_field_ref (loc, rinner,
3551   /* Otherwise, we are handling the constant case.  See if the constant is too
3552      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3553      this not only for its own sake, but to avoid having to test for this
3554      error case below.  If we didn't, we might generate wrong code.
3556      For unsigned fields, the constant shifted right by the field length should
3557      be all zero.  For signed fields, the high-order bits should agree with
3562   if (! integer_zerop (const_binop (RSHIFT_EXPR,
3563                                     fold_convert_loc (loc,
3564                                                       unsigned_type, rhs),
3565                                     size_int (lbitsize))))
3567       warning (0, "comparison is always %d due to width of bit-field",
3569       return constant_boolean_node (code == NE_EXPR, compare_type);
3574       tree tem = const_binop (RSHIFT_EXPR,
3575                               fold_convert_loc (loc, signed_type, rhs),
3576                               size_int (lbitsize - 1));
3577       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3579           warning (0, "comparison is always %d due to width of bit-field",
3581           return constant_boolean_node (code == NE_EXPR, compare_type);
3585   /* Single-bit compares should always be against zero.  */
3586   if (lbitsize == 1 && ! integer_zerop (rhs))
3588       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3589       rhs = build_int_cst (type, 0);
3592   /* Make a new bitfield reference, shift the constant over the
3593      appropriate number of bits and mask it with the computed mask
3594      (in case this was a signed field).  If we changed it, make a new one.  */
3595   lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3597   rhs = const_binop (BIT_AND_EXPR,
3598                      const_binop (LSHIFT_EXPR,
3599                                   fold_convert_loc (loc, unsigned_type, rhs),
3600                                   size_int (lbitpos)),
3603   lhs = build2_loc (loc, code, compare_type,
3604                     build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
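/* Illustrative sketch of the constant case, with a made-up layout:
   given

     struct { unsigned a : 2, f : 3; } s;

   where F happens to occupy bits 2..4 of a byte B that also holds A,
   the comparison `s.f == 5' is rewritten to operate on B with no
   extraction shift:

     (B & (7 << 2)) == (5 << 2)

   The field positions here are illustrative only; the real ones come
   from get_inner_reference above.  */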
3608 /* Subroutine for fold_truth_andor_1: decode a field reference.
3610    If EXP is a comparison reference, we return the innermost reference.
3612    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3613    set to the starting bit number.
3615    If the innermost field can be completely contained in a mode-sized
3616    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3618    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3619    otherwise it is not changed.
3621    *PUNSIGNEDP is set to the signedness of the field.
3623    *PMASK is set to the mask used.  This is either contained in a
3624    BIT_AND_EXPR or derived from the width of the field.
3626    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3628    Return 0 if this is not a component reference or is one that we can't
3629    do anything with.  */
3632 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3633                         HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3634                         int *punsignedp, int *pvolatilep,
3635                         tree *pmask, tree *pand_mask)
3637   tree outer_type = 0;
3639   tree mask, inner, offset;
3641   unsigned int precision;
3643   /* All the optimizations using this function assume integer fields.
3644      There are problems with FP fields since the type_for_size call
3645      below can fail for, e.g., XFmode.  */
3646   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3649   /* We are interested in the bare arrangement of bits, so strip everything
3650      that doesn't affect the machine mode.  However, record the type of the
3651      outermost expression if it may matter below.  */
3652   if (CONVERT_EXPR_P (exp)
3653       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3654     outer_type = TREE_TYPE (exp);
3657   if (TREE_CODE (exp) == BIT_AND_EXPR)
3659       and_mask = TREE_OPERAND (exp, 1);
3660       exp = TREE_OPERAND (exp, 0);
3661       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3662       if (TREE_CODE (and_mask) != INTEGER_CST)
3666   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3667                                punsignedp, pvolatilep, false);
3668   if ((inner == exp && and_mask == 0)
3669       || *pbitsize < 0 || offset != 0
3670       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3673   /* If the number of bits in the reference is the same as the bitsize of
3674      the outer type, then the outer type gives the signedness.  Otherwise
3675      (in case of a small bitfield) the signedness is unchanged.  */
3676   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3677     *punsignedp = TYPE_UNSIGNED (outer_type);
3679   /* Compute the mask to access the bitfield.  */
3680   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3681   precision = TYPE_PRECISION (unsigned_type);
3683   mask = build_int_cst_type (unsigned_type, -1);
3685   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3686   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3688   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3690     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3691                             fold_convert_loc (loc, unsigned_type, and_mask), mask);
3694   *pand_mask = and_mask;
3698 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3702 all_ones_mask_p (const_tree mask, int size)
3704   tree type = TREE_TYPE (mask);
3705   unsigned int precision = TYPE_PRECISION (type);
3708   tmask = build_int_cst_type (signed_type_for (type), -1);
3711     tree_int_cst_equal (mask,
3712                         const_binop (RSHIFT_EXPR,
3713                                      const_binop (LSHIFT_EXPR, tmask,
3714                                                   size_int (precision - size)),
3715                                      size_int (precision - size)));
3718 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3719    represents the sign bit of EXP's type.  If EXP represents a sign
3720    or zero extension, also test VAL against the unextended type.
3721    The return value is the (sub)expression whose sign bit is VAL,
3722    or NULL_TREE otherwise.  */
3725 sign_bit_p (tree exp, const_tree val)
3727   unsigned HOST_WIDE_INT mask_lo, lo;
3728   HOST_WIDE_INT mask_hi, hi;
3732   /* Tree EXP must have an integral type.  */
3733   t = TREE_TYPE (exp);
3734   if (! INTEGRAL_TYPE_P (t))
3737   /* Tree VAL must be an integer constant.  */
3738   if (TREE_CODE (val) != INTEGER_CST
3739       || TREE_OVERFLOW (val))
3742   width = TYPE_PRECISION (t);
3743   if (width > HOST_BITS_PER_WIDE_INT)
3745       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3748       mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3754       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3757       mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3760   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3761      treat VAL as if it were unsigned.  */
3762   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3763       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3766   /* Handle extension from a narrower type.  */
3767   if (TREE_CODE (exp) == NOP_EXPR
3768       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3769     return sign_bit_p (TREE_OPERAND (exp, 0), val);
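/* Illustrative example: for a 32-bit integral EXP, sign_bit_p returns
   EXP only when VAL has exactly the top bit of the precision set,
   i.e. 0x80000000; the masking above makes the test insensitive to
   whether VAL was created as the signed value INT_MIN or as an
   unsigned constant.  */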
3774 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3775    to be evaluated unconditionally.  */
3778 simple_operand_p (const_tree exp)
3780   /* Strip any conversions that don't change the machine mode.  */
3783   return (CONSTANT_CLASS_P (exp)
3784           || TREE_CODE (exp) == SSA_NAME
3786               && ! TREE_ADDRESSABLE (exp)
3787               && ! TREE_THIS_VOLATILE (exp)
3788               && ! DECL_NONLOCAL (exp)
3789               /* Don't regard global variables as simple.  They may be
3790                  allocated in ways unknown to the compiler (shared memory,
3791                  #pragma weak, etc).  */
3792               && ! TREE_PUBLIC (exp)
3793               && ! DECL_EXTERNAL (exp)
3794               /* Weakrefs are not safe to be read, since they can be NULL.
3795                  They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3796                  have DECL_WEAK flag set.  */
3797               && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3798               /* Loading a static variable is unduly expensive, but global
3799                  registers aren't expensive.  */
3800               && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3803 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3804    to be evaluated unconditionally.
3805    In addition to simple_operand_p, we assume that comparisons, conversions,
3806    and logic-not operations are simple, if their operands are simple, too.  */
3809 simple_operand_p_2 (tree exp)
3811   enum tree_code code;
3813   if (TREE_SIDE_EFFECTS (exp)
3814       || tree_could_trap_p (exp))
3817   while (CONVERT_EXPR_P (exp))
3818     exp = TREE_OPERAND (exp, 0);
3820   code = TREE_CODE (exp);
3822   if (TREE_CODE_CLASS (code) == tcc_comparison)
3823     return (simple_operand_p (TREE_OPERAND (exp, 0))
3824             && simple_operand_p (TREE_OPERAND (exp, 1)));
3826   if (code == TRUTH_NOT_EXPR)
3827     return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3829   return simple_operand_p (exp);
3833 /* The following functions are subroutines to fold_range_test and allow it to
3834    try to change a logical combination of comparisons into a range test.
3837      X == 2 || X == 3 || X == 4 || X == 5
3841      (unsigned) (X - 2) <= 3
3843    We describe each set of comparisons as being either inside or outside
3844    a range, using a variable named like IN_P, and then describe the
3845    range with a lower and upper bound.  If one of the bounds is omitted,
3846    it represents either the highest or lowest value of the type.
3848    In the comments below, we represent a range by two numbers in brackets
3849    preceded by a "+" to designate being inside that range, or a "-" to
3850    designate being outside that range, so the condition can be inverted by
3851    flipping the prefix.  An omitted bound is represented by a "-".  For
3852    example, "- [-, 10]" means being outside the range starting at the lowest
3853    possible value and ending at 10, in other words, being greater than 10.
3854    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3857    We set up things so that the missing bounds are handled in a consistent
3858    manner so neither a missing bound nor "true" and "false" need to be
3859    handled using a special case.  */
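/* Illustrative walk-through of the example above: subtracting the low
   bound collapses the four equality tests into one unsigned compare,

     X == 2 || X == 3 || X == 4 || X == 5
       -->  X - 2 == 0 || X - 2 == 1 || X - 2 == 2 || X - 2 == 3
       -->  (unsigned) (X - 2) <= 3

   since for X < 2 the unsigned subtraction wraps around to a huge
   value that the `<= 3' test correctly rejects.  */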
3861 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3862    of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
3863    and UPPER1_P are nonzero if the respective argument is an upper bound
3864    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3865    must be specified for a comparison.  ARG1 will be converted to ARG0's
3866    type if both are specified.  */
3869 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3870              tree arg1, int upper1_p)
3876   /* If neither arg represents infinity, do the normal operation.
3877      Else, if not a comparison, return infinity.  Else handle the special
3878      comparison rules.  Note that most of the cases below won't occur, but
3879      are handled for consistency.  */
3881   if (arg0 != 0 && arg1 != 0)
3883       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3884                          arg0, fold_convert (TREE_TYPE (arg0), arg1));
3886       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3889   if (TREE_CODE_CLASS (code) != tcc_comparison)
3892   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3893      for neither.  In real maths, we cannot assume open ended ranges are
3894      the same.  But, this is computer arithmetic, where numbers are finite.
3895      We can therefore make the transformation of any unbounded range with
3896      the value Z, Z being greater than any representable number.  This permits
3897      us to treat unbounded ranges as equal.  */
3898   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3899   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3903       result = sgn0 == sgn1;
3906       result = sgn0 != sgn1;
3909       result = sgn0 < sgn1;
3912       result = sgn0 <= sgn1;
3915       result = sgn0 > sgn1;
3918       result = sgn0 >= sgn1;
3924   return constant_boolean_node (result, type);
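/* Illustrative example: comparing a missing lower bound with a finite
   bound.  With ARG0 == 0 and UPPER0_P == 0, SGN0 becomes -1
   (conceptually minus infinity) while a finite ARG1 gets SGN1 == 0,
   so for LT_EXPR the result is `-1 < 0', i.e. constant true: an
   absent lower bound compares below every representable value.  */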
3927 /* Helper routine for make_range.  Perform one step for it, return
3928    new expression if the loop should continue or NULL_TREE if it should
3932 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3933                  tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3934                  bool *strict_overflow_p)
3936   tree arg0_type = TREE_TYPE (arg0);
3937   tree n_low, n_high, low = *p_low, high = *p_high;
3938   int in_p = *p_in_p, n_in_p;
3942     case TRUTH_NOT_EXPR:
3943       /* We can only do something if the range is testing for zero.  */
3944       if (low == NULL_TREE || high == NULL_TREE
3945           || ! integer_zerop (low) || ! integer_zerop (high))
3950     case EQ_EXPR: case NE_EXPR:
3951     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3952       /* We can only do something if the range is testing for zero
3953          and if the second operand is an integer constant.  Note that
3954          saying something is "in" the range we make is done by
3955          complementing IN_P since it will set in the initial case of
3956          being not equal to zero; "out" is leaving it alone.  */
3957       if (low == NULL_TREE || high == NULL_TREE
3958           || ! integer_zerop (low) || ! integer_zerop (high)
3959           || TREE_CODE (arg1) != INTEGER_CST)
3964         case NE_EXPR:  /* - [c, c]  */
3967         case EQ_EXPR:  /* + [c, c]  */
3968           in_p = ! in_p, low = high = arg1;
3970         case GT_EXPR:  /* - [-, c] */
3971           low = 0, high = arg1;
3973         case GE_EXPR:  /* + [c, -] */
3974           in_p = ! in_p, low = arg1, high = 0;
3976         case LT_EXPR:  /* - [c, -] */
3977           low = arg1, high = 0;
3979         case LE_EXPR:  /* + [-, c] */
3980           in_p = ! in_p, low = 0, high = arg1;
3986       /* If this is an unsigned comparison, we also know that EXP is
3987          greater than or equal to zero.  We base the range tests we make
3988          on that fact, so we record it here so we can parse existing
3989          range tests.  We test arg0_type since often the return type
3990          of, e.g. EQ_EXPR, is boolean.  */
3991       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3993           if (! merge_ranges (&n_in_p, &n_low, &n_high,
3995                               build_int_cst (arg0_type, 0),
3999           in_p = n_in_p, low = n_low, high = n_high;
4001       /* If the high bound is missing, but we have a nonzero low
4002          bound, reverse the range so it goes from zero to the low bound
4004       if (high == 0 && low && ! integer_zerop (low))
4007           high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4008                               integer_one_node, 0);
4009           low = build_int_cst (arg0_type, 0);
4019       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4020          low and high are non-NULL, then normalize will DTRT.  */
4021       if (!TYPE_UNSIGNED (arg0_type)
4022           && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4024           if (low == NULL_TREE)
4025             low = TYPE_MIN_VALUE (arg0_type);
4026           if (high == NULL_TREE)
4027             high = TYPE_MAX_VALUE (arg0_type);
4030       /* (-x) IN [a,b] -> x in [-b, -a]  */
4031       n_low = range_binop (MINUS_EXPR, exp_type,
4032                            build_int_cst (exp_type, 0),
4034       n_high = range_binop (MINUS_EXPR, exp_type,
4035                             build_int_cst (exp_type, 0),
4037       if (n_high != 0 && TREE_OVERFLOW (n_high))
4043       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4044                          build_int_cst (exp_type, 1));
4048       if (TREE_CODE (arg1) != INTEGER_CST)
4051       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4052          move a constant to the other side.  */
4053       if (!TYPE_UNSIGNED (arg0_type)
4054           && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4057       /* If EXP is signed, any overflow in the computation is undefined,
4058          so we don't worry about it so long as our computations on
4059          the bounds don't overflow.  For unsigned, overflow is defined
4060          and this is exactly the right thing.  */
4061       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4062                            arg0_type, low, 0, arg1, 0);
4063       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4064                             arg0_type, high, 1, arg1, 0);
4065       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4066           || (n_high != 0 && TREE_OVERFLOW (n_high)))
4069       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4070         *strict_overflow_p = true;
4073       /* Check for an unsigned range which has wrapped around the maximum
4074          value thus making n_high < n_low, and normalize it.  */
4075       if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4077           low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4078                              integer_one_node, 0);
4079           high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4080                               integer_one_node, 0);
4082           /* If the range is of the form +/- [ x+1, x ], we won't
4083              be able to normalize it.  But then, it represents the
4084              whole range or the empty set, so make it
4086           if (tree_int_cst_equal (n_low, low)
4087               && tree_int_cst_equal (n_high, high))
4093         low = n_low, high = n_high;
4101     case NON_LVALUE_EXPR:
4102       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4105       if (! INTEGRAL_TYPE_P (arg0_type)
4106           || (low != 0 && ! int_fits_type_p (low, arg0_type))
4107           || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4110       n_low = low, n_high = high;
4113         n_low = fold_convert_loc (loc, arg0_type, n_low);
4116         n_high = fold_convert_loc (loc, arg0_type, n_high);
4118       /* If we're converting arg0 from an unsigned type, to exp,
4119          a signed type, we will be doing the comparison as unsigned.
4120          The tests above have already verified that LOW and HIGH
4123          So we have to ensure that we will handle large unsigned
4124          values the same way that the current signed bounds treat
4127       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4131           /* For fixed-point modes, we need to pass the saturating flag
4132              as the 2nd parameter.  */
4133           if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4135               = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4136                                                 TYPE_SATURATING (arg0_type));
4139               = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4141           /* A range without an upper bound is, naturally, unbounded.
4142              Since convert would have cropped a very large value, use
4143              the max value for the destination type.  */
4145             = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4146               : TYPE_MAX_VALUE (arg0_type);
4148           if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4149             high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4150                                              fold_convert_loc (loc, arg0_type,
4152                                              build_int_cst (arg0_type, 1));
4154           /* If the low bound is specified, "and" the range with the
4155              range for which the original unsigned value will be
4159               if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4160                                   1, fold_convert_loc (loc, arg0_type,
4165               in_p = (n_in_p == in_p);
4169               /* Otherwise, "or" the range with the range of the input
4170                  that will be interpreted as negative.  */
4171               if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4172                                   1, fold_convert_loc (loc, arg0_type,
4177               in_p = (in_p != n_in_p);
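/* Illustrative example of a single step, following the case comments
   above: `x >= 5' yields the range  + [5, -]  (inside [5, +inf))
   while `x < 5' yields  - [5, -]  (outside it).  IN_P records the
   +/- prefix, and an omitted bound stands for the extreme value of
   the type.  */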
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
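
/* Illustrative note (added example, not in the original source): for
   EXP = (c >= '0'), the loop above calls make_range_step once for the
   GE_EXPR, sets *PIN_P = 1 with the range ['0', -] (no upper bound),
   and returns c; a wrapping TRUTH_NOT_EXPR would simply flip *PIN_P
   over the same range.  */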
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = HOST_WIDE_INT_M1U;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
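
/* Worked example (illustrative, not from the original source): the
   range + [1, 9] over a signed char C is built by the code above as

     (unsigned char) (C - 1) <= 8

   collapsing the two comparisons 1 <= C && C <= 9 into one unsigned
   comparison, which is why wrap-around arithmetic is required.  */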
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
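
/* Example (illustrative, not in the original source): range_successor
   of an INTEGER_CST 9 is 10, while range_successor (TYPE_MAX_VALUE
   (type)) is 0; callers such as merge_ranges treat that 0 as "no
   successor" and punt.  */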
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
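
/* Worked example (illustrative, not from the original source): for
   c == 0 || c == 1, fold_range_test inverts both sides and asks us to
   merge - [0, 0] with - [1, 1].  The exclusion case sees that the two
   ranges are adjacent (range_successor (0) == 1) and produces
   - [0, 1]; inverted back by the caller this is + [0, 1], which
   build_range_check emits as a single unsigned comparison.  */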
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0 ? A : -A    same as -A
     A != 0 ? A : -A    same as A
     A >= 0 ? A : -A    same as abs (A)
     A >  0 ? A : -A    same as abs (A)
     A <= 0 ? A : -A    same as -abs (A)
     A <  0 ? A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B ? A : B    same as B
     A != B ? A : B    same as A
     A >= B ? A : B    same as max (A, B)
     A >  B ? A : B    same as max (B, A)
     A <= B ? A : B    same as min (A, B)
     A <  B ? A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      default:
	break;
      }

  return NULL_TREE;
}
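
/* Illustrative examples (added, not in the original source), assuming
   a type that does not honor signed zeros: x >= 0 ? x : -x matches the
   "A op 0 ? A : -A" pattern above and folds to ABS_EXPR <x>, while
   x > y ? x : y matches "A > B ? A : B" and folds to MAX_EXPR <y, x>.  */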
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
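
/* Illustrative example (added, not in the original source):
   ch >= '0' && ch <= '9' decomposes into the ranges + ['0', -] and
   + [-, '9'], which merge_ranges combines into + ['0', '9'];
   build_range_check then emits the single test
   (unsigned char) (ch - '0') <= 9.  */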
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
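
/* Worked example (illustrative, not from the original source): with a
   32-bit mode, p == 8, and no mask, c == 0xffffffff (the sign-extension
   of the 8-bit value -1) gives temp == 0xffffff00 and a result of
   0x000000ff, so the extra bits are zero.  For c == 0x000000ff, which
   is not sign-extended, the result is 0xffffffff and the extra bits
   are nonzero.  */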
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
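
/* Illustrative example (added, not in the original source): in
   (x < y && z != 0) || x >= y, the inner comparison x < y is the
   inverse of the guard x >= y, so OP simplifies to z != 0 and the
   whole expression becomes (z != 0) || x >= y.  (With RHS_ONLY set,
   only a right-most operand of OP may be dropped this way.)  */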
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
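
/* Illustrative example (added, not in the original source): for
   struct s { char a; char b; } *p, the test p->a == 2 && p->b == 4
   compares two adjacent fields; the code above can load the containing
   word once and compare it, suitably masked, against the single
   constant formed by BIT_IOR of the two shifted constants.  */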
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
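
/* Illustrative example (added, not in the original source):
   MAX_EXPR <x, 0> == 0, e.g. arising from (x > 0 ? x : 0) == 0 after
   MAX folding, hits the consts_equal case above and becomes x <= 0.  */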
5693 /* T is an integer expression that is being multiplied, divided, or taken a
5694 modulus (CODE says which and what kind of divide or modulus) by a
5695 constant C. See if we can eliminate that operation by folding it with
5696 other operations already in T. WIDE_TYPE, if non-null, is a type that
5697 should be used for the computation if wider than our type.
5699 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5700 (X * 2) + (Y * 4). We must, however, be assured that either the original
5701 expression would not overflow or that overflow is undefined for the type
5702 in the language in question.
5704 If we return a non-null expression, it is an equivalent form of the
5705 original computation, but need not be in the original type.
5707 We set *STRICT_OVERFLOW_P to true if the return values depends on
5708 signed overflow being undefined. Otherwise we do not change
5709 *STRICT_OVERFLOW_P. */
5712 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5713 bool *strict_overflow_p
)
5715 /* To avoid exponential search depth, refuse to allow recursion past
5716 three levels. Beyond that (1) it's highly unlikely that we'll find
5717 something interesting and (2) we've probably processed it before
5718 when we built the inner expression. */
5727 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5734 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5735 bool *strict_overflow_p
)
5737 tree type
= TREE_TYPE (t
);
5738 enum tree_code tcode
= TREE_CODE (t
);
5739 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5740 > GET_MODE_SIZE (TYPE_MODE (type
)))
5741 ? wide_type
: type
);
5743 int same_p
= tcode
== code
;
5744 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5745 bool sub_strict_overflow_p
;
5747 /* Don't deal with constants of zero here; they confuse the code below. */
5748 if (integer_zerop (c
))
5751 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5752 op0
= TREE_OPERAND (t
, 0);
5754 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5755 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5757 /* Note that we need not handle conditional operations here since fold
5758 already handles those cases. So just do arithmetic here. */
5762 /* For a constant, we can always simplify if we are a multiply
5763 or (for divide and modulus) if it is a multiple of our constant. */
5764 if (code
== MULT_EXPR
5765 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5766 return const_binop (code
, fold_convert (ctype
, t
),
5767 fold_convert (ctype
, c
));
5770 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5771 /* If op0 is an expression ... */
5772 if ((COMPARISON_CLASS_P (op0
)
5773 || UNARY_CLASS_P (op0
)
5774 || BINARY_CLASS_P (op0
)
5775 || VL_EXP_CLASS_P (op0
)
5776 || EXPRESSION_CLASS_P (op0
))
5777 /* ... and has wrapping overflow, and its type is smaller
5778 than ctype, then we cannot pass through as widening. */
5779 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5780 && (TYPE_PRECISION (ctype
)
5781 > TYPE_PRECISION (TREE_TYPE (op0
))))
5782 /* ... or this is a truncation (t is narrower than op0),
5783 then we cannot pass through this narrowing. */
5784 || (TYPE_PRECISION (type
)
5785 < TYPE_PRECISION (TREE_TYPE (op0
)))
5786 /* ... or signedness changes for division or modulus,
5787 then we cannot pass through this conversion. */
5788 || (code
!= MULT_EXPR
5789 && (TYPE_UNSIGNED (ctype
)
5790 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5791 /* ... or has undefined overflow while the converted to
5792 type has not, we cannot do the operation in the inner type
5793 as that would introduce undefined overflow. */
5794 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5795 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5798 /* Pass the constant down and see if we can make a simplification. If
5799 we can, replace this expression with the inner simplification for
5800 possible later conversion to our or some other type. */
5801 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5802 && TREE_CODE (t2
) == INTEGER_CST
5803 && !TREE_OVERFLOW (t2
)
5804 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5806 ? ctype
: NULL_TREE
,
5807 strict_overflow_p
))))
5812 /* If widening the type changes it from signed to unsigned, then we
5813 must avoid building ABS_EXPR itself as unsigned. */
5814 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5816 tree cstype
= (*signed_type_for
) (ctype
);
5817 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5820 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5821 return fold_convert (ctype
, t1
);
5825 /* If the constant is negative, we cannot simplify this. */
5826 if (tree_int_cst_sgn (c
) == -1)
5830 /* For division and modulus, type can't be unsigned, as e.g.
5831 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5832 For signed types, even with wrapping overflow, this is fine. */
5833 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
5835 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5837 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5840 case MIN_EXPR
: case MAX_EXPR
:
5841 /* If widening the type changes the signedness, then we can't perform
5842 this optimization as that changes the result. */
5843 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5846 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5847 sub_strict_overflow_p
= false;
5848 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5849 &sub_strict_overflow_p
)) != 0
5850 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5851 &sub_strict_overflow_p
)) != 0)
5853 if (tree_int_cst_sgn (c
) < 0)
5854 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5855 if (sub_strict_overflow_p
)
5856 *strict_overflow_p
= true;
5857 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5858 fold_convert (ctype
, t2
));
5862 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5863 /* If the second operand is constant, this is a multiplication
5864 or floor division, by a power of two, so we can treat it that
5865 way unless the multiplier or divisor overflows. Signed
5866 left-shift overflow is implementation-defined rather than
5867 undefined in C90, so do not convert signed left shift into
5869 if (TREE_CODE (op1
) == INTEGER_CST
5870 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5871 /* const_binop may not detect overflow correctly,
5872 so check for it explicitly here. */
5873 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5874 && TREE_INT_CST_HIGH (op1
) == 0
5875 && 0 != (t1
= fold_convert (ctype
,
5876 const_binop (LSHIFT_EXPR
,
5879 && !TREE_OVERFLOW (t1
))
5880 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5881 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5883 fold_convert (ctype
, op0
),
5885 c
, code
, wide_type
, strict_overflow_p
);
5888 case PLUS_EXPR
: case MINUS_EXPR
:
5889 /* See if we can eliminate the operation on both sides. If we can, we
5890 can return a new PLUS or MINUS. If we can't, the only remaining
5891 cases where we can do anything are if the second operand is a
5893 sub_strict_overflow_p
= false;
5894 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5895 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5896 if (t1
!= 0 && t2
!= 0
5897 && (code
== MULT_EXPR
5898 /* If not multiplication, we can only do this if both operands
5899 are divisible by c. */
5900 || (multiple_of_p (ctype
, op0
, c
)
5901 && multiple_of_p (ctype
, op1
, c
))))
5903 if (sub_strict_overflow_p
)
5904 *strict_overflow_p
= true;
5905 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5906 fold_convert (ctype
, t2
));
5909 /* If this was a subtraction, negate OP1 and set it to be an addition.
5910 This simplifies the logic below. */
5911 if (tcode
== MINUS_EXPR
)
5913 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5914 /* If OP1 was not easily negatable, the constant may be OP0. */
5915 if (TREE_CODE (op0
) == INTEGER_CST
)
5926 if (TREE_CODE (op1
) != INTEGER_CST
)
5929 /* If either OP1 or C are negative, this optimization is not safe for
5930 some of the division and remainder types while for others we need
5931 to change the code. */
5932 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5934 if (code
== CEIL_DIV_EXPR
)
5935 code
= FLOOR_DIV_EXPR
;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}

      break;

    default:
      break;
    }

  return 0;
}
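
/* Illustrative examples of the cancellation cases above; these are
   not in the original source and assume a signed type for which
   overflow is undefined:

     (x * 8) / 4  ->  x * (8 / 4)  =  x * 2    (op1 is a multiple of c)
     (x * 4) / 8  ->  x / (8 / 4)  =  x / 2    (c is a multiple of op1)

   Both rewrites are only valid when the inner multiplication cannot
   wrap, which is why *strict_overflow_p is set.  */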
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
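
/* For example (illustrative only, not from the original source):
   constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, whereas for a four-element integer vector type
   it yields the vector constant { -1, -1, -1, -1 }, matching the
   all-ones encoding that vector comparisons produce.  */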
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If an operand is a throw-expression (its type is VOID), it does
	 not make sense to try to perform a logical or arithmetic
	 operation on it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
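
/* A worked example of the transformation above (illustrative, not
   from the original source): folding `4 + (b ? 8 : 16)' pushes the
   addition into both arms, giving `b ? 12 : 20'.  The fold is
   accepted because ARG is constant and both branches simplify; a
   non-constant ARG with non-constant branches would be rejected by
   the worthwhile-ness check.  */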
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
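
/* For instance (illustrative, not from the original source): with
   signed zeros honored, `x + 0.0' must not fold to `x', because
   `-0.0 + 0.0' is `0.0', not `-0.0'.  By contrast `x - 0.0' (NEGATE
   set) still folds to `x', as long as sign-dependent rounding, i.e.
   rounding towards -infinity, need not be honored.  */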
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
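
/* Illustrative examples (not in the original source), assuming ARG1
   is 2.0 and the target's floating type represents 4.0 exactly:

     sqrt(x) >  2.0  ->  x > 4.0
     sqrt(x) <  2.0  ->  x >= 0.0 && x < 4.0   (when NaNs are honored)
     sqrt(x) < -1.0  ->  always false          (y negative)  */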
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
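
/* Illustrative examples (not in the original source), for a double
   ARG0 compared with +Inf:

     x >  +Inf  ->  false          (when sNaNs need not be honored)
     x <= +Inf  ->  x == x         (an isfinite-style self-test when
				    NaNs matter)
     x <  +Inf  ->  x <= DBL_MAX
     x >= +Inf  ->  x >  DBL_MAX  */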
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
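
/* A worked example of the range construction above (illustrative,
   not from the original source): for signed `x / 4 == 2', prod is 8
   and tmp is 3, so lo = 8 and hi = 11, and the comparison becomes
   the range check 8 <= x && x <= 11, since truncating division sends
   exactly [8, 11] to 2.  */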
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
	  && bitnum < TYPE_PRECISION (type)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < (unsigned) (TYPE_PRECISION (type) - bitnum)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
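
/* Illustrative examples (not in the original source), for a 32-bit
   signed int x:

     (x & 8) != 0           ->  ((unsigned) x >> 3) & 1
     (x & 8) == 0           ->  (((unsigned) x >> 3) ^ 1) & 1
     (x & 0x80000000) != 0  ->  x < 0   (via the sign test above)  */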
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
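
/* For example (illustrative, not from the original source): with
   `unsigned char c', the comparison `(int) c == 1000' folds to
   constant false, because 1000 does not fit in the unwidened type,
   while `(int) c == 42' is instead narrowed and performed directly
   in the unsigned char type.  */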
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
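
/* A sketch of the rewrite above (illustrative, not from the original
   source): for `int a[10]' the array step is 4 bytes, so the address
   computation

     &a[i] p+ 4 * d

   is replaced by `&a[i + d]', and `&a[i] p+ 8' by `&a[i + 2]',
   provided the overflow checks for multi-dimensional arrays
   succeed.  */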
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
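
/* Illustrative instances of the distribution above (not from the
   original source):

     x*3 + y*3   ->  (x + y) * 3     (identical multiplicands)
     x*7 + x     ->  (7 + 1) * x     (A*C + A -> A * (C+1))
     i*12 + j*4  ->  (i*3 + j) * 4   (common power-of-two factor)  */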
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
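
/* For example (illustrative, assuming IEEE single precision): folding
   VIEW_CONVERT_EXPR<float>(0x3f800000) first encodes the integer
   constant into the buffer in target byte order and then reinterprets
   those four bytes as a REAL_CST, yielding 1.0f.  */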
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     non-integral type.
	     Do not fold the result as that would not simplify further, also
	     folding again results in recursions.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
		   && TREE_CODE (type) != VECTOR_TYPE)
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  Likewise if the
	     final conversion does not change precision we can drop the
	     intermediate conversion.  */
	  if (inside_int && inter_int && final_int
	      && ((inside_prec < inter_prec && inter_prec < final_prec
		   && inside_unsignedp && !inter_unsignedp)
		  || final_prec == inter_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7902 || flag_syntax_only
))
7903 tem
= build1_loc (loc
, code
, type
,
7905 TREE_TYPE (TREE_OPERAND
7906 (TREE_OPERAND (tem
, 1), 0)),
7907 TREE_OPERAND (tem
, 0),
7908 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7909 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7918 /* Re-association barriers around constants and other re-association
7919 barriers can be removed. */
7920 if (CONSTANT_CLASS_P (op0
)
7921 || TREE_CODE (op0
) == PAREN_EXPR
)
7922 return fold_convert_loc (loc
, type
, op0
);
7927 case FIX_TRUNC_EXPR
:
7928 if (TREE_TYPE (op0
) == type
)
7931 if (COMPARISON_CLASS_P (op0
))
7933 /* If we have (type) (a CMP b) and type is an integral type, return
7934 new expression involving the new type. Canonicalize
7935 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7937 Do not fold the result as that would not simplify further, also
7938 folding again results in recursions. */
7939 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7940 return build2_loc (loc
, TREE_CODE (op0
), type
,
7941 TREE_OPERAND (op0
, 0),
7942 TREE_OPERAND (op0
, 1));
7943 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7944 && TREE_CODE (type
) != VECTOR_TYPE
)
7945 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7946 constant_boolean_node (true, type
),
7947 constant_boolean_node (false, type
));
7950 /* Handle cases of two conversions in a row. */
7951 if (CONVERT_EXPR_P (op0
))
7953 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7954 tree inter_type
= TREE_TYPE (op0
);
7955 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7956 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7957 int inside_float
= FLOAT_TYPE_P (inside_type
);
7958 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7959 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7960 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7961 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7962 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7963 int inter_float
= FLOAT_TYPE_P (inter_type
);
7964 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7965 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7966 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7967 int final_int
= INTEGRAL_TYPE_P (type
);
7968 int final_ptr
= POINTER_TYPE_P (type
);
7969 int final_float
= FLOAT_TYPE_P (type
);
7970 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7971 unsigned int final_prec
= TYPE_PRECISION (type
);
7972 int final_unsignedp
= TYPE_UNSIGNED (type
);
7974 /* In addition to the cases of two conversions in a row
7975 handled below, if we are converting something to its own
7976 type via an object of identical or wider precision, neither
7977 conversion is needed. */
7978 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7979 && (((inter_int
|| inter_ptr
) && final_int
)
7980 || (inter_float
&& final_float
))
7981 && inter_prec
>= final_prec
)
7982 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7984 /* Likewise, if the intermediate and initial types are either both
7985 float or both integer, we don't need the middle conversion if the
7986 former is wider than the latter and doesn't change the signedness
7987 (for integers). Avoid this if the final type is a pointer since
7988 then we sometimes need the middle conversion. Likewise if the
7989 final type has a precision not equal to the size of its mode. */
7990 if (((inter_int
&& inside_int
)
7991 || (inter_float
&& inside_float
)
7992 || (inter_vec
&& inside_vec
))
7993 && inter_prec
>= inside_prec
7994 && (inter_float
|| inter_vec
7995 || inter_unsignedp
== inside_unsignedp
)
7996 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7997 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7999 && (! final_vec
|| inter_prec
== inside_prec
))
8000 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8002 /* If we have a sign-extension of a zero-extended value, we can
8003 replace that by a single zero-extension. Likewise if the
8004 final conversion does not change precision we can drop the
8005 intermediate conversion. */
8006 if (inside_int
&& inter_int
&& final_int
8007 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
8008 && inside_unsignedp
&& !inter_unsignedp
)
8009 || final_prec
== inter_prec
))
8010 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8012 /* Two conversions in a row are not needed unless:
8013 - some conversion is floating-point (overstrict for now), or
8014 - some conversion is a vector (overstrict for now), or
8015 - the intermediate type is narrower than both initial and
8017 - the intermediate type and innermost type differ in signedness,
8018 and the outermost type is wider than the intermediate, or
8019 - the initial type is a pointer type and the precisions of the
8020 intermediate and final types differ, or
8021 - the final type is a pointer type and the precisions of the
8022 initial and intermediate types differ. */
8023 if (! inside_float
&& ! inter_float
&& ! final_float
8024 && ! inside_vec
&& ! inter_vec
&& ! final_vec
8025 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
8026 && ! (inside_int
&& inter_int
8027 && inter_unsignedp
!= inside_unsignedp
8028 && inter_prec
< final_prec
)
8029 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
8030 == (final_unsignedp
&& final_prec
> inter_prec
))
8031 && ! (inside_ptr
&& inter_prec
!= final_prec
)
8032 && ! (final_ptr
&& inside_prec
!= inter_prec
)
8033 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
8034 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
8035 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}
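      /* Worked example: given `struct T { struct S s; } a;' with `s' at
	 offset zero, (struct T *)&a.s folds to &a: the inner reference
	 bottoms out at `a' with a zero bit position and the pointed-to
	 types share a main variant.  */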
      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
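      /* Worked example: for a 16-bit signed x, (int)(x & 0x7f) becomes
	 (int)x & 0x7f, since bit 15 (the sign bit of the narrow type) is
	 clear in the mask and widening before or after the AND therefore
	 yields the same value.  */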
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
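      /* Worked example: (char *)(p p+ 16) with p of type int * becomes
	 ((char *)p) p+ 16; pushing the cast onto the base gives the
	 later folders a chance to eliminate it entirely.  */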
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
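      /* Two's-complement identities behind the two folds above: since
	 ~X == -X - 1, we get ~(-A) == A - 1 and ~(A - 1) == -A.  */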
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
				     HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}
      return NULL_TREE;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
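    /* The CEXPI folds here use Euler's formula: cexpi (x) == cos (x)
       + i*sin (x), so its real part is exactly cos (x) and its imaginary
       part (see the IMAGPART_EXPR case below) is exactly sin (x).  */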
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
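/* For example, (signed char) 200 folds to -56; the narrowing merely
   wraps (implementation-defined behavior), so the result must not carry
   TREE_OVERFLOW given that the source constant 200 was not itself
   marked as overflowed.  */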
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the STRIP_NOPS results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }
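  /* Worked example of the distribution above: (a || b) && (a || c)
     becomes a || (b && c), evaluating a only once; the same pattern
     covers the other and/or combinations matched here.  */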
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
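/* Worked check of the first identity above: MIN (MAX (a, b), b) == b,
   since a >= b makes the MAX a and the MIN b, while a < b makes both
   the MAX and the MIN equal to b.  The other three follow by symmetry.  */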
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  return fold_build2_loc (loc, code, type, t, arg1);
}
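/* Worked example: 5 <= x is canonicalized to 4 < x (and then swapped to
   x > 4), while x + 5 <= y becomes x + 4 < y, the latter only under the
   assumption that the signed addition cannot overflow.  */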
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  double_int d = tree_to_double_int (t);
  return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
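  /* Worked example: x + 5 < 10 becomes x < 5, valid because signed
     overflow is undefined; when the folded constant itself overflows,
     the whole comparison is decided against INT_MIN as described above
     and collapses to a constant true or false.  */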
  /* For comparisons of pointers we can decompose the comparison into a
     compile-time comparison of the base objects and the offsets into the
     object.  This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
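  /* Worked example: with `int a[4];' the test &a[1] == &a[2] compares a
     common base with bit positions 32 and 64 (for 32-bit int) and folds
     to false; with matching variable offsets the equal-offset path
     reduces the test to a comparison of the offsets themselves.  */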
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
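/* The ~X folds above rely on ~X == -X - 1 being strictly decreasing:
   ~x < ~y holds exactly when y < x, and ~x op c holds exactly when
   x op' ~c for the swapped comparison op'.  */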
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
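/* The identity used above: for z = a + bi, conj(z) = a - bi, so
   z * conj(z) = a*a + b*b with an exactly zero imaginary part.  */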
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
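/* Worked example: if p is known to be 16-byte aligned, then for
   (p p+ i * 8) p+ 4 this returns M == 8 with *RESIDUE == 4; every value
   of the expression agrees with 4 in its low three bits.  */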
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */
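/* For example (illustrative only): for &a[i] - &a[j] over "int a[N]",
   the bases are equal, so the result is (i - j) scaled by the 4-byte
   element size, plus a zero base offset.  */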
static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
                                             TREE_OPERAND (base0, 0),
                                             TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */
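/* E.g. (illustrative only): exact_inverse returns 0.25 for cst 4.0,
   since 1/4 is exactly representable, but NULL_TREE for 3.0, because
   1/3 has no exact binary floating-point representation.  */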
static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static double_int
mask_with_tz (tree type, double_int x, double_int y)
{
  int tz = y.trailing_zeros ();

  if (tz > 0)
    {
      double_int mask;

      mask = ~double_int::mask (tz);
      mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
      return x & mask;
    }
  return x;
}
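/* For instance (illustrative only): with y == 24 (binary ...11000,
   three trailing zeros), the returned value is x with its three
   least significant bits cleared.  */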
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
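/* E.g. (illustrative only): the address of a declaration is typically
   known to be nonzero, while deciding that A * B is nonzero from both
   operands being nonzero relies on signed overflow being undefined,
   which is reported through *STRICT_OVERFLOW_P.  */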
bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (!fndecl) return false;
        if (flag_delete_null_pointer_checks && !flag_check_new
            && DECL_IS_OPERATOR_NEW (fndecl)
            && !TREE_NOTHROW (fndecl))
          return true;
        if (flag_delete_null_pointer_checks
            && lookup_attribute ("returns_nonnull",
                                 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
          return true;
        return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */
bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;
  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }

      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  The loop optimizer sometimes produces this type of
         expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          ssizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }
          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
    bit_rot:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == element_precision (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
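      /* Worked instance (illustrative only): for a 32-bit unsigned x,
         (x << 3) + (x >> 29) matches the first pattern (3 + 29 == 32)
         and becomes a LROTATE_EXPR of x by 3; (x << b) + (x >> (32 - b))
         matches the MINUS_EXPR form and rotates left by b.  */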
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;

    case MINUS_EXPR:
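      /* Worked instance of the association above (illustrative only):
         for unsigned x and y, (x + 1) + (y + 2) splits into
         var0 == x, lit0 == 1, var1 == y, lit1 == 2 -- four objects --
         and is reassociated into (x + y) + 3, letting the literals
         combine.  */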
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
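      /* Worked instance (illustrative only): x - (x / 8) * 8 becomes
         x % 8; with x == 29 both sides evaluate to 5.  */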
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));
          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));

          /* ((T) (X /[ex] C)) * C cancels out if the conversion is
             sign-changing only.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg0) == EXACT_DIV_EXPR
              && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
            return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));
          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }
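          /* Worked instance (illustrative only): under -fassociative-math,
             (6.0 / x) * 2.0 is rewritten as 12.0 / x; without that flag
             the fold is suppressed because the rounding of the result can
             differ.  */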
          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }
          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }
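              /* Worked instances of the pow folds above (illustrative
                 only): pow (x, 2.0) * pow (y, 2.0) becomes
                 pow (x * y, 2.0), and pow (x, 2.0) * pow (x, 3.0)
                 becomes pow (x, 5.0).  */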
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int c1, c2, c3, msk;
          int width = TYPE_PRECISION (type), w;
          bool try_simplify = true;

          c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
          c2 = tree_to_double_int (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          msk = double_int::mask (width);

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2).is_zero ())
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
              if (((c1.low | c2.low) & mask) == mask
                  && (c1.low & ~mask) == 0 && c1.high == 0)
                {
                  c3 = double_int::from_uhwi (mask);
                  break;
                }
            }

          /* If X is a tree of the form (Y * K1) & K2, this might conflict
             with that optimization from the BIT_AND_EXPR optimizations.
             This could end up in an infinite recursion.  */
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
                 == INTEGER_CST)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
              double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));

              try_simplify = (masked != c1);
            }

          if (try_simplify && c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     double_int_to_tree (type,
                                                                         c3)),
                                    arg1);
        }
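      /* Worked instance (illustrative only): for (x & 0x0f) | 0x06,
         none of the early exits apply, and C3 == C1 & ~C2 == 0x09, so
         the expression is canonicalized to (x & 0x09) | 0x06.  */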
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rot;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
11561 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11562 with a constant, and the two constants have no bits in common,
11563 we should treat this as a BIT_IOR_EXPR since this may produce more
11564 simplifications. */
11565 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11566 && TREE_CODE (arg1
) == BIT_AND_EXPR
11567 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11568 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11569 && integer_zerop (const_binop (BIT_AND_EXPR
,
11570 TREE_OPERAND (arg0
, 1),
11571 TREE_OPERAND (arg1
, 1))))
11573 code
= BIT_IOR_EXPR
;
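	  /* Editor's note (illustrative, not from the original source):
	     (a & 0x0f) ^ (b & 0xf0) has operands with no common bits, so
	     it equals (a & 0x0f) | (b & 0xf0); in general x ^ y == x | y
	     whenever x & y == 0.  */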
	  /* (X | Y) ^ X -> Y & ~X.  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      tree t2 = TREE_OPERAND (arg0, 1);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				    arg1);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* (Y | X) ^ X -> Y & ~X.  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tree t2 = TREE_OPERAND (arg0, 0);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				    arg1);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* X ^ (X | Y) -> Y & ~X.  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      tree t2 = TREE_OPERAND (arg1, 1);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				    arg0);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* X ^ (Y | X) -> Y & ~X.  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	    {
	      tree t2 = TREE_OPERAND (arg1, 0);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				    arg0);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* Convert ~X ^ ~Y to X ^ Y.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == BIT_NOT_EXPR)
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));

	  /* Convert ~X ^ C to X ^ ~C.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    fold_build1_loc (loc, BIT_NOT_EXPR, type,
						     arg1));

	  /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 1))
	      && integer_onep (arg1))
	    return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				    build_zero_cst (TREE_TYPE (arg0)));
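	  /* Editor's note (illustrative, not from the original source):
	     X & 1 is always 0 or 1, so (X & 1) ^ 1 is 1 exactly when
	     X & 1 == 0; e.g. X = 5 gives (5 & 1) ^ 1 == 0, and
	     (5 & 1) == 0 is likewise false (0).  */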
	  /* Fold (X & Y) ^ Y as ~X & Y.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold (X & Y) ^ X as ~Y & X.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold X ^ (X & Y) as X & ~Y.  */
	  if (TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	    }
	  /* Fold X ^ (Y & X) as ~Y & X.  */
	  if (TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg0));
	    }
	  /* See if this can be simplified into a rotate first.  If that
	     is unsuccessful continue in the association code.  */
	  goto bit_rotate;

	case BIT_AND_EXPR:
	  if (integer_all_onesp (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (operand_equal_p (arg0, arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
	  if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	       || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	       || (TREE_CODE (arg0) == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (arg0, 1))))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

	  /* X & ~X, X & (X == 0), and X & !X are always zero.  */
	  if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	       || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	       || (TREE_CODE (arg1) == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (arg1, 1))))
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	  /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree tmp1 = fold_convert_loc (loc, type, arg1);
	      tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	      tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	      return
		fold_convert_loc (loc, type,
				  fold_build2_loc (loc, BIT_IOR_EXPR,
						   type, tmp2, tmp3));
	    }
	  /* (X | Y) & Y is (X, Y).  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));
	  /* (X | Y) & X is (Y, X).  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 1));
	  /* X & (X | Y) is (Y, X).  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	    return omit_one_operand_loc (loc, type, arg0,
					 TREE_OPERAND (arg1, 1));
	  /* X & (Y | X) is (Y, X).  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    return omit_one_operand_loc (loc, type, arg0,
					 TREE_OPERAND (arg1, 0));
	  /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 1))
	      && integer_onep (arg1))
	    {
	      tree tem2;
	      tem = TREE_OPERAND (arg0, 0);
	      tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	      tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				      tem, tem2);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				      build_zero_cst (TREE_TYPE (tem)));
	    }
	  /* Fold ~X & 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    {
	      tree tem2;
	      tem = TREE_OPERAND (arg0, 0);
	      tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	      tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				      tem, tem2);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				      build_zero_cst (TREE_TYPE (tem)));
	    }
	  /* Fold !X & 1 as X == 0.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && integer_onep (arg1))
	    {
	      tem = TREE_OPERAND (arg0, 0);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem,
				      build_zero_cst (TREE_TYPE (tem)));
	    }
	  /* Fold (X ^ Y) & Y as ~X & Y.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold (X ^ Y) & X as ~Y & X.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold X & (X ^ Y) as X & ~Y.  */
	  if (TREE_CODE (arg1) == BIT_XOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	    }
	  /* Fold X & (Y ^ X) as ~Y & X.  */
	  if (TREE_CODE (arg1) == BIT_XOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg0));
	    }
	  /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	     multiple of 1 << CST.  */
	  if (TREE_CODE (arg1) == INTEGER_CST)
	    {
	      double_int cst1 = tree_to_double_int (arg1);
	      double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
					      TYPE_UNSIGNED (TREE_TYPE (arg1)));
	      if ((cst1 & ncst1) == ncst1
		  && multiple_of_p (type, arg0,
				    double_int_to_tree (TREE_TYPE (arg1), ncst1)))
		return fold_convert_loc (loc, type, arg0);
	    }
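	  /* Editor's note (illustrative, not from the original source):
	     (X * 12) & -4 folds to X * 12: -4 is ...11100 in two's
	     complement, and since 12 is a multiple of 4 the two low bits
	     of X * 12 are already zero, making the mask a no-op.  */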
	  /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	     bits from CST2.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      double_int masked
		= mask_with_tz (type, tree_to_double_int (arg1),
				tree_to_double_int (TREE_OPERAND (arg0, 1)));

	      if (masked.is_zero ())
		return omit_two_operands_loc (loc, type, build_zero_cst (type),
					      arg0, arg1);
	      else if (masked != tree_to_double_int (arg1))
		return fold_build2_loc (loc, code, type, op0,
					double_int_to_tree (type, masked));
	    }
	  /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	     ((A & N) + B) & M -> (A + B) & M
	     Similarly if (N & M) == 0,
	     ((A | N) + B) & M -> (A + B) & M
	     and for - instead of + (or unary - instead of +)
	     and/or ^ instead of |.
	     If B is constant and (B & M) == 0, fold into A & M.  */
	  if (tree_fits_uhwi_p (arg1))
	    {
	      unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
	      if (~cst1 && (cst1 & (cst1 + 1)) == 0
		  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (TREE_CODE (arg0) == PLUS_EXPR
		      || TREE_CODE (arg0) == MINUS_EXPR
		      || TREE_CODE (arg0) == NEGATE_EXPR)
		  && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		      || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
		{
		  tree pmop[2];
		  int which = 0;
		  unsigned HOST_WIDE_INT cst0;

		  /* Now we know that arg0 is (C + D) or (C - D) or
		     -C and arg1 (M) is == (1LL << cst) - 1.
		     Store C into PMOP[0] and D into PMOP[1].  */
		  pmop[0] = TREE_OPERAND (arg0, 0);
		  pmop[1] = NULL;
		  if (TREE_CODE (arg0) != NEGATE_EXPR)
		    {
		      pmop[1] = TREE_OPERAND (arg0, 1);
		      which = 1;
		    }

		  if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		      || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
			  & cst1) != cst1)
		    which = -1;

		  for (; which >= 0; which--)
		    switch (TREE_CODE (pmop[which]))
		      {
		      case BIT_AND_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_XOR_EXPR:
			if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			    != INTEGER_CST)
			  break;
			/* tree_to_[su]hwi not used, because we don't care about
			   the upper bits.  */
			cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
			cst0 &= cst1;
			if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
			  {
			    if (cst0 != cst1)
			      break;
			  }
			else if (cst0 != 0)
			  break;
			/* If C or D is of the form (A & N) where
			   (N & M) == M, or of the form (A | N) or
			   (A ^ N) where (N & M) == 0, replace it with A.  */
			pmop[which] = TREE_OPERAND (pmop[which], 0);
			break;
		      case INTEGER_CST:
			/* If C or D is a N where (N & M) == 0, it can be
			   omitted (assumed 0).  */
			if ((TREE_CODE (arg0) == PLUS_EXPR
			     || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			    && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
			  pmop[which] = NULL;
			break;
		      default:
			break;
		      }

		  /* Only build anything new if we optimized one or both arguments
		     above.  */
		  if (pmop[0] != TREE_OPERAND (arg0, 0)
		      || (TREE_CODE (arg0) != NEGATE_EXPR
			  && pmop[1] != TREE_OPERAND (arg0, 1)))
		    {
		      tree utype = TREE_TYPE (arg0);
		      if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
			{
			  /* Perform the operations in a type that has defined
			     overflow behavior.  */
			  utype = unsigned_type_for (TREE_TYPE (arg0));
			  if (pmop[0] != NULL)
			    pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
			  if (pmop[1] != NULL)
			    pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
			}

		      if (TREE_CODE (arg0) == NEGATE_EXPR)
			tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		      else if (TREE_CODE (arg0) == PLUS_EXPR)
			{
			  if (pmop[0] != NULL && pmop[1] != NULL)
			    tem = fold_build2_loc (loc, PLUS_EXPR, utype,
						   pmop[0], pmop[1]);
			  else if (pmop[0] != NULL)
			    tem = pmop[0];
			  else if (pmop[1] != NULL)
			    tem = pmop[1];
			  else
			    return build_int_cst (type, 0);
			}
		      else if (pmop[0] == NULL)
			tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		      else
			tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      /* TEM is now the new binary +, - or unary - replacement.  */
		      tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					     fold_convert_loc (loc, utype, arg1));
		      return fold_convert_loc (loc, type, tem);
		    }
		}
	    }
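	  /* Editor's note (illustrative, not from the original source):
	     with M == 3, ((A & 7) + B) & 3 becomes (A + B) & 3 because 7
	     covers every bit of the mask, and ((A | 4) + B) & 3 likewise
	     becomes (A + B) & 3 because 4 & 3 == 0 cannot affect the
	     retained low bits.  */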
	  t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	  /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	      && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	      if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
		  && (~TREE_INT_CST_LOW (arg1)
		      & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
		return
		  fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	    }
	  /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	     This results in more efficient code for machines without a NOR
	     instruction.  Combine will canonicalize to the first form
	     which will allow use of NOR instructions provided by the
	     backend if they exist.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == BIT_NOT_EXPR)
	    {
	      return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				      build2 (BIT_IOR_EXPR, type,
					      fold_convert_loc (loc, type,
								TREE_OPERAND (arg0, 0)),
					      fold_convert_loc (loc, type,
								TREE_OPERAND (arg1, 0))));
	    }
	  /* If arg0 is derived from the address of an object or function, we may
	     be able to fold this expression using the object or function's
	     alignment.  */
	  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	    {
	      unsigned HOST_WIDE_INT modulus, residue;
	      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	      modulus = get_pointer_modulus_and_residue (arg0, &residue,
							 integer_onep (arg1));

	      /* This works because modulus is a power of 2.  If this weren't the
		 case, we'd have to replace it by its greatest power-of-2
		 divisor: modulus & -modulus.  */
	      if (low < modulus)
		return build_int_cst (type, residue & low);
	    }
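	  /* Editor's note (illustrative, not from the original source):
	     if arg0 is the address of an object known to be 8-byte
	     aligned, the modulus is 8 and the residue 0, so a test such
	     as ((uintptr_t) &obj & 7) folds to the constant 0.  */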
	  /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	     (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	     if the new mask might be further optimized.  */
	  if ((TREE_CODE (arg0) == LSHIFT_EXPR
	       || TREE_CODE (arg0) == RSHIFT_EXPR)
	      && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CODE (arg1) == INTEGER_CST
	      && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	      && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	      && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
		  < TYPE_PRECISION (TREE_TYPE (arg0))))
	    {
	      unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	      unsigned HOST_WIDE_INT newmask, zerobits = 0;
	      tree shift_type = TREE_TYPE (arg0);

	      if (TREE_CODE (arg0) == LSHIFT_EXPR)
		zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	      else if (TREE_CODE (arg0) == RSHIFT_EXPR
		       && TYPE_PRECISION (TREE_TYPE (arg0))
			  == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
		{
		  prec = TYPE_PRECISION (TREE_TYPE (arg0));
		  tree arg00 = TREE_OPERAND (arg0, 0);
		  /* See if more bits can be proven as zero because of
		     zero extension.  */
		  if (TREE_CODE (arg00) == NOP_EXPR
		      && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		    {
		      tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		      if (TYPE_PRECISION (inner_type)
			  == GET_MODE_PRECISION (TYPE_MODE (inner_type))
			  && TYPE_PRECISION (inner_type) < prec)
			{
			  prec = TYPE_PRECISION (inner_type);
			  /* See if we can shorten the right shift.  */
			  if (shiftc < prec)
			    shift_type = inner_type;
			}
		    }
		  zerobits = ~(unsigned HOST_WIDE_INT) 0;
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		  /* For arithmetic shift if sign bit could be set, zerobits
		     can contain actually sign bits, so no transformation is
		     possible, unless MASK masks them all away.  In that
		     case the shift needs to be converted into logical shift.  */
		  if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		      && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		    {
		      if ((mask & zerobits) == 0)
			shift_type = unsigned_type_for (TREE_TYPE (arg0));
		      else
			zerobits = 0;
		    }
		}

	      /* ((X << 16) & 0xff00) is (X, 0).  */
	      if ((mask & zerobits) == mask)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0), arg0);

	      newmask = mask | zerobits;
	      if (newmask != mask && (newmask & (newmask + 1)) == 0)
		{
		  /* Only do the transformation if NEWMASK is some integer
		     mode's mask.  */
		  for (prec = BITS_PER_UNIT;
		       prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		    if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		      break;
		  if (prec < HOST_BITS_PER_WIDE_INT
		      || newmask == ~(unsigned HOST_WIDE_INT) 0)
		    {
		      tree newmaskt;

		      if (shift_type != TREE_TYPE (arg0))
			{
			  tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
						 fold_convert_loc (loc, shift_type,
								   TREE_OPERAND (arg0, 0)),
						 TREE_OPERAND (arg0, 1));
			  tem = fold_convert_loc (loc, type, tem);
			}
		      else
			tem = op0;
		      newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		      if (!tree_int_cst_equal (newmaskt, arg1))
			return fold_build2_loc (loc, BIT_AND_EXPR, type,
						tem, newmaskt);
		    }
		}
	    }
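	  /* Editor's note (illustrative, not from the original source):
	     for 32-bit unsigned X, (X << 16) & 0xff00 folds to 0: the
	     shift guarantees the low 16 bits are zero (ZEROBITS), and
	     0xff00 selects nothing outside them, so MASK & ZEROBITS
	     equals MASK and the whole expression is dead.  */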
	  goto associate;

	case RDIV_EXPR:
	  /* Don't touch a floating-point divide by zero unless the mode
	     of the constant can represent infinity.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	      && real_zerop (arg1))
	    return NULL_TREE;

	  /* Optimize A / A to 1.0 if we don't care about
	     NaNs or Infinities.  Skip the transformation
	     for non-real operands.  */
	  if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	      && operand_equal_p (arg0, arg1, 0))
	    {
	      tree r = build_real (TREE_TYPE (arg0), dconst1);

	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }

	  /* The complex version of the above A / A optimization.  */
	  if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && operand_equal_p (arg0, arg1, 0))
	    {
	      tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	      if (! HONOR_NANS (TYPE_MODE (elem_type))
		  && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
		{
		  tree r = build_real (elem_type, dconst1);
		  /* omit_two_operands will call fold_convert for us.  */
		  return omit_two_operands_loc (loc, type, r, arg0, arg1);
		}
	    }
	  /* (-A) / (-B) -> A / B  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    negate_expr (arg1));
	  if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    negate_expr (arg0),
				    TREE_OPERAND (arg1, 0));

	  /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type,
							  negate_expr (arg0)));
	  /* If ARG1 is a constant, we can convert this to a multiply by the
	     reciprocal.  This does not have the same rounding properties,
	     so only do this if -freciprocal-math.  We can actually
	     always safely do it if ARG1 is a power of two, but it's hard to
	     tell if it is or not in a portable manner.  */
	  if (optimize
	      && (TREE_CODE (arg1) == REAL_CST
		  || (TREE_CODE (arg1) == COMPLEX_CST
		      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
		  || (TREE_CODE (arg1) == VECTOR_CST
		      && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	    {
	      if (flag_reciprocal_math
		  && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	      /* Find the reciprocal if optimizing and the result is exact.
		 TODO: Complex reciprocal not implemented.  */
	      if (TREE_CODE (arg1) != COMPLEX_CST)
		{
		  tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

		  if (inverse)
		    return fold_build2_loc (loc, MULT_EXPR, type,
					    arg0, inverse);
		}
	    }
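	  /* Editor's note (illustrative, not from the original source):
	     under -freciprocal-math, x / 5.0 becomes x * (1.0 / 5.0) even
	     though 1/5 rounds; without the flag, x / 4.0 can still become
	     x * 0.25 via exact_inverse, because the reciprocal of a power
	     of two is exact.  */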
	  /* Convert A/B/C to A/(B*C).  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg0) == RDIV_EXPR)
	    return fold_build2_loc (loc, RDIV_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     TREE_OPERAND (arg0, 1),
						     arg1));

	  /* Convert A/(B/C) to (A/B)*C.  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg1) == RDIV_EXPR)
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						     TREE_OPERAND (arg1, 0)),
				    TREE_OPERAND (arg1, 1));

	  /* Convert C1/(X*C2) into (C1/C2)/X.  */
	  if (flag_reciprocal_math
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (arg0) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	    {
	      tree tem = const_binop (RDIV_EXPR, arg0,
				      TREE_OPERAND (arg1, 1));
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg1, 0));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimize sin(x)/cos(x) as tan(x).  */
	      if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

		  if (tanfn != NULL_TREE)
		    return build_call_expr_loc (loc, tanfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	      if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

		  if (tanfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, tanfn, 1,
						      CALL_EXPR_ARG (arg0, 0));
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1), tmp);
		    }
		}

	      /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
		 NaNs or Infinities.  */
	      if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg1, 0);

		  if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		      && operand_equal_p (arg00, arg01, 0))
		    {
		      tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		      if (cosfn != NULL_TREE)
			return build_call_expr_loc (loc, cosfn, 1, arg00);
		    }
		}

	      /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
		 NaNs or Infinities.  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg1, 0);

		  if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		      && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		      && operand_equal_p (arg00, arg01, 0))
		    {
		      tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		      if (cosfn != NULL_TREE)
			{
			  tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
			  return fold_build2_loc (loc, RDIV_EXPR, type,
						  build_real (type, dconst1),
						  tmp);
			}
		    }
		}

	      /* Optimize pow(x,c)/x as pow(x,c-1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Optimize a/root(b/c) into a*root(c/b).  */
	      if (BUILTIN_ROOT_P (fcode1))
		{
		  tree rootarg = CALL_EXPR_ARG (arg1, 0);

		  if (TREE_CODE (rootarg) == RDIV_EXPR)
		    {
		      tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      tree b = TREE_OPERAND (rootarg, 0);
		      tree c = TREE_OPERAND (rootarg, 1);

		      tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		      tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		      return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		    }
		}

	      /* Optimize x/expN(y) into x*expN(-y).  */
	      if (BUILTIN_EXPONENT_P (fcode1))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
		  arg1 = build_call_expr_loc (loc,
					      expfn, 1,
					      fold_convert_loc (loc, type, arg));
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
		}

	      /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  tree neg11 = fold_convert_loc (loc, type,
						 negate_expr (arg11));
		  arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
		}
	    }
	  return NULL_TREE;
	case TRUNC_DIV_EXPR:
	  /* Optimize (X & (-A)) / A where A is a power of 2,
	     to X >> log2(A) */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	      && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	    {
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
					  arg1, TREE_OPERAND (arg0, 1));
	      if (sum && integer_zerop (sum)) {
		unsigned long pow2;

		if (TREE_INT_CST_LOW (arg1))
		  pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
		else
		  pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
			 + HOST_BITS_PER_WIDE_INT;

		return fold_build2_loc (loc, RSHIFT_EXPR, type,
					TREE_OPERAND (arg0, 0),
					build_int_cst (integer_type_node, pow2));
	      }
	    }
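	  /* Editor's note (illustrative, not from the original source):
	     (X & -16) / 16 becomes X >> 4: the mask guarantees the low
	     four bits are zero, so truncating division is an exact
	     arithmetic shift; checking that arg1 + (-16) sums to zero
	     verifies the mask matches the divisor.  */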
	  /* Fall through */

	case FLOOR_DIV_EXPR:
	  /* Simplify A / (B << N) where A and B are positive and B is
	     a power of 2, to A >> (N + log2(B)).  */
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && (TYPE_UNSIGNED (type)
		  || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	    {
	      tree sval = TREE_OPERAND (arg1, 0);
	      if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
		{
		  tree sh_cnt = TREE_OPERAND (arg1, 1);
		  unsigned long pow2;

		  if (TREE_INT_CST_LOW (sval))
		    pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
		  else
		    pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
			   + HOST_BITS_PER_WIDE_INT;

		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying A / (B << N)"),
					   WARN_STRICT_OVERFLOW_MISC);

		  sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					    sh_cnt,
					    build_int_cst (TREE_TYPE (sh_cnt),
							   pow2));
		  return fold_build2_loc (loc, RSHIFT_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  sh_cnt);
		}
	    }

	  /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	     TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_UNSIGNED (type)
	      && code == FLOOR_DIV_EXPR)
	    return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
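	  /* Editor's note (illustrative, not from the original source):
	     for unsigned A, A / (4 << N) becomes A >> (N + 2), folding
	     log2(4) into the shift count instead of materializing the
	     divisor.  */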
	  /* Fall through */

	case ROUND_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case EXACT_DIV_EXPR:
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg1))
	    return NULL_TREE;
	  /* X / -1 is -X.  */
	  if (!TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	      && TREE_INT_CST_HIGH (arg1) == -1)
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert -A / -B to A / B when the type is signed and overflow is
	     undefined.  */
	  if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	      && TREE_CODE (arg0) == NEGATE_EXPR
	      && negate_expr_p (arg1))
	    {
	      if (INTEGRAL_TYPE_P (type))
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when distributing negation across "
					"division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg0, 0)),
				      fold_convert_loc (loc, type,
							negate_expr (arg1)));
	    }
	  if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	      && TREE_CODE (arg1) == NEGATE_EXPR
	      && negate_expr_p (arg0))
	    {
	      if (INTEGRAL_TYPE_P (type))
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when distributing negation across "
					"division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, type,
							negate_expr (arg0)),
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg1, 0)));
	    }
	  /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	     operation, EXACT_DIV_EXPR.

	     Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	     At one time others generated faster code, it's not clear if they do
	     after the last round of changes to the DIV code in expmed.c.  */
	  if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	      && multiple_of_p (type, arg0, arg1))
	    return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying division"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  return NULL_TREE;
	case CEIL_MOD_EXPR:
	case FLOOR_MOD_EXPR:
	case ROUND_MOD_EXPR:
	case TRUNC_MOD_EXPR:
	  /* X % 1 is always zero, but be sure to preserve any side
	     effects in X.  */
	  if (integer_onep (arg1))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* X % 0, return X % 0 unchanged so that we can get the
	     proper warnings and errors.  */
	  if (integer_zerop (arg1))
	    return NULL_TREE;

	  /* 0 % X is always zero, but be sure to preserve any side
	     effects in X.  Place this after checking for X == 0.  */
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

	  /* X % -1 is zero.  */
	  if (!TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	      && TREE_INT_CST_HIGH (arg1) == -1)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* X % -C is the same as X % C.  */
	  if (code == TRUNC_MOD_EXPR
	      && !TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == INTEGER_CST
	      && !TREE_OVERFLOW (arg1)
	      && TREE_INT_CST_HIGH (arg1) < 0
	      && !TYPE_OVERFLOW_TRAPS (type)
	      /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	      && !sign_bit_p (arg1, arg1))
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      negate_expr (arg1)));

	  /* X % -Y is the same as X % Y.  */
	  if (code == TRUNC_MOD_EXPR
	      && !TYPE_UNSIGNED (type)
	      && TREE_CODE (arg1) == NEGATE_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not occur "
					"when simplifying modulus"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }
	  /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	     i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
	  if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	      && (TYPE_UNSIGNED (type)
		  || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	    {
	      tree c = arg1;
	      /* Also optimize A % (C << N)  where C is a power of 2,
		 to A & ((C << N) - 1).  */
	      if (TREE_CODE (arg1) == LSHIFT_EXPR)
		c = TREE_OPERAND (arg1, 0);

	      if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
		{
		  tree mask
		    = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				       build_int_cst (TREE_TYPE (arg1), 1));
		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying "
					    "X % (power of two)"),
					   WARN_STRICT_OVERFLOW_MISC);
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  fold_convert_loc (loc, type, mask));
		}
	    }
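	  /* Editor's note (illustrative, not from the original source):
	     for unsigned X, X % 8 becomes X & 7, and X % (4 << N)
	     becomes X & ((4 << N) - 1); both forms avoid a hardware
	     division.  */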
	  return NULL_TREE;

	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (integer_all_onesp (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  goto shift;

	case RSHIFT_EXPR:
	  /* Optimize -1 >> x for arithmetic right shifts.  */
	  if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	      && tree_expr_nonnegative_p (arg1))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  /* ... fall through ...  */

	case LSHIFT_EXPR:
	shift:
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* Prefer vector1 << scalar to vector1 << vector2
	     if vector2 is uniform.  */
	  if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	      && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	    return fold_build2_loc (loc, code, type, op0, tem);

	  /* Since negative shift count is not well-defined,
	     don't try to compute it in the compiler.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	    return NULL_TREE;

	  prec = element_precision (type);

	  /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
	  if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	      && TREE_INT_CST_LOW (arg1) < prec
	      && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	      && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
	    {
	      unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
				  + TREE_INT_CST_LOW (arg1));

	      /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
		 being well defined.  */
	      if (low >= prec)
		{
		  if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		    low = low % prec;
		  else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		    return omit_one_operand_loc (loc, type, build_zero_cst (type),
						 TREE_OPERAND (arg0, 0));
		  else
		    low = prec - 1;
		}

	      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				      build_int_cst (TREE_TYPE (arg1), low));
	    }
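	  /* Editor's note (illustrative, not from the original source):
	     (x >> 3) >> 2 becomes x >> 5; but for 32-bit unsigned x,
	     (x << 20) << 20 becomes the constant 0, since the combined
	     count reaches the type's precision.  */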
	  /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	     into x & ((unsigned)-1 >> c) for unsigned types.  */
	  if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	       || (TYPE_UNSIGNED (type)
		   && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	      && tree_fits_shwi_p (arg1)
	      && TREE_INT_CST_LOW (arg1) < prec
	      && tree_fits_shwi_p (TREE_OPERAND (arg0, 1))
	      && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
	    {
	      HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	      HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	      tree lshift;
	      tree arg00;

	      if (low0 == low1)
		{
		  arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

		  lshift = build_minus_one_cst (type);
		  lshift = const_binop (code, lshift, arg1);

		  return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
		}
	    }
	  /* Rewrite an LROTATE_EXPR by a constant into an
	     RROTATE_EXPR by a new constant.  */
	  if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	      tem = const_binop (MINUS_EXPR, tem, arg1);
	      return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	    }

	  /* If we have a rotate of a bit operation with the rotate count and
	     the second operand of the bit operation both constant,
	     permute the two operations.  */
	  if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	      && (TREE_CODE (arg0) == BIT_AND_EXPR
		  || TREE_CODE (arg0) == BIT_IOR_EXPR
		  || TREE_CODE (arg0) == BIT_XOR_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     TREE_OPERAND (arg0, 0), arg1),
				    fold_build2_loc (loc, code, type,
						     TREE_OPERAND (arg0, 1), arg1));

	  /* Two consecutive rotates adding up to the precision of the
	     type can be ignored.  */
	  if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == RROTATE_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_INT_CST_HIGH (arg1) == 0
	      && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	      && ((TREE_INT_CST_LOW (arg1)
		   + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
		  == prec))
	    return TREE_OPERAND (arg0, 0);
	  /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	     (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	     if the latter can be further optimized.  */
	  if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	      && TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree mask = fold_build2_loc (loc, code, type,
					   fold_convert_loc (loc, type,
							     TREE_OPERAND (arg0, 1)),
					   arg1);
	      tree shift = fold_build2_loc (loc, code, type,
					    fold_convert_loc (loc, type,
							      TREE_OPERAND (arg0, 0)),
					    arg1);
	      tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	      if (tem)
		return tem;
	    }

	  return NULL_TREE;

	case MIN_EXPR:
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  if (INTEGRAL_TYPE_P (type)
	      && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
	  if (tem)
	    return tem;
	  goto associate;

	case MAX_EXPR:
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_MAX_VALUE (type)
	      && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
	  if (tem)
	    return tem;
	  goto associate;
	case TRUTH_ANDIF_EXPR:
	  /* Note that the operands of this must be ints
	     and their values must be 0 or 1.
	     ("true" is a fixed value perhaps depending on the language.)  */
	  /* If first arg is constant zero, return it.  */
	  if (integer_zerop (arg0))
	    return fold_convert_loc (loc, type, arg0);
	case TRUTH_AND_EXPR:
	  /* If either arg is constant true, drop it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	      /* Preserve sequence points.  */
	      && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If second arg is constant zero, result is zero, but first arg
	     must be evaluated.  */
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	     case will be handled here.  */
	  if (integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* !X && X is always false.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
	  /* X && !X is always false.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	     means A >= Y && A != MAX, but in this case we know that
	     A < X <= MAX.  */

	  if (!TREE_SIDE_EFFECTS (arg0)
	      && !TREE_SIDE_EFFECTS (arg1))
	    {
	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	      if (tem && !operand_equal_p (tem, arg0, 0))
		return fold_build2_loc (loc, code, type, tem, arg1);

	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	      if (tem && !operand_equal_p (tem, arg1, 0))
		return fold_build2_loc (loc, code, type, arg0, tem);
	    }

	  if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	      != NULL_TREE)
	    return tem;

	  return NULL_TREE;
	case TRUTH_ORIF_EXPR:
	  /* Note that the operands of this must be ints
	     and their values must be 0 or true.
	     ("true" is a fixed value perhaps depending on the language.)  */
	  /* If first arg is constant true, return it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return fold_convert_loc (loc, type, arg0);
	case TRUTH_OR_EXPR:
	  /* If either arg is constant zero, drop it.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	  if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	      /* Preserve sequence points.  */
	      && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If second arg is constant true, result is true, but we must
	     evaluate first arg.  */
	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* Likewise for first arg, but note this only occurs here for
	     TRUTH_OR_EXPR.  */
	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	    return omit_one_operand_loc (loc, type, arg0, arg1);

	  /* !X || X is always true.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);
	  /* X || !X is always true.  */
	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);

	  /* (X && !Y) || (!X && Y) is X ^ Y */
	  if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	      && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	    {
	      tree a0, a1, l0, l1, n0, n1;

	      a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	      l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	      n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	      n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	      if ((operand_equal_p (n0, a0, 0)
		   && operand_equal_p (n1, a1, 0))
		  || (operand_equal_p (n0, a1, 0)
		      && operand_equal_p (n1, a0, 0)))
		return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	    }

	  if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	      != NULL_TREE)
	    return tem;

	  return NULL_TREE;
:
12898 /* If the second arg is constant zero, drop it. */
12899 if (integer_zerop (arg1
))
12900 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12901 /* If the second arg is constant true, this is a logical inversion. */
12902 if (integer_onep (arg1
))
12904 tem
= invert_truthvalue_loc (loc
, arg0
);
12905 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12907 /* Identical arguments cancel to zero. */
12908 if (operand_equal_p (arg0
, arg1
, 0))
12909 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12911 /* !X ^ X is always true. */
12912 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12913 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12914 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12916 /* X ^ !X is always true. */
12917 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12918 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12919 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
	  tem = fold_comparison (loc, code, type, op0, op1);
	  if (tem != NULL_TREE)
	    return tem;

	  /* bool_var != 0 becomes bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE
	      && integer_zerop (arg1)
	      && code == NE_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* bool_var == 1 becomes bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE
	      && integer_onep (arg1)
	      && code == EQ_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* bool_var != 1 becomes !bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE
	      && integer_onep (arg1)
	      && code == NE_EXPR)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, TRUTH_NOT_EXPR,
						      TREE_TYPE (arg0), arg0));

	  /* bool_var == 0 becomes !bool_var.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE
	      && integer_zerop (arg1)
	      && code == EQ_EXPR)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, TRUTH_NOT_EXPR,
						      TREE_TYPE (arg0), arg0));

	  /* !exp != 0 becomes !exp */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	      && code == NE_EXPR)
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* If this is an equality comparison of the address of two non-weak,
	     unaliased symbols neither of which are extern (since we do not
	     have access to attributes for externs), then we know the result.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	      && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	      && ! lookup_attribute ("alias",
				     DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	      && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	      && TREE_CODE (arg1) == ADDR_EXPR
	      && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	      && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	      && ! lookup_attribute ("alias",
				     DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	      && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	    {
	      /* We know that we're looking at the address of two
		 non-weak, unaliased, static _DECL nodes.

		 It is both wasteful and incorrect to call operand_equal_p
		 to compare the two ADDR_EXPR nodes.  It is wasteful in that
		 all we need to do is test pointer equality for the arguments
		 to the two ADDR_EXPR nodes.  It is incorrect to use
		 operand_equal_p as that function is NOT equivalent to a
		 C equality test.  It can in fact return false for two
		 objects which would test as equal using the C equality
		 operator.  */
	      bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	      return constant_boolean_node (equal
					    ? code == EQ_EXPR : code != EQ_EXPR,
					    type);
	    }
12995 a MINUS_EXPR of a constant, we can convert it into a comparison with
12996 a revised constant as long as no overflow occurs. */
12997 if (TREE_CODE (arg1
) == INTEGER_CST
12998 && (TREE_CODE (arg0
) == PLUS_EXPR
12999 || TREE_CODE (arg0
) == MINUS_EXPR
)
13000 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
13001 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
13002 ? MINUS_EXPR
: PLUS_EXPR
,
13003 fold_convert_loc (loc
, TREE_TYPE (arg0
),
13005 TREE_OPERAND (arg0
, 1)))
13006 && !TREE_OVERFLOW (tem
))
13007 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
13009 /* Similarly for a NEGATE_EXPR. */
13010 if (TREE_CODE (arg0
) == NEGATE_EXPR
13011 && TREE_CODE (arg1
) == INTEGER_CST
13012 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
13014 && TREE_CODE (tem
) == INTEGER_CST
13015 && !TREE_OVERFLOW (tem
))
13016 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
13018 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13019 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13020 && TREE_CODE (arg1
) == INTEGER_CST
13021 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
13022 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
13023 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
13024 fold_convert_loc (loc
,
13027 TREE_OPERAND (arg0
, 1)));
13029 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13030 if ((TREE_CODE (arg0
) == PLUS_EXPR
13031 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
13032 || TREE_CODE (arg0
) == MINUS_EXPR
)
13033 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
13036 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13037 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
13039 tree val
= TREE_OPERAND (arg0
, 1);
13040 return omit_two_operands_loc (loc
, type
,
13041 fold_build2_loc (loc
, code
, type
,
13043 build_int_cst (TREE_TYPE (val
),
13045 TREE_OPERAND (arg0
, 0), arg1
);
13048 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13049 if (TREE_CODE (arg0
) == MINUS_EXPR
13050 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
13051 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
13054 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 0)) & 1) == 1)
13056 return omit_two_operands_loc (loc
, type
,
13058 ? boolean_true_node
: boolean_false_node
,
13059 TREE_OPERAND (arg0
, 1), arg1
);
13062 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13063 for !=. Don't do this for ordered comparisons due to overflow. */
13064 if (TREE_CODE (arg0
) == MINUS_EXPR
13065 && integer_zerop (arg1
))
13066 return fold_build2_loc (loc
, code
, type
,
13067 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
13069 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13070 if (TREE_CODE (arg0
) == ABS_EXPR
13071 && (integer_zerop (arg1
) || real_zerop (arg1
)))
13072 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
	  /* If this is an EQ or NE comparison with zero and ARG0 is
	     (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	     two operations, but the latter can be done in one less insn
	     on machines that have only two-operand insns or on which a
	     constant cannot be the first operand.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_zerop (arg1))
	    {
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (arg00) == LSHIFT_EXPR
		  && integer_onep (TREE_OPERAND (arg00, 0)))
		{
		  tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					      arg01, TREE_OPERAND (arg00, 1));
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
					 build_int_cst (TREE_TYPE (arg0), 1));
		  return fold_build2_loc (loc, code, type,
					  fold_convert_loc (loc, TREE_TYPE (arg1),
							    tem),
					  arg1);
		}
	      else if (TREE_CODE (arg01) == LSHIFT_EXPR
		       && integer_onep (TREE_OPERAND (arg01, 0)))
		{
		  tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					      arg00, TREE_OPERAND (arg01, 1));
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
					 build_int_cst (TREE_TYPE (arg0), 1));
		  return fold_build2_loc (loc, code, type,
					  fold_convert_loc (loc, TREE_TYPE (arg1),
							    tem),
					  arg1);
		}
	    }
	  /* If this is an NE or EQ comparison of zero against the result of a
	     signed MOD operation whose second operand is a power of 2, make
	     the MOD operation unsigned since it is simpler and equivalent.  */
	  if (integer_zerop (arg1)
	      && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
		  || TREE_CODE (arg0) == CEIL_MOD_EXPR
		  || TREE_CODE (arg0) == FLOOR_MOD_EXPR
		  || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	      && integer_pow2p (TREE_OPERAND (arg0, 1)))
	    {
	      tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	      tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					     fold_convert_loc (loc, newtype,
							       TREE_OPERAND (arg0, 0)),
					     fold_convert_loc (loc, newtype,
							       TREE_OPERAND (arg0, 1)));

	      return fold_build2_loc (loc, code, type, newmod,
				      fold_convert_loc (loc, newtype, arg1));
	    }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
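      /* Illustration (example values chosen here, 32-bit int): for
	 "((x >> 3) & 4) != 0", C2 == 4 so log2 == 2 and 2 + 3 < 32; the
	 fold yields "(x & (4 << 3)) != 0", i.e. "(x & 32) != 0", testing
	 the same bit 5 of x.  With C1 == 30 instead, 2 + 30 == 32 would
	 overflow the shift, and for signed x the arithmetic-shift branch
	 collapses the test to "x < 0".  */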
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (arg1), arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
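      /* Illustration (example values chosen here): for 32-bit int x,
	 "(x >> 31) != 0" keeps only the sign bit, so it folds to "x < 0"
	 and "(x >> 31) == 0" folds to "x >= 0"; an unsigned x is first
	 converted to the corresponding signed type.  */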
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));
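      /* Illustration (example values chosen here): "(x ^ 5) == 3" folds
	 to "x == (5 ^ 3)", i.e. "x == 6"; XORing both sides with the
	 constant 5 cancels it on the left.  */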
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
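      /* Illustration (example values chosen here): "(x & 7) == (y & 7)"
	 folds to "((x ^ y) & 7) == 0", since two values agree on the
	 masked bits exactly when their XOR is zero under the same mask.  */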
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
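      /* Illustration (example values chosen here): with signed overflow
	 treated as undefined, "x + 1 > x" folds to true above and
	 "x - 1 >= x" folds to false, each case emitting the strict
	 overflow warning when one was requested.  */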
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = (HOST_WIDE_INT_M1U << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = (HOST_WIDE_INT_M1U << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
						TREE_TYPE (arg1), arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
						TREE_TYPE (arg1), arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
						TREE_TYPE (arg1), arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
						TREE_TYPE (arg1), arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
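      /* Illustration (example values chosen here, 32-bit unsigned x):
	 "x > 4294967295u" folds to false and "x <= 4294967295u" to true
	 (comparison against the type maximum), while "x <= 4294967294u"
	 folds to "x != 4294967295u" (maximum minus one).  */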
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
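      /* Illustration (example values chosen here): "ABS_EXPR <x> <= 5"
	 becomes "x >= -5 && x <= 5"; negating the bound must not
	 overflow, which is why the negate_expr result is checked
	 above.  */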
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
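      /* Illustration (example values chosen here): for unsigned x and
	 y == 3, "x < (1 << 3)" becomes "(x >> 3) == 0"; e.g. x == 9
	 gives 9 < 8 == false and (9 >> 3) == 1, nonzero, which agree.  */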
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));

	return NULL_TREE;
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
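      /* Illustration (example values chosen here): packing the two V2SI
	 constants { 1, 2 } and { 3, 4 } with VEC_PACK_TRUNC_EXPR into a
	 V4HI yields { 1, 2, 3, 4 }, each element truncated to the
	 narrower element type.  */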
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
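/* For illustration (example ours, GNU C): contains_label_p is what keeps
   the COND_EXPR folding below from discarding a dead branch such as the
   statement expression in "0 ? ({ l: 1; }) : 2", where a goto elsewhere
   in the function may still target the label "l".  */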
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc, arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = (HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = (HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~(HOST_WIDE_INT_M1U
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~(HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
							TREE_TYPE (tem),
							arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1)
				: integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width)
		 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem
		    = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }
		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
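      /* Illustration for the FMA case below (example values chosen here):
	 an FMA_EXPR of the integer constants 3, 4 and 5 decomposes to
	 3 * 4 + 5 == 17, and an FMA whose addend is zero degenerates to
	 the plain product.  */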
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
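/* Illustration for the VEC_PERM_EXPR case above (example values chosen
   here): on V4SI operands, a constant selector of { 0, 1, 2, 3 } is
   recognized as the identity drawn entirely from the first operand and
   folds to op0; { 4, 5, 6, 7 } likewise folds to op1.  */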
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
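/* Usage sketch (illustrative only; the concrete tree values are ours,
   not from the original file): a caller that has just built "1 + 2"
   can write

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      integer_one_node,
			      build_int_cst (integer_type_node, 2)));

   and receive an INTEGER_CST of value 3; trees that cannot be
   simplified are returned unchanged.  */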
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();

  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
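/* Illustrative note (editor's addition): the buf copies above are what
   allow legitimate lazy side effects during folding.  For example,
   forcing DECL_ASSEMBLER_NAME on a decl, or caching TYPE_POINTER_TO on
   a type, mutates the node but must not count as "fold changed the
   tree", so those fields are cleared in a local copy before the node
   is hashed.  */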
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.dispose ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
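/* Usage sketch (editor's addition; the operand trees "one" and "three"
   are assumed to exist in the caller):

     // Fold 1.0 / 3.0 inside a static initializer: the wrapper clears
     // flag_trapping_math, flag_rounding_math, etc., so the division is
     // evaluated at compile time even though fold would normally have
     // to preserve it for run time.
     tree t = fold_build2_initializer_loc (loc, RDIV_EXPR,
					   double_type_node, one, three);
*/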
/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node, op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    CASE_CONVERT:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
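/* Worked examples (editor's addition), with TYPE a 32-bit INTEGER_TYPE:

     multiple_of_p (type, J * 8, 8)   == 1   MULT_EXPR: operand 8 is
					     trivially a multiple of 8
     multiple_of_p (type, X << 3, 8)  == 1   LSHIFT_EXPR: 1 << 3 == 8,
					     and 8 is a multiple of 8
     multiple_of_p (type, I + 4, 8)   == 0   PLUS_EXPR needs *both*
					     operands to qualify  */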
/* Return true if CODE or TYPE is known to be non-negative. */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
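/* Worked example (editor's addition) for the PLUS_EXPR rule above:
   adding two zero-extended 8-bit values in a 32-bit type needs at most
   MAX (8, 8) + 1 = 9 bits, and 9 < 32, so the sum cannot reach the
   sign bit and is provably non-negative.  For MULT_EXPR the analogous
   bound is 8 + 8 = 16 bits.  */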
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
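/* Example (editor's addition): pow (x, 2.0) is recognized as
   non-negative by the BUILT_IN_POW case above, because 2.0 is an even
   integer-valued REAL_CST; pow (x, 3.0) instead falls back to asking
   whether x itself is known non-negative.  */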
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
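/* Example (editor's addition) for the PLUS_EXPR case above: if op0 is
   known positive and op1 known non-negative, then op0 + op1 is nonzero
   even if the addition wraps, since on a twos-complement machine the
   sum of two nonnegative values, one of them nonzero, can never equal
   zero; this is why *STRICT_OVERFLOW_P is deliberately not set there.  */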
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
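/* Example (editor's addition): given  static const char s[] = "abc";
   the reference s[1] (an ARRAY_REF of INTEGER_TYPE into a STRING_CST)
   folds to the character constant 'b', and *(s + 2) folds to 'c' via
   the INDIRECT_REF/string_constant path.  */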
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
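/* Worked example (editor's addition) of the LT/EQ reduction above:

     7 >= 5  ->  invert (7 < 5)            ->  invert (0)  ->  1
     5 <= 7  ->  swap, then invert (7 < 5) ->  invert (0)  ->  1
     5 != 7  ->  invert (5 == 7)           ->  invert (0)  ->  1  */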
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either does not, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
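/* Example (editor's addition): with  int a[4];  the expression
   *(int *)&a folds through the ARRAY_TYPE branch to a[0], and
   ((int *)&a)[1], i.e. *((int *)&a p+ 4), folds through the
   POINTER_PLUS_EXPR branch to a[1], since 4 EXACT_DIV sizeof (int)
   is 1 and the array's lower bound is 0.  */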
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
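/* Worked example (editor's addition): rounding VALUE = 13 up to
   DIVISOR = 8 takes the power-of-two path:

     (13 + 7) & -8  ==  20 & ~7  ==  16

   whereas a non-power-of-two divisor such as 12 uses the
   CEIL_DIV/MULT pair:  CEIL_DIV (13, 12) * 12  ==  2 * 12  ==  24.  */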
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
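/* Likewise for rounding down (editor's addition):  13 & -8 == 8  on
   the power-of-two path, and  FLOOR_DIV (13, 12) * 12 == 1 * 12 == 12
   otherwise.  */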
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
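/* Example (editor's addition, assuming a 4-byte int): for  int a[8];
   the addresses &a[3] and &a[1] share the core &a, so
   ptr_difference_const returns true with *DIFF == 2 * sizeof (int)
   == 8; comparing &a[i] against &a[1] fails, because only one of the
   two offsets is constant.  */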
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument. */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}