/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
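
/* Illustrative note (not in the original source): the encoding packs the
   four primitive outcomes LT (bit 0), EQ (bit 1), GT (bit 2) and
   UNORDERED (bit 3) into a mask, so ANDing or ORing two predicates is
   plain bitwise AND/OR of their codes.  The helper below is hypothetical
   and only demonstrates the invariant that combine_comparisons relies
   on.  */
#if 0
static void
compcode_encoding_example (void)
{
  /* a <= b is the union of the LT and EQ outcomes.  */
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* (a <= b) && (a >= b) leaves only the EQ outcome.  */
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
  /* Ruling no outcome out yields the always-true predicate.  */
  gcc_assert ((COMPCODE_LE | COMPCODE_UNGE) == COMPCODE_TRUE);
}
#endif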
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
					  uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
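
/* Illustrative sketch (not in the original source): the deferral API is
   used bracket-style by callers such as the loop iteration estimator.
   This caller is hypothetical; fold, fold_defer_overflow_warnings and
   fold_undefer_overflow_warnings are the real entry points.  */
#if 0
static tree
fold_without_spurious_warnings (tree expr)
{
  tree res;

  fold_defer_overflow_warnings ();
  res = fold (expr);
  /* Issue the deferred warning only if the folded result is actually
     used; passing false would silently discard it.  */
  fold_undefer_overflow_warnings (TREE_CONSTANT (res), NULL, 0);
  return res;
}
#endif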
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
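
/* Illustrative note (not in the original source): for a 32-bit signed
   type the only value the function above rejects is INT_MIN
   (-2147483648); its VAL computes to 1 << 31, which equals
   1 << (prec - 1), so the function returns false because -INT_MIN is
   not representable.  Every other constant negates safely.  */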
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
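
/* Illustrative sketch (not in the original source): for IN = a - 5 with
   CODE == PLUS_EXPR, split_tree returns the variable part "a", leaves
   *CONP null, and stores the literal 5 in *MINUS_LITP because it was
   subtracted.  The caller below is hypothetical.  */
#if 0
static void
split_tree_example (tree in)
{
  tree con, lit, minus_lit;
  tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* associate_trees below can now recombine VAR, CON and the literals
     in a canonical order.  */
}
#endif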
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;

      /* Check for the case of INT_MIN % -1 and return
	 overflow and result = 0.  The TImode case is handled properly
	 in double-int.  */
      if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
	  && !uns
	  && op2.is_minus_one ()
	  && op1.high == (HOST_WIDE_INT) -1
	  && (HOST_WIDE_INT) op1.low
	     == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
	{
	  overflow = 1;
	  res = double_int_zero;
	}
      else
	tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
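
/* Illustrative sketch (not in the original source): folding 2 + 3 at
   compile time.  build_int_cst and integer_type_node are real tree APIs;
   the wrapper itself is hypothetical.  */
#if 0
static tree
int_const_binop_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Yields an INTEGER_CST of value 5; NULL_TREE would be returned for
     a CODE that int_const_binop_1 cannot evaluate.  */
  return int_const_binop (PLUS_EXPR, two, three);
}
#endif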
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;

	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, compiler emits VEC_RSHIFT_EXPR always,
	     for !BYTES_BIG_ENDIAN picks first vector element, but
	     for BYTES_BIG_ENDIAN last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;

	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
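
/* Illustrative note (not in the original source): the "wide" variant
   above is essentially Smith's algorithm for complex division.  For
   a / b with |br| < |bi| it computes, with r = br/bi,

     (ar + i*ai) / (br + i*bi)
       = ((ar*r + ai) + i*(ai*r - ar)) / (br*r + bi),

   which avoids forming br*br + bi*bi directly and therefore overflows
   far less often than the straightforward magsquared form used when
   flag_complex_method == 0.  */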
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
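
/* Illustrative sketch (not in the original source): size_diffop_loc of
   the sizetype constants 4 and 12 yields -8 in ssizetype, a value the
   unsigned sizetype could not represent.  The wrapper is hypothetical;
   size_int is the real macro around size_int_kind.  */
#if 0
static tree
size_diffop_example (void)
{
  return size_diffop_loc (UNKNOWN_LOCATION,
			  size_int (4), size_int (12)); /* ssizetype -8.  */
}
#endif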
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
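
/* Illustrative sketch (not in the original source): constant-folding
   (int) 2.5 through the dispatch above.  real_from_string, build_real
   and integer_type_node are real APIs; the wrapper is hypothetical.  */
#if 0
static tree
fold_convert_const_example (void)
{
  REAL_VALUE_TYPE d;
  tree x;

  real_from_string (&d, "2.5");
  x = build_real (double_type_node, d);
  /* Dispatches to fold_convert_const_int_from_real, which truncates
     toward zero, yielding the INTEGER_CST 2.  */
  return fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, x);
}
#endif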
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
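
/* Illustrative note (not in the original source): with NaNs honored,
   !(x < y) is not x >= y, since both comparisons are false when either
   operand is NaN.  That is why the inverse returned above for LT_EXPR
   is UNGE_EXPR, which is also true in the unordered case, and why
   ERROR_MARK is returned when -ftrapping-math could make the rewrite
   drop a trap.  */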
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
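
/* Worked example: with the bit-based encoding, "less", "equal" and
   "greater" each occupy one bit, so combining two comparisons of the
   same operands is plain bit arithmetic:

     (a < b) || (a == b)  ->  COMPCODE_LT | COMPCODE_EQ  ->  COMPCODE_LE
			      i.e. a <= b
     (a < b) && (a > b)   ->  COMPCODE_LT & COMPCODE_GT  ->  COMPCODE_FALSE
			      i.e. false

   compcode_to_comparison then maps the result back to a single tree
   code, or constant_boolean_node materializes the constant result.  */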
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
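
/* Some illustrative consequences of the rules above:

     operand_equal_p (a + b, b + a, 0)   returns 1 (PLUS_EXPR is
					 commutative),
     operand_equal_p (x++, x++, 0)       returns 0 (side effects),
     operand_equal_p (0.0, -0.0, 0)      returns 0 when signed zeros are
					 honored, even though 0.0 == -0.0,
     operand_equal_p (f (x), f (x), 0)   returns 0 unless f is const, or
					 pure with OEP_PURE_SAME set.  */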
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0))
	 != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;
  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				  VECTOR_CST_ELT (arg1, i), flags))
	      return 0;

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0),
				    TREE_IMAGPART (arg1), flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }
  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);
    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));
    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (0);

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0))
		   == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
		  && alias_ptr_types_compatible_p
		       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
			TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}
    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}
    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}
    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   OLD1 and NEW1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
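
/* For example: when "f () * 0" is folded, the call cannot simply be
   dropped, so omit_one_operand_loc (loc, type, integer_zero_node, f ())
   produces the COMPOUND_EXPR "(f (), 0)": f () is still evaluated for
   its side effects, but the value of the whole expression is 0.  */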
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
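
/* For example: fold_truth_not_expr applies De Morgan's laws
   structurally, so !(a && b) becomes !a || !b, and !(x < y) on integers
   becomes x >= y.  For floating point under -ftrapping-math the
   inversion of an ordered inequality returns NULL_TREE instead, and the
   caller falls back to wrapping a TRUTH_NOT_EXPR around the operand.  */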
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1),
			    TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1),
			    TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
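
/* For example: (a / 10.0) + (b / 10.0) becomes (a + b) / 10.0, and
   (a / 4.0) + (a / 8.0) becomes a * 0.375.  Both rewrites can change
   rounding and overflow behaviour under IEEE arithmetic, which is why
   this helper is only used when unsafe math optimizations are
   enabled.  */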
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
		    int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
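
/* Illustrative example, using a hypothetical struct (not taken from the
   sources):

     struct s { unsigned a : 3; unsigned b : 5; } x;

   testing "x.b == 7" would normally extract B with a shift and mask.
   With this optimization the byte containing both fields is loaded once
   and compared directly, e.g. (byte & 0xf8) == (7 << 3) when B occupies
   the upper five bits of the byte.  */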
static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit
	 positions, sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length
     should be all zero.  For signed fields, the high-order bits should agree
     with the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
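
/* For example: decode_field_reference hands back masks like 0x1f for a
   5-bit field; all_ones_mask_p (mask, 5) then confirms that the mask
   still covers the entire field, whereas a user-written "& 0x0e" would
   have narrowed it, and the masking could not be dropped in the
   combined comparison built by fold_truth_andor_1.  */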
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
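
/* For example: for a 32-bit int X, the sign bit constant is 0x80000000,
   so sign_bit_p (X, 0x80000000) returns X.  For (int) C with C a signed
   char, the NOP_EXPR case also accepts 0x80, the sign bit of the
   narrower type, and returns the inner C.  */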
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static bool
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
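
/* Worked example: for X in [2, 5], X - 2 lies in [0, 3].  Done in an
   unsigned type, any X < 2 wraps around to a huge value, so the single
   test (unsigned) (X - 2) <= 3 is true exactly when 2 <= X && X <= 5,
   replacing up to four comparisons by one subtraction and one
   compare.  */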
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This
     permits us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
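
/* For example: range_binop (LT_EXPR, type, 0, 0, 0, 1) compares a
   missing lower bound (sgn0 == -1) with a missing upper bound
   (sgn1 == 1) and folds to true, matching the convention of treating
   the omitted bounds as -Z and +Z for some Z larger than any
   representable value.  */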
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

static tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}
      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;
    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
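/* Illustration (not part of GCC, added for this document): what make_range
   computes for one concrete expression.  For EXP = "(unsigned char) (c - 48)
   <= 9" the walk peels the comparison and the subtraction, leaving the
   equivalent range test in_p = 1, low = 48 ('0'), high = 57 ('9') on C
   itself.  Hypothetical standalone check: */
#if 0
#include <assert.h>

int main (void)
{
  for (unsigned c = 0; c < 256; c++)
    {
      int exp = (unsigned char) (c - 48) <= 9;   /* the tested expression */
      int range = c >= 48 && c <= 57;            /* in_p = 1, [48, 57] */
      assert (exp == range);
    }
  return 0;
}
#endif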
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = HOST_WIDE_INT_M1U;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
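/* Illustration (not part of GCC, added for this document): the
   "(c-low) <= (high-low)" rewrite performed above, as standalone C.  The
   two-comparison range test becomes a single unsigned comparison after
   biasing by the low bound; this relies on unsigned wrap-around, which is
   why the code first switches to an unsigned type when the expression type
   does not wrap.  Names are hypothetical.  */
#if 0
#include <assert.h>

static int is_digit_naive (int c)   /* (c >= '0') && (c <= '9'): two compares */
{
  return c >= '0' && c <= '9';
}

static int is_digit_folded (int c)  /* one unsigned compare after biasing */
{
  return (unsigned) c - '0' <= (unsigned) ('9' - '0');
}

int main (void)
{
  for (int c = -1000; c < 1000; c++)
    assert (is_digit_naive (c) == is_digit_folded (c));
  return 0;
}
#endif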
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
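/* Illustration (not part of GCC, added for this document): two of the four
   in/out combinations handled above, on concrete ranges.  With range 0 =
   +[10, 50] and range 1 = +[30, 80], the AND of two "in" ranges is
   +[30, 50] (low1..high0); "in" AND NOT "in" is +[10, 29]
   (low0..range_predecessor (low1)).  Hypothetical standalone check: */
#if 0
#include <assert.h>

int main (void)
{
  for (int x = 0; x < 120; x++)
    {
      /* in0_p && in1_p, overlapping, not a subset: low1..high0.  */
      assert (((x >= 10 && x <= 50) && (x >= 30 && x <= 80))
	      == (x >= 30 && x <= 50));
      /* in0_p && ! in1_p: low0..low1 - 1.  */
      assert (((x >= 10 && x <= 50) && !(x >= 30 && x <= 80))
	      == (x >= 10 && x <= 29));
    }
  return 0;
}
#endif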
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
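/* Illustration (not part of GCC, added for this document): one row of the
   "A op 0 ? A : -A" table above, as standalone C.  "a > 0 ? a : -a" is
   abs (a), and "a <= 0 ? a : -a" is -abs (a); both are exact for integers
   (modulo the usual INT_MIN caveat of abs itself), while for floats they
   hold only when signed zeros need not be honored, which is exactly the
   HONOR_SIGNED_ZEROS guard.  Hypothetical names.  */
#if 0
#include <assert.h>
#include <stdlib.h>

static int pick_pos (int a) { return a > 0 ? a : -a; }    /* abs (a) */
static int pick_neg (int a) { return a <= 0 ? a : -a; }   /* -abs (a) */

int main (void)
{
  for (int a = -100; a <= 100; a++)
    {
      assert (pick_pos (a) == abs (a));
      assert (pick_neg (a) == -abs (a));
    }
  return 0;
}
#endif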
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or its inversion.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
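/* Illustration (not part of GCC, added for this document): the bit
   gymnastics of unextend on a concrete value, for a P = 4 bit field in a
   32-bit constant.  The sketch computes C ^ T where T smears the field's
   sign bit through the bits above the field, so the extra bits of the
   result are zero iff C was already sign-extended.  It assumes arithmetic
   right shift of signed values (implementation-defined in ISO C, and the
   same assumption the real unextend makes by switching to a signed type).
   Hypothetical names.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t unextend_sketch (uint32_t c, int p)
{
  /* Get the field's sign bit (bit p-1) into the high-order bit, then
     arithmetic-shift it down so temp is ~0 << p if that bit was set,
     and 0 otherwise.  */
  int32_t temp = (int32_t) (((c >> (p - 1)) & 1u) << 31);
  temp >>= 31 - p;
  /* XOR clears the bits above the field iff C was sign-extended.  */
  return c ^ (uint32_t) temp;
}

int main (void)
{
  assert (unextend_sketch (0xFFFFFFFCu, 4) == 0xCu);   /* sign-extended -4 */
  assert ((unextend_sketch (0xCu, 4) >> 4) != 0);      /* not extended */
  assert (unextend_sketch (0x5u, 4) == 0x5u);          /* positive field */
  return 0;
}
#endif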
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
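/* Illustration (not part of GCC, added for this document): the shape of
   the transform above.  In "(a && b > 0) || b <= 0" the inner right-most
   operand "b > 0" is the exact inverse of the other arm "b <= 0", so it
   can be dropped, giving "a || b <= 0".  The RHS_ONLY restriction keeps
   guards such as "(p != NULL && p->x) || p == NULL" intact on the guarded
   side.  Hypothetical standalone check: */
#if 0
#include <assert.h>

int main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = -2; b <= 2; b++)
      assert (((a && b > 0) || b <= 0) == (a || b <= 0));
  return 0;
}
#endif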
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
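/* Illustration (not part of GCC, added for this document): the
   field-merging idea above in standalone form.  With both comparisons
   against constants, the two byte loads and compares of
   "p->a == 2 && p->b == 4" can be done as one wider load compared against
   the merged constant.  The sketch fakes the wide load with memcpy and
   assumes a little-endian layout with no padding between the two fields,
   both purely for illustration.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

struct s { uint8_t a; uint8_t b; };

static int test_two_compares (const struct s *p)
{
  return p->a == 2 && p->b == 4;
}

static int test_merged (const struct s *p)
{
  uint16_t word;                    /* the "object spanning A and B" */
  memcpy (&word, p, sizeof word);
  return word == ((4 << 8) | 2);    /* merged constant, little endian */
}

int main (void)
{
  struct s yes = { 2, 4 }, no = { 2, 5 };
  assert (test_two_compares (&yes) && test_merged (&yes));
  assert (!test_two_compares (&no) && !test_merged (&no));
  return 0;
}
#endif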
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
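/* Illustration (not part of GCC, added for this document): a few rows of
   the MIN/MAX comparison table above, checked exhaustively as standalone
   C.  Hypothetical names.  */
#if 0
#include <assert.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main (void)
{
  for (int x = -10; x <= 10; x++)
    {
      assert ((MAX (x, 0) == 0) == (x <= 0));   /* MAX (X, 0) == 0  ->  X <= 0 */
      assert ((MAX (x, 0) == 5) == (x == 5));   /* MAX (X, 0) == 5  ->  X == 5 */
      assert ((MAX (x, 0) > 5) == (x > 5));     /* MAX (X, 0) > 5   ->  X > 5  */
      assert ((MIN (x, 0) == 0) == (x >= 0));   /* MIN (X, 0) == 0  ->  X >= 0 */
      assert ((MIN (x, 0) > -1) == (x > -1));   /* MIN (X, 0) > -1  ->  X > -1 */
    }
  return 0;
}
#endif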
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted-to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;

      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;
    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);
      break;

    case TRUNC_MOD_EXPR:  case CEIL_MOD_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
6002 /* We have a special case here if we are doing something like
6003 (C * 8) % 4 since we know that's zero. */
6004 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6005 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6006 /* If the multiplication can overflow we cannot optimize this. */
6007 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6008 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6009 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
6011 *strict_overflow_p
= true;
6012 return omit_one_operand (type
, integer_zero_node
, op0
);
6015 /* ... fall through ... */
6017 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6018 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6019 /* If we can extract our operation from the LHS, do so and return a
6020 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6021 do something only if the second operand is a constant. */
6023 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6024 strict_overflow_p
)) != 0)
6025 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6026 fold_convert (ctype
, op1
));
6027 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6028 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6029 strict_overflow_p
)) != 0)
6030 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6031 fold_convert (ctype
, t1
));
6032 else if (TREE_CODE (op1
) != INTEGER_CST
)
6035 /* If these are the same operation types, we can associate them
6036 assuming no overflow. */
6041 unsigned prec
= TYPE_PRECISION (ctype
);
6042 bool uns
= TYPE_UNSIGNED (ctype
);
6043 double_int diop1
= tree_to_double_int (op1
).ext (prec
, uns
);
6044 double_int dic
= tree_to_double_int (c
).ext (prec
, uns
);
6045 mul
= diop1
.mul_with_sign (dic
, false, &overflow_p
);
6046 overflow_p
= ((!uns
&& overflow_p
)
6047 | TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
));
6048 if (!double_int_fits_to_tree_p (ctype
, mul
)
6049 && ((uns
&& tcode
!= MULT_EXPR
) || !uns
))
6052 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6053 double_int_to_tree (ctype
, mul
));
6056 /* If these operations "cancel" each other, we have the main
6057 optimizations of this pass, which occur when either constant is a
6058 multiple of the other, in which case we replace this with either an
6059 operation or CODE or TCODE.
6061 If we have an unsigned type, we cannot do this since it will change
6062 the result if the original computation overflowed. */
6063 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6064 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6065 || (tcode
== MULT_EXPR
6066 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6067 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6068 && code
!= MULT_EXPR
)))
6070 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
6072 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6073 *strict_overflow_p
= true;
6074 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6075 fold_convert (ctype
,
6076 const_binop (TRUNC_DIV_EXPR
,
6079 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
)))
6081 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6082 *strict_overflow_p
= true;
6083 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6084 fold_convert (ctype
,
6085 const_binop (TRUNC_DIV_EXPR
,
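/* Illustration of the folds above (a standalone sketch, not part of GCC;
   kept out of the build with #if 0).  (C * 8) % 4 folds to zero, and a
   MULT cancels against an EXACT_DIV when one constant is a multiple of
   the other, assuming no overflow:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int c = -100; c <= 100; c++)
    assert ((c * 8) % 4 == 0);          /* (C * 8) % 4  ->  0 */
  for (int x = -100; x <= 100; x++)
    assert ((x * 6) / 3 == x * 2);      /* (X * 6) / 3  ->  X * 2 */
  return 0;
}
#endif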
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node
                                     : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
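/* Illustration of the transformation above (a standalone sketch, not
   part of GCC; kept out of the build with #if 0).  Pushing the binary
   operation into both arms of the conditional preserves the result:  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 10, x = 3, y = 4;
  for (int b = 0; b <= 1; b++)
    {
      /* `a + (b ? x : y)' becomes `b ? (a + x) : (a + y)'.  */
      assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
      /* `a + (x < y)' becomes `(x < y) ? (a + 1) : (a + 0)'.  */
      assert (a + (x < y) == ((x < y) ? (a + 1) : (a + 0)));
    }
  return 0;
}
#endif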
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
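/* Illustration of why signed zeros block the fold (a standalone sketch
   assuming IEEE double and default rounding, not part of GCC; kept out
   of the build with #if 0):  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (!signbit (x + 0.0));  /* -0.0 + 0.0 is +0.0, so X + 0 is not X.  */
  assert (signbit (x - 0.0));   /* -0.0 - 0.0 stays -0.0, so X - 0 is X.  */
  return 0;
}
#endif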
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
                     enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand_loc (loc, type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand_loc (loc, type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2_loc (loc, GE_EXPR, type, arg,
                                  build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, EQ_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand_loc (loc, type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2_loc (loc, code, type, arg,
                                  build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand_loc (loc, type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2_loc (loc, NE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, GE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                      fold_build2_loc (loc, GE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   dconst0)),
                                      fold_build2_loc (loc, NE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2_loc (loc, code, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          arg = save_expr (arg);
          return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               dconst0)),
                                  fold_build2_loc (loc, code, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               c2)));
        }
    }

  return NULL_TREE;
}
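/* Illustration of the sqrt rewrites above (a standalone sketch assuming
   IEEE double, finite inputs and a correctly rounded sqrt, not part of
   GCC; kept out of the build with #if 0):  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  const double c = 3.0;                 /* c*c == 9.0 is exact.  */
  for (double x = 0.0; x <= 100.0; x += 0.25)
    {
      assert ((sqrt (x) > c) == (x > c * c));  /* sqrt(x) > c  ->  x > c*c */
      assert (!(sqrt (x) < -1.0));             /* y negative: always false */
    }
  return 0;
}
#endif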
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                                arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
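/* Illustration of the infinity compares above (a standalone sketch
   assuming IEEE double and no NaN operands, not part of GCC; kept out
   of the build with #if 0):  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double xs[] = { -1.0, 0.0, DBL_MAX, INFINITY };
  for (int i = 0; i < 4; i++)
    {
      double x = xs[i];
      assert ((x >= INFINITY) == (x > DBL_MAX));   /* x >= +Inf  */
      assert ((x < INFINITY) == (x <= DBL_MAX));   /* x <  +Inf  */
    }
  return 0;
}
#endif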
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X / C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
        .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
            .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
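/* Illustration of the division compare above (a standalone sketch, not
   part of GCC; kept out of the build with #if 0).  With divisor 4 and
   bound 5, prod is 20 and hi is prod + (4 - 1), so the compare becomes
   a range check:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    assert ((x / 4 == 5) == (x >= 20 && x <= 23));
  return 0;
}
#endif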
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  RESULT_TYPE is the
   desired result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  RESULT_TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
          && bitnum < TYPE_PRECISION (type)
          && (tree_to_uhwi (TREE_OPERAND (inner, 1))
              < (unsigned) (TYPE_PRECISION (type) - bitnum)))
        {
          bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
                                 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
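/* Illustration of the two single-bit rewrites above (a standalone
   sketch assuming two's complement int, not part of GCC; kept out of
   the build with #if 0):  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  for (unsigned a = 0; a < 1024; a++)
    /* (A & C) != 0 with C = 1 << 3 becomes (A >> 3) & 1.  */
    assert (((a & 8) != 0) == ((a >> 3) & 1));

  int vals[] = { 0, 1, -1, 12345, -12345, INT_MIN, INT_MAX };
  for (int i = 0; i < 7; i++)
    /* When C is the sign bit, (A & C) != 0 becomes A < 0.  */
    assert (((vals[i] & INT_MIN) != 0) == (vals[i] < 0));
  return 0;
}
#endif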
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
                         tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || ((TYPE_PRECISION (shorter_type)
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
                            fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
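/* Illustration of the known-result case above (a standalone sketch, not
   part of GCC; kept out of the build with #if 0).  300 does not fit in
   signed char, so the widened compares have constant results:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = -128; i <= 127; i++)
    {
      signed char c = (signed char) i;
      assert (((int) c == 300) == 0);   /* always false */
      assert (((int) c < 300) == 1);    /* always true */
    }
  return 0;
}
#endif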
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
                                  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
        mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
        goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
        goto cont;

      if (s)
        {
          if (! tree_int_cst_equal (step, s))
            goto cont;
        }
      else
        {
          /* Try if delta is a multiple of step.  */
          tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
          if (! tmp)
            goto cont;
          delta = tmp;
        }

      /* Only fold here if we can verify we do not overflow one
         dimension of a multi-dimensional array.  */
      if (mdim)
        {
          tree tmp;

          if (!TYPE_MIN_VALUE (domain)
              || !TYPE_MAX_VALUE (domain)
              || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
            goto cont;

          tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                 fold_convert_loc (loc, itype,
                                                   TYPE_MIN_VALUE (domain)),
                                 fold_convert_loc (loc, itype, delta));
          if (TREE_CODE (tmp) != INTEGER_CST
              || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
            goto cont;
        }

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
                        fold_build2_loc
                          (loc, PLUS_EXPR, itype,
                           fold_convert_loc (loc, itype,
                                             TYPE_MIN_VALUE
                                               (TYPE_DOMAIN (TREE_TYPE (ref)))),
                           fold_convert_loc (loc, itype, delta)),
                        NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TREE_OPERAND (ref, 1)),
                                     fold_convert_loc (loc, itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
                       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
                       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
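/* Illustration of the weakening above (a standalone sketch over a small
   domain with no overflow, not part of GCC; kept out of the build with
   #if 0):  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -10; a <= 10; a++)
    for (int x = -10; x <= 10; x++)
      for (int y = -10; y <= 10; y++)
        /* A < X && A + 1 > Y  is folded to  A < X && A >= Y.  */
        assert (((a < x) && (a + 1 > y)) == ((a < x) && (a >= y)));
  return 0;
}
#endif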
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
           && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = alt0;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
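/* Illustration of the factorings above (a standalone sketch ignoring
   overflow, not part of GCC; kept out of the build with #if 0):  */
#if 0
#include <assert.h>

int
main (void)
{
  const int c = 7;
  for (int a = -20; a <= 20; a++)
    for (int b = -20; b <= 20; b++)
      {
        assert (a * c + b * c == (a + b) * c);  /* (A*C) + (B*C) -> (A+B)*C */
        assert (a * c - a == a * (c - 1));      /* (A*C) - A -> A*(C-1) */
      }
  return 0;
}
#endif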
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr + rsize, len - rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr + offset, len - offset) != size)
        return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
              total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
   by EXPR into the buffer PTR of length LEN bytes.  Return the number
   of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
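/* Illustration of the encode/interpret contract (a standalone sketch
   using host memcpy in place of the target encoding, not part of GCC;
   kept out of the build with #if 0).  Encoding a constant to its byte
   image and interpreting it back must reproduce the value:  */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  unsigned char buf[sizeof (int)];
  int value = 0x12345678, back;
  memcpy (buf, &value, sizeof buf);   /* like native_encode_expr */
  memcpy (&back, buf, sizeof back);   /* like native_interpret_expr */
  assert (back == value);
  return 0;
}
#endif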
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long) value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr + size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr + (i * size), size);
      if (!elem)
        return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
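/* Illustration of what VIEW_CONVERT_EXPR folding computes (a standalone
   sketch assuming IEEE single precision and a 32-bit unsigned int, not
   part of GCC; kept out of the build with #if 0).  The bytes are
   reinterpreted, not converted:  */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned int bits;
  memcpy (&bits, &f, sizeof bits);
  assert (bits == 0x3f800000u);   /* bit pattern of 1.0f */
  return 0;
}
#endif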
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
                        TREE_OPERAND (t, 0),
                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
7836 /* Fold a unary expression of code CODE and type TYPE with operand
7837 OP0. Return the folded expression if folding is successful.
7838 Otherwise, return NULL_TREE. */
7841 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7845 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7847 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7848 && TREE_CODE_LENGTH (code
) == 1);
7853 if (CONVERT_EXPR_CODE_P (code
)
7854 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7856 /* Don't use STRIP_NOPS, because signedness of argument type
7858 STRIP_SIGN_NOPS (arg0
);
7862 /* Strip any conversions that don't change the mode. This
7863 is safe for every expression, except for a comparison
7864 expression because its signedness is derived from its
7867 Note that this is done as an internal manipulation within
7868 the constant folder, in order to find the simplest
7869 representation of the arguments so that their form can be
7870 studied. In any cases, the appropriate type conversions
7871 should be put back in the tree that will get out of the
7877 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7879 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7880 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7881 fold_build1_loc (loc
, code
, type
,
7882 fold_convert_loc (loc
, TREE_TYPE (op0
),
7883 TREE_OPERAND (arg0
, 1))));
7884 else if (TREE_CODE (arg0
) == COND_EXPR
)
7886 tree arg01
= TREE_OPERAND (arg0
, 1);
7887 tree arg02
= TREE_OPERAND (arg0
, 2);
7888 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7889 arg01
= fold_build1_loc (loc
, code
, type
,
7890 fold_convert_loc (loc
,
7891 TREE_TYPE (op0
), arg01
));
7892 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7893 arg02
= fold_build1_loc (loc
, code
, type
,
7894 fold_convert_loc (loc
,
7895 TREE_TYPE (op0
), arg02
));
7896 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7899 /* If this was a conversion, and all we did was to move into
7900 inside the COND_EXPR, bring it back out. But leave it if
7901 it is a conversion from integer to integer and the
7902 result precision is no wider than a word since such a
7903 conversion is cheap and may be optimized away by combine,
7904 while it couldn't if it were outside the COND_EXPR. Then return
7905 so we don't get into an infinite recursion loop taking the
7906 conversion out and then back in. */
7908 if ((CONVERT_EXPR_CODE_P (code
)
7909 || code
== NON_LVALUE_EXPR
)
7910 && TREE_CODE (tem
) == COND_EXPR
7911 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7912 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7913 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7914 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7915 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7916 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7917 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7919 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7920 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7921 || flag_syntax_only
))
7922 tem
= build1_loc (loc
, code
, type
,
7924 TREE_TYPE (TREE_OPERAND
7925 (TREE_OPERAND (tem
, 1), 0)),
7926 TREE_OPERAND (tem
, 0),
7927 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7928 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7937 /* Re-association barriers around constants and other re-association
7938 barriers can be removed. */
7939 if (CONSTANT_CLASS_P (op0
)
7940 || TREE_CODE (op0
) == PAREN_EXPR
)
7941 return fold_convert_loc (loc
, type
, op0
);
7946 case FIX_TRUNC_EXPR
:
7947 if (TREE_TYPE (op0
) == type
)
7950 if (COMPARISON_CLASS_P (op0
))
7952 /* If we have (type) (a CMP b) and type is an integral type, return
7953 new expression involving the new type. Canonicalize
7954 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7956 Do not fold the result as that would not simplify further, also
7957 folding again results in recursions. */
7958 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7959 return build2_loc (loc
, TREE_CODE (op0
), type
,
7960 TREE_OPERAND (op0
, 0),
7961 TREE_OPERAND (op0
, 1));
7962 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7963 && TREE_CODE (type
) != VECTOR_TYPE
)
7964 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7965 constant_boolean_node (true, type
),
7966 constant_boolean_node (false, type
));
7969 /* Handle cases of two conversions in a row. */
7970 if (CONVERT_EXPR_P (op0
))
7972 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7973 tree inter_type
= TREE_TYPE (op0
);
7974 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7975 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7976 int inside_float
= FLOAT_TYPE_P (inside_type
);
7977 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7978 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7979 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7980 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7981 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7982 int inter_float
= FLOAT_TYPE_P (inter_type
);
7983 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7984 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7985 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7986 int final_int
= INTEGRAL_TYPE_P (type
);
7987 int final_ptr
= POINTER_TYPE_P (type
);
7988 int final_float
= FLOAT_TYPE_P (type
);
7989 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7990 unsigned int final_prec
= TYPE_PRECISION (type
);
7991 int final_unsignedp
= TYPE_UNSIGNED (type
);
7993 /* check for cases specific to UPC, involving pointer types */
7994 if (final_ptr
|| inter_ptr
|| inside_ptr
)
7996 int final_pts
= final_ptr
7997 && upc_shared_type_p (TREE_TYPE (type
));
7998 int inter_pts
= inter_ptr
7999 && upc_shared_type_p (TREE_TYPE (inter_type
));
8000 int inside_pts
= inside_ptr
8001 && upc_shared_type_p (TREE_TYPE (inside_type
));
8002 if (final_pts
|| inter_pts
|| inside_pts
)
8004 if (!((final_pts
&& inter_pts
)
8005 && TREE_TYPE (type
) == TREE_TYPE (inter_type
))
8006 || ((inter_pts
&& inside_pts
)
8007 && (TREE_TYPE (inter_type
)
8008 == TREE_TYPE (inside_type
))))
8013 /* In addition to the cases of two conversions in a row
8014 handled below, if we are converting something to its own
8015 type via an object of identical or wider precision, neither
8016 conversion is needed. */
8017 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
8018 && (((inter_int
|| inter_ptr
) && final_int
)
8019 || (inter_float
&& final_float
))
8020 && inter_prec
>= final_prec
)
8021 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8023 /* Likewise, if the intermediate and initial types are either both
8024 float or both integer, we don't need the middle conversion if the
8025 former is wider than the latter and doesn't change the signedness
8026 (for integers). Avoid this if the final type is a pointer since
8027 then we sometimes need the middle conversion. Likewise if the
8028 final type has a precision not equal to the size of its mode. */
8029 if (((inter_int
&& inside_int
)
8030 || (inter_float
&& inside_float
)
8031 || (inter_vec
&& inside_vec
))
8032 && inter_prec
>= inside_prec
8033 && (inter_float
|| inter_vec
8034 || inter_unsignedp
== inside_unsignedp
)
8035 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
8036 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
8038 && (! final_vec
|| inter_prec
== inside_prec
))
8039 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8041 /* If we have a sign-extension of a zero-extended value, we can
8042 replace that by a single zero-extension. Likewise if the
8043 final conversion does not change precision we can drop the
8044 intermediate conversion. */
8045 if (inside_int
&& inter_int
&& final_int
8046 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
8047 && inside_unsignedp
&& !inter_unsignedp
)
8048 || final_prec
== inter_prec
))
8049 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}
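      /* Illustrative example (added here, not from the original sources):
	 given

	   struct S { int i; };
	   struct T { struct S s; } t;

	 the upcast-style expression (struct T *) &t.s refers to offset
	 zero of T, so with matching main variants and an unqualified
	 pointer type it folds to the plain address &t.  */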
      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
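      /* Illustrative example (added here, not from the original sources):

	   unsigned char c;  ... (unsigned int) (c & 0x3f) ...

	 the mask 0x3f leaves the sign bit of the narrow type clear, so the
	 widening is folded into the mask: ((unsigned int) c) & 0x3f.  */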
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && !upc_shared_type_p (TREE_TYPE (type))
	  && !upc_shared_type_p (TREE_TYPE (
				   TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
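      /* Illustrative example (added here, not from the original sources):

	   long x, y;  ... (short) (x * y) ...

	 only the low-order bits of the product survive the truncation, so
	 the multiplication is performed directly in the narrower type (or
	 in its unsigned variant when the narrow type has undefined
	 overflow).  */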
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || (POINTER_TYPE_P (type)
	       && !upc_shared_type_p (TREE_TYPE (type))))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || (POINTER_TYPE_P (TREE_TYPE (op0))
		  && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || (POINTER_TYPE_P (TREE_TYPE (op0))
		  && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
		  && !upc_shared_type_p (TREE_TYPE (
					   TREE_TYPE (TREE_OPERAND (op0, 0))))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
				     HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}
      return NULL_TREE;
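      /* Illustrative examples (added here, not from the original sources)
	 of the BIT_NOT_EXPR rules above, for an int A:
	   ~(-A)      folds to  A - 1
	   ~(A - 1)   folds to  -A
	   ~(A + -1)  folds to  -A  */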
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
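/* Illustrative example (added here, not from the original sources): for
   ints A, B and C where B and C are free of side effects,

     (A || B) && (A || C)

   is rewritten by the first transformation in fold_truth_andor to
   A || (B && C), so A is tested only once.  */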
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
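/* Illustrative example (added here, not from the original sources): for
   ints a and b,

     MIN_EXPR <MAX_EXPR <a, b>, b>

   folds to b by the first rule in fold_minmax; omit_one_operand_loc
   still evaluates a when it has side effects.  */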
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
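/* Illustrative example (added here, not from the original sources): for a
   signed int x, the comparison

     x + 10 > y

   is canonicalized by the helper above to x + 9 >= y, reducing the
   magnitude of the constant; *STRICT_OVERFLOW_P records that this relies
   on signed overflow being undefined.  */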
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  double_int d = tree_to_double_int (t);
  return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
}
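/* Illustrative example (added here, not from the original sources): on a
   host with 64-bit HOST_WIDE_INT, a 64-bit-precision sizetype constant
   with all bits set comes back from size_low_cst as -1, i.e. as a
   properly sign-extended offset rather than a huge positive one.  */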
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc,
						   TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
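/* Illustrative example (added here, not from the original sources): for a
   _Complex double z, the product z * conj(z) is rewritten by
   fold_mult_zconjz as

     COMPLEX_EXPR <r*r + i*i, 0>

   where r and i are the (save_expr'd) real and imaginary parts of z,
   i.e. a purely real result.  */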
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
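/* Editorial worked example (assumed values, added in editing): for
   EXPR = &a + 4 + 12*i where &a is known to be 16-byte aligned, the
   ADDR_EXPR leaf yields M = 16; the MULT_EXPR by 12 lowers M to
   MIN (16, 12 & -12) = 4; and the INTEGER_CST adds 4 to the residue.
   Result: M = 4, N = 4, i.e. EXPR is congruent to 0 mod 4 no matter
   what i is.  */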
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
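/* Editorial example (added in editing): for a 4-element integer vector
   type, a CONSTRUCTOR such as { 1, 2 } fills elts[0..1] from the
   initializer and the trailing loop zero-fills elts[2..3], matching
   the rule that omitted vector initializer elements are zero.  */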
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
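/* Editorial example (added in editing): with nelts == 4,
   arg0 = {10,11,12,13}, arg1 = {20,21,22,23} and sel = {0,4,1,5}, the
   scratch array holds the eight inputs followed by the selected
   {10,20,11,21}; all four selected elements are constants, so a
   VECTOR_CST is built rather than a CONSTRUCTOR.  */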
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
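/* Editorial example (added in editing): for &a[i][3] - &a[j][1] the
   bases a[i] and a[j] are themselves ARRAY_REFs, so the recursion
   yields base_offset = (i - j) * sizeof (a[0]), and the final result
   is base_offset + (3 - 1) * sizeof (a[0][0]), all computed in
   TYPE.  */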
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
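/* Editorial example (added in editing): exact_inverse of 4.0 in
   double yields 0.25, because 0.25 is exactly representable; for 3.0
   it yields NULL_TREE, since 1/3 has no finite binary representation.
   This is what allows a division such as x / 4.0 to be rewritten as
   the bit-identical multiplication x * 0.25.  */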
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static double_int
mask_with_tz (tree type, double_int x, double_int y)
{
  int tz = y.trailing_zeros ();

  if (tz > 0)
    {
      double_int mask;

      mask = ~double_int::mask (tz);
      mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
      return mask & x;
    }
  return x;
}
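/* Editorial example (added in editing): for y = 24 we have
   y.trailing_zeros () == 3, so the mask is ~0b111 and
   mask_with_tz (type, 29, 24) == 24: the low three bits of x are
   cleared.  The (X & C1) | C2 canonicalization below uses this to see
   which C1 bits can survive when X is a multiple of a power of two.  */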
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
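/* Editorial examples (added in editing): t = &var takes the
   tcc_reference path and is nonzero when null-pointer checks may be
   deleted; a throwing operator new is nonzero under
   -fdelete-null-pointer-checks; and a pointer-arithmetic result may be
   provably nonzero only by assuming undefined signed overflow, in
   which case *strict_overflow_p is set so the caller can warn.  */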
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && !upc_shared_type_p (TREE_TYPE (type)))
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR
	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Disable further optimizations involving UPC shared pointers,
	 because integers are not interoperable with shared pointers.  */
      if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
	   && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
	  || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
	      && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
	return NULL_TREE;

      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));
	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== element_precision (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
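      /* Editorial example (added in editing) of the rotate recognition
	 above, assuming a 32-bit unsigned int:

	   unsigned rotl3 (unsigned x) { return (x << 3) + (x >> 29); }

	 matches the C1 + C2 == precision form and becomes a single
	 LROTATE_EXPR by 3; the (Z - B) forms catch the variable-count
	 idiom (x << b) + (x >> (32 - b)).  PLUS behaves like IOR here
	 because the two shifted halves have no overlapping bits.  */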
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;
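      /* Editorial example (added in editing) of the association above:
	 in (x + 4) + (y + 5), split_tree classifies x and y as
	 variables and 4 and 5 as literals; associating each group
	 yields (x + y) + 9, combining constants that were never
	 adjacent in the original tree.  The MINUS_EXPR preservation
	 keeps subtractions of unsigned literals, as in
	 ((X*2 + 4) - 8U)/2, from turning into huge wrapped constants
	 that would mislead extract_muldiv.  */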
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations. */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Disable further optimizations involving UPC shared pointers,
	 because integers are not interoperable with shared pointers.
	 (The test below also detects pointer difference between
	 shared pointers, which cannot be folded.)  */
      if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
	  && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
	return NULL_TREE;

      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2), arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is applied
	     only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
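      /* Editorial summary (added in editing) of the
	 -funsafe-math-optimizations folds in the MULT_EXPR case above:

	   sqrt (x) * sqrt (y)  ->  sqrt (x * y)
	   exp (x) * exp (y)    ->  exp (x + y)
	   pow (x, c) * x       ->  pow (x, c + 1)
	   tan (x) * cos (x)    ->  sin (x)

	 None of these is exact in IEEE arithmetic (rounding and special
	 values can differ), which is why they are gated on the flag.  */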
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;
	  bool try_simplify = true;

	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }

	  /* If X is a tree of the form (Y * K1) & K2, this might conflict
	     with that optimization from the BIT_AND_EXPR optimizations.
	     This could end up in an infinite recursion.  */
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
		 == INTEGER_CST)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	      double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));

	      try_simplify = (masked != c1);
	    }

	  if (try_simplify && c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
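      /* Editorial example (added in editing) of the (X & C1) | C2
	 canonicalization above, for 32-bit ints: (x & 0x0ff0) | 0x00ff
	 drops the C1 bits already forced by C2 and becomes
	 (x & 0x0f00) | 0x00ff, while (x & 0xffff) | 0x00ff is left
	 alone because 0xffff is the mask of a whole mode (HImode),
	 a form other optimizations prefer to keep visible.  */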
      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}
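      /* Illustrative sketch (not from the code): the branch above
	 recognizes the bitwise identity

	     (x & ~y) | (~x & y) == x ^ y

	 which holds because each result bit is set iff exactly one of
	 the two corresponding input bits is set.  */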
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
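      /* Illustrative sketch with hypothetical masks: if the two AND
	 constants share no bits, e.g. 0xf0 and 0x0f, then

	     (x & 0xf0) ^ (y & 0x0f) == (x & 0xf0) | (y & 0x0f)

	 since no result bit can receive two set inputs, XOR and IOR
	 agree, and the IOR folds become applicable.  */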
      /* (X | Y) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type,
						 arg1));
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X , X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}
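      /* Illustrative sketch (hypothetical x): the folds above turn a
	 mask-and-flip of the low bit into a comparison, e.g.

	     ((x ^ 1) & 1) == ((x & 1) == 0)

	 both yield 1 for even x and 0 for odd x.  */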
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  double_int cst1 = tree_to_double_int (arg1);
	  double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
					  TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				double_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int masked
	    = mask_with_tz (type, tree_to_double_int (arg1),
			    tree_to_double_int (TREE_OPERAND (arg0, 1)));

	  if (masked.is_zero ())
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != tree_to_double_int (arg1))
	    return fold_build2_loc (loc, code, type, op0,
				    double_int_to_tree (type, masked));
	}
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      unsigned HOST_WIDE_INT cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		  || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		      & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    /* tree_to_[su]hwi not used, because we don't care about
		       the upper bits.  */
		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (arg1))
		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		       + HOST_BITS_PER_WIDE_INT;

	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      build_int_cst (integer_type_node, pow2));
	    }
	}
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == prec))
	return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
:
12967 /* If the second arg is constant zero, drop it. */
12968 if (integer_zerop (arg1
))
12969 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12970 /* If the second arg is constant true, this is a logical inversion. */
12971 if (integer_onep (arg1
))
12973 tem
= invert_truthvalue_loc (loc
, arg0
);
12974 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12976 /* Identical arguments cancel to zero. */
12977 if (operand_equal_p (arg0
, arg1
, 0))
12978 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12980 /* !X ^ X is always true. */
12981 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12982 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12983 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12985 /* X ^ !X is always true. */
12986 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12987 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12988 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
13002 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
13003 && code
== NE_EXPR
)
13004 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
13006 /* bool_var == 1 becomes bool_var. */
13007 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
13008 && code
== EQ_EXPR
)
13009 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
13011 /* bool_var != 1 becomes !bool_var. */
13012 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
13013 && code
== NE_EXPR
)
13014 return fold_convert_loc (loc
, type
,
13015 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
13016 TREE_TYPE (arg0
), arg0
));
13018 /* bool_var == 0 becomes !bool_var. */
13019 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
13020 && code
== EQ_EXPR
)
13021 return fold_convert_loc (loc
, type
,
13022 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
13023 TREE_TYPE (arg0
), arg0
));
13025 /* !exp != 0 becomes !exp */
13026 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
13027 && code
== NE_EXPR
)
13028 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
13064 a MINUS_EXPR of a constant, we can convert it into a comparison with
13065 a revised constant as long as no overflow occurs. */
13066 if (TREE_CODE (arg1
) == INTEGER_CST
13067 && (TREE_CODE (arg0
) == PLUS_EXPR
13068 || TREE_CODE (arg0
) == MINUS_EXPR
)
13069 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
13070 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
13071 ? MINUS_EXPR
: PLUS_EXPR
,
13072 fold_convert_loc (loc
, TREE_TYPE (arg0
),
13074 TREE_OPERAND (arg0
, 1)))
13075 && !TREE_OVERFLOW (tem
))
13076 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
13078 /* Similarly for a NEGATE_EXPR. */
13079 if (TREE_CODE (arg0
) == NEGATE_EXPR
13080 && TREE_CODE (arg1
) == INTEGER_CST
13081 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
13083 && TREE_CODE (tem
) == INTEGER_CST
13084 && !TREE_OVERFLOW (tem
))
13085 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
13087 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13088 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
13089 && TREE_CODE (arg1
) == INTEGER_CST
13090 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
13091 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
13092 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
13093 fold_convert_loc (loc
,
13096 TREE_OPERAND (arg0
, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13143 /* If this is an EQ or NE comparison with zero and ARG0 is
13144 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13145 two operations, but the latter can be done in one less insn
13146 on machines that have only two-operand insns or on which a
13147 constant cannot be the first operand. */
13148 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13149 && integer_zerop (arg1
))
13151 tree arg00
= TREE_OPERAND (arg0
, 0);
13152 tree arg01
= TREE_OPERAND (arg0
, 1);
13153 if (TREE_CODE (arg00
) == LSHIFT_EXPR
13154 && integer_onep (TREE_OPERAND (arg00
, 0)))
13156 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
13157 arg01
, TREE_OPERAND (arg00
, 1));
13158 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
13159 build_int_cst (TREE_TYPE (arg0
), 1));
13160 return fold_build2_loc (loc
, code
, type
,
13161 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
13164 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
13165 && integer_onep (TREE_OPERAND (arg01
, 0)))
13167 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
13168 arg00
, TREE_OPERAND (arg01
, 1));
13169 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
13170 build_int_cst (TREE_TYPE (arg0
), 1));
13171 return fold_build2_loc (loc
, code
, type
,
13172 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
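
      /* Worked example (added for exposition, not in the original source):
	 "((1 << n) & flags) == 0" becomes "((flags >> n) & 1) == 0",
	 which avoids materializing the shifted constant first.  */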
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
					   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
					   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
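
      /* Worked example (added for exposition, not in the original source):
	 for a signed int x, "x % 4 == 0" is rewritten as
	 "(unsigned int) x % 4 == 0"; both simply test the two low bits.  */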
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then
		 ((X >> C1) & C2) != 0 can be rewritten as
		 (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
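
      /* Worked example (added for exposition, not in the original source):
	 for a 32-bit unsigned x, "((x >> 2) & 4) != 0" becomes
	 "(x & 16) != 0", since 4 << 2 == 16 still fits in the precision.  */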
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
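
      /* Worked example (added for exposition, not in the original source):
	 for a 32-bit int x, "(x >> 31) != 0" folds to "x < 0" and
	 "(x >> 31) == 0" folds to "x >= 0".  */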
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));

      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
						       BIT_XOR_EXPR, itype,
						       arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
						       BIT_XOR_EXPR, itype,
						       arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
						       BIT_XOR_EXPR, itype,
						       arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
						       BIT_XOR_EXPR, itype,
						       arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
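
      /* Worked example (added for exposition, not in the original source):
	 "(x & 8) == (y & 8)" folds to "((x ^ y) & 8) == 0", testing the
	 one bit on which the operands may differ.  */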
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}

      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
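
      /* Worked example (added for exposition, not in the original source):
	 for a signed int i, "i + 1 > i" folds to true here, on the
	 assumption that signed overflow is undefined; this transformation
	 is what the strict-overflow warnings above report.  */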
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = (HOST_WIDE_INT_M1U << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = (HOST_WIDE_INT_M1U << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
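
      /* Worked example (added for exposition, not in the original source):
	 for a 32-bit unsigned x, "x <= 0x7fffffff" compares against the
	 signed maximum and folds to "(int) x >= 0", letting the sign bit
	 do the test.  */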
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}
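
      /* Worked example (added for exposition, not in the original source):
	 for unsigned x, "x < (1U << y)" folds to "(x >> y) == 0", and
	 "x >= (1U << y)" to "(x >> y) != 0".  */

      return NULL_TREE;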
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
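
/* Worked example (added for exposition, not in the original source): in
   GNU C, folding "0 ? ({ lab: 1; }) : 2" must not discard the unused
   arm, because "lab" could be the target of a goto from outside the
   expression; fold_ternary_loc below uses contains_label_p to detect
   this situation.  */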
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }
  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }
  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc,
								 arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = (HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = (HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~(HOST_WIDE_INT_M1U
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~(HOST_WIDE_INT_M1U
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
						   TREE_TYPE (tem),
						   arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
		 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  TREE_OPERAND (arg0, 0)));

      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR
	   ? integer_all_onesp (op2) : integer_onep (op2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR
	   ? integer_all_onesp (arg1) : integer_onep (arg1))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width)
		 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem
		    = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }

		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
			     CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
			     build_int_cst (TREE_TYPE (op2),
					    (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);

    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts - 1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
= TREE_OPERAND (t
, 0);
14856 tree op1
= TREE_OPERAND (t
, 1);
14858 if (TREE_CODE (op1
) == INTEGER_CST
14859 && TREE_CODE (op0
) == CONSTRUCTOR
14860 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
14862 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
14863 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
14864 unsigned HOST_WIDE_INT begin
= 0;
14866 /* Find a matching index by means of a binary search. */
14867 while (begin
!= end
)
14869 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
14870 tree index
= (*elts
)[middle
].index
;
14872 if (TREE_CODE (index
) == INTEGER_CST
14873 && tree_int_cst_lt (index
, op1
))
14874 begin
= middle
+ 1;
14875 else if (TREE_CODE (index
) == INTEGER_CST
14876 && tree_int_cst_lt (op1
, index
))
14878 else if (TREE_CODE (index
) == RANGE_EXPR
14879 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
14880 begin
= middle
+ 1;
14881 else if (TREE_CODE (index
) == RANGE_EXPR
14882 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
14885 return (*elts
)[middle
].value
;
14892 /* Return a VECTOR_CST if possible. */
14895 tree type
= TREE_TYPE (t
);
14896 if (TREE_CODE (type
) != VECTOR_TYPE
)
14899 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
14900 unsigned HOST_WIDE_INT idx
, pos
= 0;
14903 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
14905 if (!CONSTANT_CLASS_P (value
))
14907 if (TREE_CODE (value
) == VECTOR_CST
)
14909 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
14910 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
14913 vec
[pos
++] = value
;
14915 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
14916 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
14918 return build_vector (type
, vec
);
14922 return fold (DECL_INITIAL (t
));
14926 } /* switch (code) */
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <tree_node> >);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
14969 print_fold_checksum (const_tree expr
)
14971 struct md5_ctx ctx
;
14972 unsigned char checksum
[16], cnt
;
14973 hash_table
<pointer_hash
<tree_node
> > ht
;
14976 md5_init_ctx (&ctx
);
14977 fold_checksum_tree (expr
, &ctx
, ht
);
14978 md5_finish_ctx (&ctx
, checksum
);
14980 for (cnt
= 0; cnt
< 16; ++cnt
)
14981 fprintf (stderr
, "%02x", checksum
[cnt
]);
14982 putc ('\n', stderr
);
14986 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14988 internal_error ("fold check: original tree changed by fold");
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
                    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
            fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
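/* Illustrative only, not part of the original file: how a client
   typically uses the fold_buildN entry points.  Building 1 + 2 through
   fold_build2_loc yields the INTEGER_CST 3 directly instead of a
   PLUS_EXPR node; the names below are standard GCC tree APIs.  */
#if 0
static tree
example_fold_add (location_t loc)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  /* Folds at build time; returns an INTEGER_CST of value 3.  */
  return fold_build2_loc (loc, PLUS_EXPR, integer_type_node, one, two);
}
#endif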
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
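/* Illustrative only, not part of the original file: the _initializer_
   variants let a static initializer such as "static double d = 1.0 / 3.0;"
   fold even when -ftrapping-math or -ftrapv would normally force fold to
   keep the expression, because the flags are cleared around the call and
   restored afterwards.  */
#if 0
static tree
example_fold_init_div (location_t loc, tree t1, tree t3)
{
  /* Same as fold_build2_loc, but temporarily ignores trapping math.  */
  return fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
                                      t1, t3);
}
#endif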
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node, op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom));

    default:
      return 0;
    }
}
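/* Illustrative only, not part of the original file: multiple_of_p
   recurses through MULT_EXPR, PLUS_EXPR, LSHIFT_EXPR and friends, so an
   expression such as i * 8 + 24 is recognized as a multiple of 8
   without knowing anything about i.  */
#if 0
static int
example_multiple_of_8 (tree i)
{
  tree eight = build_int_cst (sizetype, 8);
  tree t = fold_build2 (PLUS_EXPR, sizetype,
                        fold_build2 (MULT_EXPR, sizetype, i, eight),
                        build_int_cst (sizetype, 24));
  return multiple_of_p (sizetype, t, eight);  /* Returns 1.  */
}
#endif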
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
              }
          }
        else if (INTEGRAL_TYPE_P (outer_type))
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* x * x is always non-negative for floating point x
             or without overflow.  */
          if (operand_equal_p (op0, op1, 0)
              || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (type))
                *strict_overflow_p = true;
              return true;
            }
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
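/* Worked example, not from the original source: with 32-bit int, the sum
   (int)(unsigned short)x + (int)(unsigned short)y has at most
   MAX (16, 16) + 1 = 17 significant bits, and 17 < 32, so the PLUS_EXPR
   case above proves it non-negative; likewise the product of two
   zero-extended 8-bit values needs only 8 + 8 = 16 bits, so the
   MULT_EXPR case succeeds as well.  */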
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
        CASE_INT_FN (BUILT_IN_CLZ):
        CASE_INT_FN (BUILT_IN_CLRSB):
        case BUILT_IN_BSWAP32:
        case BUILT_IN_BSWAP64:
          /* Always true.  */
          return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
          /* sqrt(-0.0) is -0.0.  */
          if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
            return true;
          return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
          /* True if the 1st argument is nonnegative.  */
          return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
          /* True if the 1st OR 2nd arguments are nonnegative.  */
          return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
                  || (tree_expr_nonnegative_warnv_p (arg1,
                                                     strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
          /* True if the 1st AND 2nd arguments are nonnegative.  */
          return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
                  && (tree_expr_nonnegative_warnv_p (arg1,
                                                     strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
          /* True if the 2nd argument is nonnegative.  */
          return tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
          /* True if the 1st argument is nonnegative or the second
             argument is an even integer.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && (TREE_INT_CST_LOW (arg1) & 1) == 0)
            return true;
          return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
          /* True if the 1st argument is nonnegative or the second
             argument is an even integer valued real.  */
          if (TREE_CODE (arg1) == REAL_CST)
            {
              REAL_VALUE_TYPE c;
              HOST_WIDE_INT n;

              c = TREE_REAL_CST (arg1);
              n = real_to_integer (&c);
              if ((n & 1) == 0)
                {
                  REAL_VALUE_TYPE cint;
                  real_from_integer (&cint, VOIDmode, n,
                                     n < 0 ? -1 : 0, 0);
                  if (real_identical (&c, &cint))
                    return true;
                }
            }
          return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

        default:
          break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
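/* Illustrative only, not from the original source: pow (x, 2.0) hits the
   BUILT_IN_POW case above, and since 2.0 is an even integer-valued
   REAL_CST the call is known non-negative regardless of the sign of x,
   while pow (x, 3.0) falls back to asking whether x itself is
   non-negative.  */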
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0, arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
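/* Illustrative only, not part of the original file: the warning-aware
   entry point in use.  For a signed expression such as x * x the answer
   relies on signed overflow being undefined, so the wrapper emits the
   "assuming signed overflow does not occur" note before returning true.  */
#if 0
static void
example_nonnegative_query (tree x)
{
  tree t = fold_build2 (MULT_EXPR, integer_type_node, x, x);
  if (tree_expr_nonnegative_p (t))
    /* Reached for x * x; may have warned about strict overflow.  */
    ;
}
#endif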
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0, strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1, strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1, strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0, strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);

        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        bool overflow;
        val = val.neg_with_overflow (&overflow);
        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !val.is_negative ())
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            val = val.neg_with_overflow (&overflow);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
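/* Worked example, not from the original source: folding 2 > 3 first
   swaps the operands and does 3 < 2 per the canonicalization comment
   above, giving false; folding 2 >= 3 does 2 < 3 and inverts the
   result.  NaN operands short-circuit earlier: an ordered comparison
   such as LT against a NaN only folds (to false) when
   -fno-trapping-math permits discarding the trap.  */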
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
                                  index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
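/* Illustrative only, not from the original source: the patterns above in
   source terms.  Given a vector "v4sf v;", *(float *)&v becomes
   BIT_FIELD_REF <v, 32, 0> and ((float *)&v)[1] becomes
   BIT_FIELD_REF <v, 32, 32>; for a complex double c,
   ((double *)&c)[1] becomes __imag__ c.  */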
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
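/* Worked example, not from the original source: for divisor 8 (a power
   of two), round_up_loc computes (value + 7) & -8, so 13 becomes
   (13 + 7) & ~7 = 16; a non-power-of-two divisor such as 12 instead
   goes through CEIL_DIV_EXPR followed by MULT_EXPR, so 13 becomes
   ceil (13 / 12) * 12 = 24.  */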
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
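/* Illustrative only, not from the original source: for "int a[10]", the
   addresses &a[7] and &a[3] share the core "a" and both offsets are
   constant, so ptr_difference_const stores 16 (4 * sizeof (int)) in
   *DIFF; comparing &a[i] against &a[3] fails because only one offset is
   constant.  */
#if 0
static void
example_ptr_diff (tree addr1, tree addr2)
{
  HOST_WIDE_INT diff;
  if (ptr_difference_const (addr1, addr2, &diff))
    /* diff now holds the byte distance between the two addresses.  */
    ;
}
#endif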
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
                                arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}