/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
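/* Illustrative usage of these entry points (an added example, not part
   of the original source): folding the sum of two integer constants
   collapses it to a single INTEGER_CST:

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node,
                             build_int_cst (integer_type_node, 2),
                             build_int_cst (integer_type_node, 3));

   yields an INTEGER_CST of value 5, and
   size_binop (PLUS_EXPR, size_int (2), size_int (3)) likewise yields
   the sizetype constant 5.  */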
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
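/* For example (added illustration), with CODE == EXACT_DIV_EXPR,
   ARG1 == 12 and ARG2 == 4 this returns the constant 3, while
   ARG1 == 13 and ARG2 == 4 leaves a nonzero remainder and therefore
   yields NULL_TREE.  */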
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used in an expression.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;
/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
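/* Typical calling pattern for the machinery above (an added sketch;
   the variable names are illustrative only):

     fold_defer_overflow_warnings ();
     folded = fold (expr);
     fold_undefer_overflow_warnings (result_was_used, stmt, 0);

   so that any warning raised while folding is buffered and only
   issued if the caller actually used the folded result.  */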
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
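/* For example, sin is odd (-sin(x) == sin(-x)), so a negation can be
   moved into the argument of the call.  The rounding functions in the
   second group above are treated as odd only when -frounding-math is
   not in effect, since the rounding direction could otherwise differ
   between f(-x) and -f(x).  */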
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
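/* For example, in a signed 32-bit type the minimum value -2147483648
   has no positive counterpart, so it is the single INTEGER_CST for
   which this predicate is false; every other value negates safely.  */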
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
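/* For example (added illustration), splitting IN == A + CST + 4 with
   CODE == PLUS_EXPR returns the variable part A, stores the
   TREE_CONSTANT (but non-literal) part CST in *CONP and the literal 4
   in *LITP.  */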
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */
static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever oveflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;

          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and return NULL_TREE */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */
tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
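/* For example, size_binop (MULT_EXPR, size_int (4), size_int (8))
   folds directly to the sizetype constant 32 without building a
   MULT_EXPR node.  */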
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */
tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
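/* Concretely: converting 1.0e30 to a 32-bit int saturates to INT_MAX,
   -1.0e30 saturates to INT_MIN, and a NaN converts to zero, with
   TREE_OVERFLOW set on the result in each case.  */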
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;
/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
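/* For example, the inverse of LT_EXPR is GE_EXPR for integer operands,
   but UNGE_EXPR when NaNs are honored, because !(x < y) must also hold
   when x and y compare unordered.  */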
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
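
/* A worked example of the encoding (illustrative; it assumes the
   usual scheme of one bit each for the LT, EQ and GT outcomes plus
   one for UNORD, as laid out in the comparison_code enum):

     (x < y) && (x == y) -> COMPCODE_LT & COMPCODE_EQ -> COMPCODE_FALSE
     (x < y) || (x == y) -> COMPCODE_LT | COMPCODE_EQ -> COMPCODE_LE

   so ANDing or ORing two comparisons of the same operands reduces to
   a bitwise AND or OR of their encodings.  */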
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return 0;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
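
/* Example use (hypothetical caller): folding (x < y) || (x == y) via

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			  boolean_type_node, x, y)

   produces the tree for x <= y, while the corresponding AND folds to
   constant false.  NULL_TREE is returned when the rewrite would add
   or remove a trap under -ftrapping-math.  */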
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	break;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				  VECTOR_CST_ELT (arg1, i), flags))
	      return 0;

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (0);

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
		  && alias_ptr_types_compatible_p
		       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
			TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
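
/* Usage sketch (illustrative, not from the original sources): the
   tcc_binary handling above accepts the commuted operand order, so
   for PLUS_EXPR trees t1 = a + b and t2 = b + a,

     operand_equal_p (t1, t2, 0)

   is nonzero.  With OEP_ONLY_CONST set the same call returns 0 for
   any non-constant operands, and two calls of the same pure function
   only compare equal when OEP_PURE_SAME is passed.  */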
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
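
/* For instance (illustrative): for ARG = (x < y) && (x == y) the
   recursion above visits both comparisons, records *CVAL1 = x and
   *CVAL2 = y, and returns 1.  A third variable, as in
   (x < y) && (x == z), fails the operand_equal_p checks and yields
   0.  */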
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
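
/* Illustrative sketch: given ARG = (x < y), OLD0 = x, NEW0 = zero and
   OLD1 = y, NEW1 = one, eval_subst returns the fold of (0 < 1).
   Callers can thereby probe what an expression built from two
   comparison operands becomes when those operands are replaced by
   known constants.  */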
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
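
/* Example (illustrative): when folding "f () * 0" the call cannot
   simply be dropped if f has side effects;
   omit_one_operand_loc (loc, type, zero, call) instead produces

     COMPOUND_EXPR <f (), 0>

   so the call is still evaluated.  omit_two_operands_loc does the
   same for two dropped operands, evaluating OMITTED1 first.  */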
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
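
/* A few illustrative results of fold_truth_not_expr (all applying
   the inversions above rather than wrapping a TRUTH_NOT_EXPR):

     !(a && b)  ->  !a || !b	(De Morgan)
     !(a < b)   ->  a >= b	(no NaNs involved)
     !!a        ->  a

   Floating-point a < b under -ftrapping-math instead returns
   NULL_TREE, since the inverse UNGE_EXPR could change trapping
   behavior.  */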
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
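
/* Worked instance (illustrative): for (X | 1) & (X | 2) the common
   operand is X, so the result is X | (1 & 2); folding the inner
   constant operation gives X | 0 and finally just X.  */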
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
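
/* Illustrative effect (hypothetical layout, assuming a 4-bit field b
   stored at bit offset 4 of a byte-addressable word w):

     s.b == 3   becomes roughly   (w & MASK) == (3 << SHIFT)

   where MASK and SHIFT come from the mask/lbitpos computation above,
   so the field is never shifted out on its own.  */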
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
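
/* For example (illustrative): with EXP of type unsigned char
   (precision 8), sign_bit_p (exp, val) returns EXP when VAL is 0x80
   and NULL_TREE for every other constant; if EXP is a NOP_EXPR
   extending a narrower operand, the test recurses on the unextended
   operand.  */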
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
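
/* The arithmetic behind the example above (illustrative): X >= 2 &&
   X <= 5 is the range + [2, 5].  Rewriting the test on X - 2 shifts
   the range to + [0, 3], and doing the subtraction and comparison
   unsigned makes every X below 2 wrap to a huge value, so a single
   unsigned (X - 2) <= 3 covers both bounds.  */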
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
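
/* Example (illustrative): range_binop (LT_EXPR, type, NULL_TREE, 0,
   c, 1) compares a missing lower bound (sgn0 == -1, conceptually
   minus infinity) against the finite bound c (sgn1 == 0), so it
   returns true for any representable c.  */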
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
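
/* For example, given EXP = (x > 4) for a signed int X, make_range
   returns X with *PIN_P = 1, *PLOW = 5 and *PHIGH = 0, i.e. the
   range + [5, -] that is unbounded above.  */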
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = HOST_WIDE_INT_M1U;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
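
/* E.g. range_successor of the constant 41 is 42, while the successor
   of TYPE_MAX_VALUE of an integral type is 0, which callers treat as
   "no successor" and punt on.  */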
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
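
/* Two examples: merging + [2, 10] with + [5, 20] (both included, as
   for &&) yields the intersection + [5, 10].  Merging the excluded
   ranges - [-, 4] and - [8, -] takes the adjacency path above and
   yields + [5, 7].  */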
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
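
/* For instance, when signed zeros need not be honored, x >= 0 ? x : -x
   folds to ABS_EXPR <x> and x < y ? x : y folds to a MIN_EXPR, so the
   conditional jump disappears entirely.  */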
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
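
/* The classic case: ch >= '0' && ch <= '9' produces the ranges
   + [48, -] and + [-, 57], which merge to + [48, 57] and are emitted
   as a single range check of the form (unsigned) (ch - '0') <= 9.  */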
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
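
/* Worked example: with an 8-bit type, unextend (5, 3, 0, 0) computes
   temp = (((5 >> 2) & 1) << 7) >> 4 = 0xf8 and returns 5 ^ 0xf8 = 0xfd,
   i.e. the value that the 3-bit pattern 101 yields when sign-extended
   to the full width.  */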
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
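
/* For example, in (a == b && c) || a != b the inner a == b is the
   inversion of the guarding a != b, so it is dropped and the whole
   expression simplifies to c || a != b.  */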
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
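
/* As an illustration, for struct S { unsigned a : 4; unsigned b : 4; }
   the test p->a == 2 && p->b == 4 can be folded into one byte-sized
   access of both fields, masked and compared against the merged
   constant (whose exact value depends on the target's bit ordering).  */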
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
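
/* For example, MAX (x, 3) >= 5 is rewritten via the GE_EXPR case as
   MAX (x, 3) == 5 || MAX (x, 3) > 5, whose recursive folds give
   x == 5 || x > 5.  */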
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  double_int mul;
	  bool overflow_p;
	  unsigned prec = TYPE_PRECISION (ctype);
	  bool uns = TYPE_UNSIGNED (ctype);
	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
	  double_int dic = tree_to_double_int (c).ext (prec, uns);
	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = 1;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				double_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars, or {-1,-1,...} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node
			 : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
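
/* Illustrative sketch, not part of GCC: the source-level shape of the
   transformation above.  The helper name and constants are hypothetical;
   this only demonstrates the equivalence the fold relies on.  */
#if 0
static int
cond_arg_demo (int a, int b, int x, int y)
{
  int before = a + (b ? x : y);
  int after = b ? (a + x) : (a + y);	/* The folded form.  */
  return before == after;		/* Always 1, absent overflow.  */
}
#endif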
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
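
/* Illustrative sketch, not part of GCC: why signed zeros block the
   X + 0.0 -> X fold.  Under round-to-nearest, -0.0 + 0.0 yields +0.0,
   so the addition loses the sign of a negative zero, while -0.0 - 0.0
   preserves it.  The helper name is hypothetical.  */
#if 0
#include <math.h>
static int
signed_zero_demo (void)
{
  double x = -0.0;
  double add = x + 0.0;		/* +0.0: the sign of x is lost.  */
  double sub = x - 0.0;		/* -0.0: the sign of x is kept.  */
  return signbit (add) == 0 && signbit (sub) != 0;	/* 1 */
}
#endif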
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore signaling NaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
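
/* Illustrative sketch, not part of GCC: a comparison against +Inf
   reduces to a comparison against the largest finite value of the mode,
   DBL_MAX for double.  NaNs fail both sides, so the forms agree even
   without the NaN guard.  The helper name is hypothetical.  */
#if 0
#include <float.h>
#include <math.h>
static int
inf_compare_demo (double x)
{
  /* x < +Inf holds exactly when x <= DBL_MAX.  */
  return (x < INFINITY) == (x <= DBL_MAX);	/* 1 for every x.  */
}
#endif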
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
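
/* Illustrative sketch, not part of GCC: the range check this fold
   produces.  With truncating division, x / 4 == 2 holds exactly when
   x lies in [8, 11], so the division disappears.  The helper name is
   hypothetical.  */
#if 0
static int
div_compare_demo (int x)
{
  int divided = (x / 4 == 2);
  int ranged = (x >= 8 && x <= 11);	/* build_range_check form.  */
  return divided == ranged;		/* Always 1.  */
}
#endif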
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
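
/* Illustrative sketch, not part of GCC: when the tested bit is the
   sign bit, the AND disappears entirely.  The helper name is
   hypothetical; assumes 32-bit int and the two's-complement
   conversion GCC implements.  */
#if 0
static int
sign_test_demo (unsigned int a)
{
  int masked = (a & 0x80000000u) != 0;
  int folded = (int) a < 0;	/* The sign-test form.  */
  return masked == folded;	/* Always 1.  */
}
#endif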
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (inner, 1), 1)
	  && bitnum < TYPE_PRECISION (type)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < (unsigned) (TYPE_PRECISION (type) - bitnum)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
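
/* Illustrative sketch, not part of GCC: the shift-and-mask form the
   fold produces for a non-sign bit.  The helper name is
   hypothetical.  */
#if 0
static int
single_bit_demo (unsigned int a)
{
  int before = (a & 8) != 0;	/* (A & C) != 0, with C = 1 << 3.  */
  int after = (a >> 3) & 1;	/* ((A >> log2(C)) & 1).  */
  return before == after;	/* Always 1.  */
}
#endif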
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
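
/* Illustrative sketch, not part of GCC: the address arithmetic this
   transformation canonicalizes.  When the scaled byte offset is a
   multiple of the element size, it becomes a plain index adjustment.
   The helper name is hypothetical.  */
#if 0
static int
move_mult_demo (int a[16], int idx, int delta)
{
  /* &a[idx] p+ delta * sizeof(int) folds to &a[idx + delta].  */
  int *p = (int *) ((char *) &a[idx] + delta * (int) sizeof (int));
  return p == &a[idx + delta];	/* 1 while both stay in bounds.  */
}
#endif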
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
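
/* Illustrative sketch, not part of GCC: the distributive-law folds this
   function performs, at the source level.  The helper name is
   hypothetical; assumes neither form overflows.  */
#if 0
static int
plusminus_mult_demo (int a, int b, int c)
{
  int lhs1 = a * c + b * c;	/* (A * C) + (B * C) ...  */
  int rhs1 = (a + b) * c;	/* ... becomes (A + B) * C.  */
  int lhs2 = a * c + a;		/* (A * C) + A ...  */
  int rhs2 = a * (c + 1);	/* ... becomes A * (C + 1).  */
  return lhs1 == rhs1 && lhs2 == rhs2;
}
#endif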
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
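
/* Illustrative sketch, not part of GCC: the byte layout the encoder
   produces when host and target are both little-endian, written as
   plain C shifts.  The helper name is hypothetical.  */
#if 0
static void
encode_int_demo (unsigned int v, unsigned char buf[4])
{
  /* Byte i holds bits [8*i, 8*i+7], matching the value = x >> bitpos
     extraction in native_encode_int above.  */
  buf[0] = (unsigned char) (v >> 0);
  buf[1] = (unsigned char) (v >> 8);
  buf[2] = (unsigned char) (v >> 16);
  buf[3] = (unsigned char) (v >> 24);
}
#endif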
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (rsize != isize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
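
/* Illustrative sketch, not part of GCC: VIEW_CONVERT_EXPR reinterprets
   a value's bytes in a new type, which at the source level corresponds
   to memcpy-based type punning.  The helper name is hypothetical;
   assumes double and unsigned long long are both 64 bits.  */
#if 0
#include <string.h>
static unsigned long long
view_convert_demo (double d)
{
  unsigned long long bits;
  /* Same bytes, new type: what fold_view_convert_expr computes at
     compile time via native_encode_expr / native_interpret_expr.  */
  memcpy (&bits, &d, sizeof bits);
  return bits;
}
#endif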
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
7816 /* Fold a unary expression of code CODE and type TYPE with operand
7817 OP0. Return the folded expression if folding is successful.
7818 Otherwise, return NULL_TREE. */
7821 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7825 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7827 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7828 && TREE_CODE_LENGTH (code
) == 1);
7833 if (CONVERT_EXPR_CODE_P (code
)
7834 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7836 /* Don't use STRIP_NOPS, because signedness of argument type
7838 STRIP_SIGN_NOPS (arg0
);
7842 /* Strip any conversions that don't change the mode. This
7843 is safe for every expression, except for a comparison
7844 expression because its signedness is derived from its
7847 Note that this is done as an internal manipulation within
7848 the constant folder, in order to find the simplest
7849 representation of the arguments so that their form can be
7850 studied. In any cases, the appropriate type conversions
7851 should be put back in the tree that will get out of the
7857 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7859 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7860 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7861 fold_build1_loc (loc
, code
, type
,
7862 fold_convert_loc (loc
, TREE_TYPE (op0
),
7863 TREE_OPERAND (arg0
, 1))));
7864 else if (TREE_CODE (arg0
) == COND_EXPR
)
7866 tree arg01
= TREE_OPERAND (arg0
, 1);
7867 tree arg02
= TREE_OPERAND (arg0
, 2);
7868 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7869 arg01
= fold_build1_loc (loc
, code
, type
,
7870 fold_convert_loc (loc
,
7871 TREE_TYPE (op0
), arg01
));
7872 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7873 arg02
= fold_build1_loc (loc
, code
, type
,
7874 fold_convert_loc (loc
,
7875 TREE_TYPE (op0
), arg02
));
7876 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7879 /* If this was a conversion, and all we did was to move into
7880 inside the COND_EXPR, bring it back out. But leave it if
7881 it is a conversion from integer to integer and the
7882 result precision is no wider than a word since such a
7883 conversion is cheap and may be optimized away by combine,
7884 while it couldn't if it were outside the COND_EXPR. Then return
7885 so we don't get into an infinite recursion loop taking the
7886 conversion out and then back in. */
7888 if ((CONVERT_EXPR_CODE_P (code
)
7889 || code
== NON_LVALUE_EXPR
)
7890 && TREE_CODE (tem
) == COND_EXPR
7891 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7892 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7893 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7894 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7895 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7896 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7897 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7899 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7900 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7901 || flag_syntax_only
))
7902 tem
= build1_loc (loc
, code
, type
,
7904 TREE_TYPE (TREE_OPERAND
7905 (TREE_OPERAND (tem
, 1), 0)),
7906 TREE_OPERAND (tem
, 0),
7907 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7908 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7917 /* Re-association barriers around constants and other re-association
7918 barriers can be removed. */
7919 if (CONSTANT_CLASS_P (op0
)
7920 || TREE_CODE (op0
) == PAREN_EXPR
)
7921 return fold_convert_loc (loc
, type
, op0
);
7926 case FIX_TRUNC_EXPR
:
7927 if (TREE_TYPE (op0
) == type
)
7930 if (COMPARISON_CLASS_P (op0
))
7932 /* If we have (type) (a CMP b) and type is an integral type, return
7933 new expression involving the new type. Canonicalize
7934 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7936 Do not fold the result as that would not simplify further, also
7937 folding again results in recursions. */
7938 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7939 return build2_loc (loc
, TREE_CODE (op0
), type
,
7940 TREE_OPERAND (op0
, 0),
7941 TREE_OPERAND (op0
, 1));
7942 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7943 && TREE_CODE (type
) != VECTOR_TYPE
)
7944 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7945 constant_boolean_node (true, type
),
7946 constant_boolean_node (false, type
));
7949 /* Handle cases of two conversions in a row. */
7950 if (CONVERT_EXPR_P (op0
))
7952 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7953 tree inter_type
= TREE_TYPE (op0
);
7954 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7955 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7956 int inside_float
= FLOAT_TYPE_P (inside_type
);
7957 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7958 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7959 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7960 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7961 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7962 int inter_float
= FLOAT_TYPE_P (inter_type
);
7963 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7964 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7965 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7966 int final_int
= INTEGRAL_TYPE_P (type
);
7967 int final_ptr
= POINTER_TYPE_P (type
);
7968 int final_float
= FLOAT_TYPE_P (type
);
7969 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7970 unsigned int final_prec
= TYPE_PRECISION (type
);
7971 int final_unsignedp
= TYPE_UNSIGNED (type
);
7973 /* In addition to the cases of two conversions in a row
7974 handled below, if we are converting something to its own
7975 type via an object of identical or wider precision, neither
7976 conversion is needed. */
7977 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7978 && (((inter_int
|| inter_ptr
) && final_int
)
7979 || (inter_float
&& final_float
))
7980 && inter_prec
>= final_prec
)
7981 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7983 /* Likewise, if the intermediate and initial types are either both
7984 float or both integer, we don't need the middle conversion if the
7985 former is wider than the latter and doesn't change the signedness
7986 (for integers). Avoid this if the final type is a pointer since
7987 then we sometimes need the middle conversion. Likewise if the
7988 final type has a precision not equal to the size of its mode. */
7989 if (((inter_int
&& inside_int
)
7990 || (inter_float
&& inside_float
)
7991 || (inter_vec
&& inside_vec
))
7992 && inter_prec
>= inside_prec
7993 && (inter_float
|| inter_vec
7994 || inter_unsignedp
== inside_unsignedp
)
7995 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7996 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7998 && (! final_vec
|| inter_prec
== inside_prec
))
7999 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8001 /* If we have a sign-extension of a zero-extended value, we can
8002 replace that by a single zero-extension. Likewise if the
8003 final conversion does not change precision we can drop the
8004 intermediate conversion. */
8005 if (inside_int
&& inter_int
&& final_int
8006 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
8007 && inside_unsignedp
&& !inter_unsignedp
)
8008 || final_prec
== inter_prec
))
8009 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
8011 /* Two conversions in a row are not needed unless:
8012 - some conversion is floating-point (overstrict for now), or
8013 - some conversion is a vector (overstrict for now), or
8014 - the intermediate type is narrower than both initial and
8016 - the intermediate type and innermost type differ in signedness,
8017 and the outermost type is wider than the intermediate, or
8018 - the initial type is a pointer type and the precisions of the
8019 intermediate and final types differ, or
8020 - the final type is a pointer type and the precisions of the
8021 initial and intermediate types differ. */
8022 if (! inside_float
&& ! inter_float
&& ! final_float
8023 && ! inside_vec
&& ! inter_vec
&& ! final_vec
8024 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
8025 && ! (inside_int
&& inter_int
8026 && inter_unsignedp
!= inside_unsignedp
8027 && inter_prec
< final_prec
)
8028 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
8029 == (final_unsignedp
&& final_prec
> inter_prec
))
8030 && ! (inside_ptr
&& inter_prec
!= final_prec
)
8031 && ! (final_ptr
&& inside_prec
!= inter_prec
)
8032 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
8033 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
8034 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}
      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
		  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
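      /* Editorial note (illustrative, not from the original sources):
	 with "unsigned char x", (unsigned long)(x & 0x7f) becomes
	 (unsigned long)x & 0x7fUL here, folding the widening into the
	 BIT_AND_EXPR; the mask guarantees the sign bit of the narrower
	 type is clear, so zero- and sign-extension agree.  */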
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
					HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}

      return NULL_TREE;
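      /* Editorial note on the BIT_NOT_EXPR folds above (illustrative,
	 not from the original sources): by the two's-complement
	 identity -A == ~A + 1, ~(-A) folds to A - 1 and ~(A - 1) folds
	 to -A; similarly ~(x ^ 0xff) folds to x ^ ~0xff because the
	 NOT of the constant operand simplifies.  */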
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
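      /* Editorial note (illustrative, not from the original sources):
	 unpacking the V4HI constant { 1, 2, 3, 4 } with
	 VEC_UNPACK_LO_EXPR to V2SI converts one half of the input
	 (which half depends on BYTES_BIG_ENDIAN) element by element
	 with NOP_EXPR, yielding { 1, 2 }; the FLOAT variants use
	 FLOAT_EXPR for the per-element conversion instead.  */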
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;
8572 /* We only do these simplifications if we are optimizing. */
8576 /* Check for things like (A || B) && (A || C). We can convert this
8577 to A || (B && C). Note that either operator can be any of the four
8578 truth and/or operations and the transformation will still be
8579 valid. Also note that we only care about order for the
8580 ANDIF and ORIF operators. If B contains side effects, this
8581 might change the truth-value of A. */
8582 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8583 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8584 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8585 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8586 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8587 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8589 tree a00
= TREE_OPERAND (arg0
, 0);
8590 tree a01
= TREE_OPERAND (arg0
, 1);
8591 tree a10
= TREE_OPERAND (arg1
, 0);
8592 tree a11
= TREE_OPERAND (arg1
, 1);
8593 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8594 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8595 && (code
== TRUTH_AND_EXPR
8596 || code
== TRUTH_OR_EXPR
));
8598 if (operand_equal_p (a00
, a10
, 0))
8599 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8600 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8601 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8602 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8603 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8604 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8605 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8606 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8608 /* This case if tricky because we must either have commutative
8609 operators or else A10 must not have side-effects. */
8611 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8612 && operand_equal_p (a01
, a11
, 0))
8613 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8614 fold_build2_loc (loc
, code
, type
, a00
, a10
),
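  /* Editorial note (illustrative, not from the original sources):
     (a || b) && (a || c) folds here to a || (b && c); the
     TREE_SIDE_EFFECTS guard on B above is what keeps the rewrite from
     changing observable behavior when A ends up evaluated once.  */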
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }
  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves into a non-IF AND/OR
	 expression.
	 If the tree code of the left-hand operand isn't an AND/OR-IF code
	 and isn't equal to IF-CODE, then we don't want to add the
	 right-hand operand.  If the inner right-hand side of the
	 left-hand operand has side-effects, or isn't simple, then we
	 can't add to it, as otherwise we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
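/* Editorial note (illustrative, not from the original sources): on
   targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds, (a && b) && c with
   simple, non-trapping operands is repacked above as
   a ANDIF (b AND c), keeping at most two leaves in the
   non-short-circuit operator so it can later be emitted as
   branch-free code.  */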
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
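/* Editorial note (illustrative, not from the original sources): for
   signed x with undefined overflow, "x + 5 <= y" is canonicalized
   above to "x + 4 < y", and the sole-constant form "5 <= y" becomes
   "y > 4" after the final swap; each step reduces the magnitude of
   the constant by one.  */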
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.low > (unsigned HOST_WIDE_INT) size;
}
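/* Editorial note (illustrative, not from the original sources): for
   "&s.f" where field f sits at byte offset 8 of a 16-byte object,
   the call computes total == 8, which is not greater than the object
   size, so no wraparound is assumed and the overflow warning is
   suppressed.  */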
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  double_int d = tree_to_double_int (t);
  return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 +- C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (host_integerp (offset0, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (host_integerp (offset1, 0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}
      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to stay within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
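/* Editorial note (illustrative, not from the original sources):
   bitwise NOT reverses the order of values, so "~x < ~y" folds above
   to "y < x", and "~x == C" becomes "x == ~C" via the
   swapped-comparison form.  */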
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
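/* Editorial note (illustrative, not from the original sources): for
   z = a + b*i the product z * conj(z) equals (a*a + b*b) + 0*i,
   which is exactly the COMPLEX_EXPR of the sum of the two squares
   and a zero imaginary part built above.  */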
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
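}

/* Editorial note (illustrative, not from the original sources): for
   "char a[8]" with 16-byte alignment, &a[4] yields modulus M == 16
   and residue N == 4, i.e. the low four bits of the pointer value
   are known to be 0b0100.  */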
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
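/* For example, permuting the constant vectors { 0, 1, 2, 3 } and
   { 4, 5, 6, 7 } with SEL == { 0, 4, 1, 5 } yields the VECTOR_CST
   { 0, 4, 1, 5 }: SEL indexes the 2*NELTS concatenated input
   elements, which is why ELTS holds three NELTS-sized groups.  */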
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
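/* For example, for AREF0 == a[i] and AREF1 == a[j] with an array of
   4-byte ints, the common base contributes the zero BASE_OFFSET and
   the result is 0 + (i - j) * 4, i.e. the byte difference of the two
   addresses.  */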
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
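/* For example, for a double CST of 8.0 this returns 0.125, since the
   inverse of a power of two is exact in binary floating point; for
   3.0 it returns NULL because 1.0/3.0 is not exactly
   representable.  */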
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static double_int
mask_with_tz (tree type, double_int x, double_int y)
{
  int tz = y.trailing_zeros ();

  if (tz > 0)
    {
      double_int mask;

      mask = ~double_int::mask (tz);
      mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
      return mask & x;
    }

  return x;
}
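/* For example, with Y == 24 (binary 11000, three trailing zeroes) and
   X == 0b10111, the result is 0b10000: the three low bits of X are
   cleared, since a multiplication by Y forces them to zero anyway.  */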
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
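/* For example, tree_expr_nonzero_p returns true for T == &some_var
   without any overflow assumption; when the conclusion does rely on
   signed overflow being undefined, tree_expr_nonzero_warnv_p sets
   *STRICT_OVERFLOW_P and the wrapper above issues the corresponding
   -Wstrict-overflow note.  */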
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)),
				 op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}

      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this kind of
	 expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
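      /* A worked instance of the fold just above: X + (X / 4) * -4 has
	 CST0 == 4 and CST1 == -4, whose sum is zero, so the whole
	 expression becomes X % 4; truncating division makes the
	 identity hold for negative X as well.  */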
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== element_precision (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
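      /* Concrete instances of the rotate recognition above: with a
	 32-bit unsigned A, both

	     (A << 3) + (A >> 29)
	     (A << B) + (A >> (32 - B))

	 are folded to left-rotates of A, by 3 and by B respectively.  */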
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type,
				  associate_trees (loc, var0, con0,
						   code, atype));
	    }
	}

      return NULL_TREE;
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
			    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
			    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);
	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2),
						     arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}
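	      /* Concrete instances of the two pow folds above:
		 pow (x, 2.0) * pow (y, 2.0) becomes pow (x*y, 2.0), and
		 pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0); both
		 rely on -funsafe-math-optimizations being in effect.  */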
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;
	  bool try_simplify = true;

	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }

	  /* If X is a tree of the form (Y * K1) & K2, this might conflict
	     with that optimization from the BIT_AND_EXPR optimizations.
	     This could end up in an infinite recursion.  */
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
		 == INTEGER_CST)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	      double_int masked
		= mask_with_tz (type, c3, tree_to_double_int (t));

	      try_simplify = (masked != c1);
	    }

	  if (try_simplify && c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}
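      /* A worked instance of the canonicalization above: (X & 0x3f) | 0x0f
	 becomes (X & 0x30) | 0x0f, since the bits of C1 that C2 already
	 forces to one are dropped from C1 -- unless keeping them would
	 preserve a mask of a machine mode (the W loop above), which could
	 enable other folds.  */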
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
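      /* For example, in (X & 0x0F) ^ (Y & 0xF0) the two masks share no
         bits, so no bit position can ever be set in both operands and
         the XOR behaves exactly like an inclusive OR.  */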
      /* (X | Y) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                                 arg1));
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X , X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tree tem2;
          tem = TREE_OPERAND (arg0, 0);
          tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
          tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
                                  tem, tem2);
          return fold_build2_loc (loc, EQ_EXPR, type, tem2,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_zero_cst (TREE_TYPE (tem)));
        }
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          double_int cst1 = tree_to_double_int (arg1);
          double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
                                          TYPE_UNSIGNED (TREE_TYPE (arg1)));
          if ((cst1 & ncst1) == ncst1
              && multiple_of_p (type, arg0,
                                double_int_to_tree (TREE_TYPE (arg1), ncst1)))
            return fold_convert_loc (loc, type, arg0);
        }
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          double_int masked
            = mask_with_tz (type, tree_to_double_int (arg1),
                            tree_to_double_int (TREE_OPERAND (arg0, 1)));

          if (masked.is_zero ())
            return omit_two_operands_loc (loc, type, build_zero_cst (type),
                                          arg0, arg1);
          else if (masked != tree_to_double_int (arg1))
            return fold_build2_loc (loc, code, type, op0,
                                    double_int_to_tree (type, masked));
        }
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0),
                                             shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                            tem, newmaskt);
                }
            }
        }
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));
      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (optimize
          && (TREE_CODE (arg1) == REAL_CST
              || (TREE_CODE (arg1) == COMPLEX_CST
                  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
              || (TREE_CODE (arg1) == VECTOR_CST
                  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.
             TODO: Complex reciprocal not implemented.  */
          if (TREE_CODE (arg1) != COMPLEX_CST)
            {
              tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

              if (inverse)
                return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
            }
        }
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A) */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum)) {
            unsigned long pow2;

            if (TREE_INT_CST_LOW (arg1))
              pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
            else
              pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
                     + HOST_BITS_PER_WIDE_INT;

            return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                    TREE_OPERAND (arg0, 0),
                                    build_int_cst (integer_type_node, pow2));
          }
        }
      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (TREE_TYPE (sh_cnt),
                                                       pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Prefer vector1 << scalar to vector1 << vector2
         if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
          && (tem = uniform_vector_p (arg1)) != NULL_TREE)
        return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, true)
          && TREE_INT_CST_LOW (arg1) < prec
          && host_integerp (TREE_OPERAND (arg0, 1), true)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
        {
          unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                              + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= prec)
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % prec;
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type, build_zero_cst (type),
                                             TREE_OPERAND (arg0, 0));
              else
                low = prec - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (TREE_TYPE (arg1), low));
        }
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < prec
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_minus_one_cst (type);
              lshift = const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1), prec);
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == prec))
        return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg0, 1), arg1);
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
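      /* Illustrative example: "((x >> 3) & 4) != 0" tests bit 5 of X, and
	 4 << 3 == 32 still fits the precision, so it becomes
	 "(x & 32) != 0".  When C2 << C1 would overflow and the shift is
	 arithmetic, the tested bit is a copy of the sign bit, so the test
	 collapses to "x < 0" (for !=) or "x >= 0" (for ==).  */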
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (arg1), arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
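      /* Illustrative example: assuming a 32-bit int x, "(x >> 31) != 0"
	 becomes "x < 0", since the shifted value is nonzero exactly when
	 the sign bit is set.  */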
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
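      /* Illustrative examples: "(x ^ y) != 0" becomes "x != y", and with
	 constants "(x ^ 5) == 3" becomes "x == (5 ^ 3)", i.e. "x == 6".  */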
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    tem),
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}
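      /* Illustrative example: "((x & 8) ^ 8) == 0" holds exactly when
	 bit 3 of X is set, so it is rewritten as "(x & 8) != 0".  */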
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
							BIT_XOR_EXPR, itype,
							arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
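      /* Illustrative example: "(x & 7) == (y & 7)" becomes
	 "((x ^ y) & 7) == 0"; the masked bits agree exactly when their
	 XOR has no bits inside the mask, and a comparison against zero
	 is usually cheaper.  */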
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
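      /* Illustrative example: when signed overflow is undefined,
	 "x + 1 > x" folds to 1 and "x - 1 >= x" folds to 0; each fold
	 can emit a -Wstrict-overflow note recording the assumption.  */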
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = (HOST_WIDE_INT_M1U << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = (HOST_WIDE_INT_M1U << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
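      /* Illustrative example: for unsigned char u, "u > 255" folds to 0
	 and "u <= 255" to 1, while "u < 255" becomes "u != 255" and
	 "u >= 255" becomes "u == 255".  */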
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
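      /* Illustrative example: "abs (x) <= 5" becomes the range check
	 "x >= -5 && x <= 5", and "abs (x) >= 0" is simply true when no
	 NaN can be involved.  */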
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
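      /* Illustrative example: for unsigned x, "x < (1U << y)" becomes
	 "(x >> y) == 0" and "x >= (1U << y)" becomes "(x >> y) != 0".  */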
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));

	return NULL_TREE;
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
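      /* Illustrative example: packing the V2SI constants {1, 2} and
	 {3, 4} with VEC_PACK_TRUNC_EXPR yields the V4HI constant
	 {1, 2, 3, 4}, each element narrowed to the element type of the
	 result vector.  */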
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
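/* Note: contains_label_p is what keeps fold_ternary_loc below from
   discarding a dead COND_EXPR arm that still carries a label, since a
   goto from outside the expression could jump into that arm.  */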
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc
							    (loc, arg0)));
14313 if (TREE_CODE (arg0
) == LT_EXPR
14314 && integer_zerop (TREE_OPERAND (arg0
, 1))
14315 && integer_zerop (op2
)
14316 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
14318 /* sign_bit_p looks through both zero and sign extensions,
14319 but for this optimization only sign extensions are
14321 tree tem2
= TREE_OPERAND (arg0
, 0);
14322 while (tem
!= tem2
)
14324 if (TREE_CODE (tem2
) != NOP_EXPR
14325 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
14330 tem2
= TREE_OPERAND (tem2
, 0);
14332 /* sign_bit_p only checks ARG1 bits within A's precision.
14333 If <sign bit of A> has wider type than A, bits outside
14334 of A's precision in <sign bit of A> need to be checked.
14335 If they are all 0, this optimization needs to be done
14336 in unsigned A's type, if they are all 1 in signed A's type,
14337 otherwise this can't be done. */
14339 && TYPE_PRECISION (TREE_TYPE (tem
))
14340 < TYPE_PRECISION (TREE_TYPE (arg1
))
14341 && TYPE_PRECISION (TREE_TYPE (tem
))
14342 < TYPE_PRECISION (type
))
14344 unsigned HOST_WIDE_INT mask_lo
;
14345 HOST_WIDE_INT mask_hi
;
14346 int inner_width
, outer_width
;
14349 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
14350 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
14351 if (outer_width
> TYPE_PRECISION (type
))
14352 outer_width
= TYPE_PRECISION (type
);
14354 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
14356 mask_hi
= (HOST_WIDE_INT_M1U
14357 >> (HOST_BITS_PER_DOUBLE_INT
- outer_width
));
14363 mask_lo
= (HOST_WIDE_INT_M1U
14364 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
14366 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
14368 mask_hi
&= ~(HOST_WIDE_INT_M1U
14369 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
14373 mask_lo
&= ~(HOST_WIDE_INT_M1U
14374 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
14376 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
14377 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
14379 tem_type
= signed_type_for (TREE_TYPE (tem
));
14380 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14382 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
14383 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
14385 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
14386 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14394 fold_convert_loc (loc
, type
,
14395 fold_build2_loc (loc
, BIT_AND_EXPR
,
14396 TREE_TYPE (tem
), tem
,
14397 fold_convert_loc (loc
,
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0,
									0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2)
				 : integer_onep (op2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1)
				 : integer_onep (arg1))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
	  unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width)
		 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem
		    = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }

		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
	  unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
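      /* Illustrative example: FMA_EXPR <3, 4, 5> folds to the constant
	 3 * 4 + 5 == 17, and a literal zero addend reduces the FMA to a
	 plain multiplication.  */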
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (val) & mask;
	      if (TREE_INT_CST_HIGH (val)
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (val) != sel[i]))
		need_mask_canon = true;

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts - 1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
14861 #ifdef ENABLE_FOLD_CHECKING
14864 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
14865 hash_table
<pointer_hash
<tree_node
> >);
14866 static void fold_check_failed (const_tree
, const_tree
);
14867 void print_fold_checksum (const_tree
);
14869 /* When --enable-checking=fold, compute a digest of expr before
14870 and after actual fold call to see if fold did not accidentally
14871 change original expr. */
14877 struct md5_ctx ctx
;
14878 unsigned char checksum_before
[16], checksum_after
[16];
14879 hash_table
<pointer_hash
<tree_node
> > ht
;
14882 md5_init_ctx (&ctx
);
14883 fold_checksum_tree (expr
, &ctx
, ht
);
14884 md5_finish_ctx (&ctx
, checksum_before
);
14887 ret
= fold_1 (expr
);
14889 md5_init_ctx (&ctx
);
14890 fold_checksum_tree (expr
, &ctx
, ht
);
14891 md5_finish_ctx (&ctx
, checksum_after
);
14894 if (memcmp (checksum_before
, checksum_after
, 16))
14895 fold_check_failed (expr
, ret
);
14901 print_fold_checksum (const_tree expr
)
14903 struct md5_ctx ctx
;
14904 unsigned char checksum
[16], cnt
;
14905 hash_table
<pointer_hash
<tree_node
> > ht
;
14908 md5_init_ctx (&ctx
);
14909 fold_checksum_tree (expr
, &ctx
, ht
);
14910 md5_finish_ctx (&ctx
, checksum
);
14912 for (cnt
= 0; cnt
< 16; ++cnt
)
14913 fprintf (stderr
, "%02x", checksum
[cnt
]);
14914 putc ('\n', stderr
);
14918 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14920 internal_error ("fold check: original tree changed by fold");
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  ht.create (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  ht.dispose ();

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.dispose ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  ht.dispose ();

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <tree_node> > ht;
  int i;

  ht.create (32);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  ht.dispose ();

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
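/* Illustrative sketch (not part of GCC): the save/clear/restore idiom the
   two macros above implement, reduced to a single flag.  The macro pair
   keeps each fold_buildN_initializer_loc wrapper down to a few lines; the
   names below are hypothetical.  */
#if 0
#include <assert.h>

static int flag_trapping_math = 1;

#define START_INIT \
  int saved_trapping_math = flag_trapping_math; \
  flag_trapping_math = 0;

#define END_INIT \
  flag_trapping_math = saved_trapping_math;

static int
fold_for_initializer (int x)
{
  int result;
  START_INIT;
  result = x + 1;		/* "fold" with relaxed FP flags */
  assert (flag_trapping_math == 0);
  END_INIT;
  return result;
}

int
main (void)
{
  assert (fold_for_initializer (1) == 2);
  assert (flag_trapping_math == 1);	/* restored on exit */
  return 0;
}
#endif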
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
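/* Illustrative check (not part of GCC) of the BIT_AND_EXPR rule above:
   for a power-of-two BOTTOM, (X & M) is a multiple of BOTTOM whenever the
   mask M is, because masking can only clear bits, and the low bits that
   make a value a non-multiple of BOTTOM are already zero in M.  */
#if 0
#include <assert.h>

int
main (void)
{
  const unsigned bottom = 8;	/* power of two */
  const unsigned mask = 24;	/* a multiple of 8 */
  unsigned x;
  for (x = 0; x < 256; x++)
    assert ((x & mask) % bottom == 0);
  return 0;
}
#endif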
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
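/* Illustrative check (not part of GCC) of the precision rules above:
   the sum of two zero-extended 8-bit values fits in 9 bits, so in a
   32-bit result it can never reach the sign bit; likewise the product
   fits in 8 + 8 = 16 bits.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned i, j;
  for (i = 0; i < 256; i++)
    for (j = 0; j < 256; j++)
      {
	uint8_t x = (uint8_t) i, y = (uint8_t) j;
	int32_t sum = (int32_t) x + (int32_t) y;	/* prec 9 < 32 */
	int32_t prod = (int32_t) x * (int32_t) y;	/* prec 16 < 32 */
	assert (sum >= 0 && prod >= 0);
      }
  return 0;
}
#endif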
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
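/* Illustrative example (not part of GCC) of why the ADDR_EXPR of a weak
   declaration is not known nonzero above: an undefined weak symbol
   legitimately has address 0, so a test of its address must survive
   folding.  GCC/ELF specific; the symbol name is hypothetical.  */
#if 0
#include <stdio.h>

extern void weak_fn (void) __attribute__ ((weak));

int
main (void)
{
  /* This test must not be folded away to a constant true.  */
  if (weak_fn)
    weak_fn ();
  else
    printf ("weak_fn not linked in\n");
  return 0;
}
#endif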
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
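/* Illustrative usage sketch (not part of GCC's sources here): callers use
   this as a "fold, but only if it constant-folds" query.  The operand
   values are hypothetical.  */
#if 0
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				      build_int_cst (integer_type_node, 2),
				      build_int_cst (integer_type_node, 3));
  /* sum is the INTEGER_CST 5; with non-constant operands the result would
     be NULL_TREE rather than a residual PLUS_EXPR.  */
#endif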
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion,
	     (ARRAY + (INDEX - (unsigned char) 1))
	     becomes ((ARRAY + (-(unsigned char) 1)) + INDEX),
	     which becomes (ARRAY + 255 + INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
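/* Illustrative example (not part of GCC) of the source-level pattern the
   function above folds: indexing a constant string with a constant index
   reduces to a character constant at compile time.  */
#if 0
#include <assert.h>

int
main (void)
{
  /* "abc"[1] folds to 'b'; no load from the string is needed.  */
  assert ("abc"[1] == 'b');
  return 0;
}
#endif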
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
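/* Illustrative example (not part of GCC) of the overflow case the
   INTEGER_CST arm above tracks: negating the most negative value of a
   signed type has no representable result, so the folded constant must
   carry an overflow flag.  Uses the GCC/Clang checked-arithmetic builtin,
   which postdates this file's era and is shown here only as a sketch.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int r;
  /* 0 - INT_MIN overflows: -INT_MIN == INT_MAX + 1.  */
  assert (__builtin_sub_overflow (0, INT_MIN, &r));
  /* One step away from the edge is fine.  */
  assert (!__builtin_sub_overflow (0, INT_MIN + 1, &r) && r == INT_MAX);
  return 0;
}
#endif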
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !val.is_negative ())
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    val = val.neg_with_overflow (&overflow);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = ~tree_to_double_int (arg0);
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
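/* Illustrative check (not part of GCC) of the complex-constant rule above:
   equality of complex numbers is the AND of the componentwise comparisons,
   inequality the OR, and no other relational operator is defined.  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double complex a = 1.0 + 2.0 * I;
  double complex b = 1.0 + 3.0 * I;
  assert ((a == b) == (creal (a) == creal (b) && cimag (a) == cimag (b)));
  assert ((a != b) == (creal (a) != creal (b) || cimag (a) != cimag (b)));
  return 0;
}
#endif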
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
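/* Illustrative check (not part of GCC) of the power-of-two fast path
   above: rounding VALUE up to a multiple of a power of two D is
   (VALUE + D - 1) & -D, the same PLUS_EXPR/BIT_AND_EXPR pair built for
   non-constant values.  */
#if 0
#include <assert.h>

static unsigned
round_up_pow2 (unsigned value, unsigned d)	/* D must be a power of two */
{
  return (value + d - 1) & -d;
}

int
main (void)
{
  assert (round_up_pow2 (0, 8) == 0);
  assert (round_up_pow2 (1, 8) == 8);
  assert (round_up_pow2 (8, 8) == 8);
  assert (round_up_pow2 (9, 8) == 16);
  return 0;
}
#endif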
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
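/* Illustrative example (not part of GCC) of what ptr_difference_const
   computes: two addresses with the same core and constant offsets differ
   by a compile-time constant, here in bytes scaled back to elements.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];
  /* Same core (a), constant offsets 5 and 2: the difference folds to 3.  */
  assert (&a[5] - &a[2] == 3);
  return 0;
}
#endif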
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
				arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	  /* Strip copysign function call, return the 1st argument.  */
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  arg1 = CALL_EXPR_ARG (exp, 1);
	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	default:
	  /* Strip sign ops from the argument of "odd" math functions.  */
	  if (negate_mathfn_p (fcode))
	    {
	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
	      if (arg0)
		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1,
					    arg0);