/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
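
/* Illustrative sketch (added commentary, not GCC's own text): size_int and
   size_binop are typically combined for compile-time size arithmetic, e.g.

     tree off = size_int (4);
     tree len = size_int (8);
     tree end = size_binop (PLUS_EXPR, off, len);

   Because both operands are constants of matching type, END is an
   INTEGER_CST of value 12 with type `sizetype'; with non-constant operands
   the result would instead be a PLUS_EXPR tree.  */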
#include "coretypes.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-vector-builder.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
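
/* Illustrative note (added commentary, assuming the usual bit assignment
   in the enumeration below): COMPCODE_LE is the bitwise OR of COMPCODE_LT
   and COMPCODE_EQ, so the conjunction of a <= b and a >= b can be computed
   by ANDing the two codes, leaving only the EQ bit set.  */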
enum comparison_code {
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

expr_location_or (tree t, location_t loc)
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare it.  */

protected_set_expr_location_unshare (tree x, location_t loc)
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
      SET_EXPR_LOCATION (x, loc);
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

div_if_zero_remainder (const_tree arg1, const_tree arg2)
  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);
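
/* Worked example (illustrative): for ARG1 = 12 and ARG2 = 4 the remainder
   is zero, so an INTEGER_CST of value 3 in the type of ARG1 is returned;
   for ARG1 = 13 and ARG2 = 4 the division is inexact and NULL_TREE is
   returned instead.  */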
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

fold_defer_overflow_warnings (void)
  ++fold_deferring_overflow_warnings;
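
/* Typical usage pattern (an illustrative sketch; `stmt' and `want_warning'
   stand for caller-specific state): code that wants to fold without
   immediately emitting -Wstrict-overflow diagnostics brackets the work with
   the defer/undefer pair, e.g.

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE && want_warning,
                                     stmt, WARN_STRICT_OVERFLOW_MISC);  */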
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
      if (fold_deferred_overflow_warning != NULL
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)

  if (gimple_no_warning_p (stmt))

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))

  locus = input_location;
  locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

fold_undefer_and_ignore_overflow_warnings (void)
  fold_undefer_overflow_warnings (false, NULL, 0);

/* Whether we are deferring overflow warnings.  */

fold_deferring_overflow_warnings_p (void)
  return fold_deferring_overflow_warnings > 0;
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
  if (fold_deferring_overflow_warnings > 0)
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

negate_mathfn_p (combined_fn fn)
      return !flag_rounding_math;
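
/* Example (illustrative): sin is odd, so -sin(x) may be rewritten as
   sin(-x), whereas cos is even and does not qualify.  The
   !flag_rounding_math result above covers functions such as rint and
   nearbyint, whose negation is only safe to move inward when
   rounding-mode dependence can be ignored.  */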
/* Check whether we may negate an integer constant T without causing
   overflow.  */

may_negate_without_overflow_p (const_tree t)
  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))

  return !wi::only_sign_bit_p (wi::to_wide (t));
367 /* Determine whether an expression T can be cheaply negated using
368 the function negate_expr without introducing undefined overflow. */
371 negate_expr_p (tree t
)
378 type
= TREE_TYPE (t
);
381 switch (TREE_CODE (t
))
384 if (INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
))
387 /* Check that -CST will not overflow type. */
388 return may_negate_without_overflow_p (t
);
390 return (INTEGRAL_TYPE_P (type
)
391 && TYPE_OVERFLOW_WRAPS (type
));
397 return !TYPE_OVERFLOW_SANITIZED (type
);
400 /* We want to canonicalize to positive real constants. Pretend
401 that only negative ones can be easily negated. */
402 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
405 return negate_expr_p (TREE_REALPART (t
))
406 && negate_expr_p (TREE_IMAGPART (t
));
410 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
413 /* Steps don't prevent negation. */
414 unsigned int count
= vector_cst_encoded_nelts (t
);
415 for (unsigned int i
= 0; i
< count
; ++i
)
416 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t
, i
)))
423 return negate_expr_p (TREE_OPERAND (t
, 0))
424 && negate_expr_p (TREE_OPERAND (t
, 1));
427 return negate_expr_p (TREE_OPERAND (t
, 0));
430 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
431 || HONOR_SIGNED_ZEROS (element_mode (type
))
432 || (ANY_INTEGRAL_TYPE_P (type
)
433 && ! TYPE_OVERFLOW_WRAPS (type
)))
435 /* -(A + B) -> (-B) - A. */
436 if (negate_expr_p (TREE_OPERAND (t
, 1)))
438 /* -(A + B) -> (-A) - B. */
439 return negate_expr_p (TREE_OPERAND (t
, 0));
442 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
443 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
444 && !HONOR_SIGNED_ZEROS (element_mode (type
))
445 && (! ANY_INTEGRAL_TYPE_P (type
)
446 || TYPE_OVERFLOW_WRAPS (type
));
449 if (TYPE_UNSIGNED (type
))
      /* INT_MIN/n * n doesn't overflow while negating one operand;
         it does if n is a (negative) power of two.  */
453 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
454 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
455 && ! ((TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
457 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 0))))) != 1)
458 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
460 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 1))))) != 1)))
466 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
467 return negate_expr_p (TREE_OPERAND (t
, 1))
468 || negate_expr_p (TREE_OPERAND (t
, 0));
474 if (TYPE_UNSIGNED (type
))
476 if (negate_expr_p (TREE_OPERAND (t
, 0)))
478 /* In general we can't negate B in A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. */
481 if (! INTEGRAL_TYPE_P (TREE_TYPE (t
))
482 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
483 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
484 && ! integer_onep (TREE_OPERAND (t
, 1))))
485 return negate_expr_p (TREE_OPERAND (t
, 1));
489 /* Negate -((double)float) as (double)(-float). */
490 if (TREE_CODE (type
) == REAL_TYPE
)
492 tree tem
= strip_float_extensions (t
);
494 return negate_expr_p (tem
);
499 /* Negate -f(x) as f(-x). */
500 if (negate_mathfn_p (get_call_combined_fn (t
)))
501 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
505 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
506 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
508 tree op1
= TREE_OPERAND (t
, 1);
509 if (wi::to_wide (op1
) == TYPE_PRECISION (type
) - 1)
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */
526 fold_negate_expr_1 (location_t loc
, tree t
)
528 tree type
= TREE_TYPE (t
);
531 switch (TREE_CODE (t
))
533 /* Convert - (~A) to A + 1. */
535 if (INTEGRAL_TYPE_P (type
))
536 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
537 build_one_cst (type
));
541 tem
= fold_negate_const (t
, type
);
542 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
543 || (ANY_INTEGRAL_TYPE_P (type
)
544 && !TYPE_OVERFLOW_TRAPS (type
)
545 && TYPE_OVERFLOW_WRAPS (type
))
546 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
551 tem
= fold_negate_const (t
, type
);
555 tem
= fold_negate_const (t
, type
);
560 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
561 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
563 return build_complex (type
, rpart
, ipart
);
569 tree_vector_builder elts
;
570 elts
.new_unary_operation (type
, t
, true);
571 unsigned int count
= elts
.encoded_nelts ();
572 for (unsigned int i
= 0; i
< count
; ++i
)
574 tree elt
= fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
575 if (elt
== NULL_TREE
)
577 elts
.quick_push (elt
);
580 return elts
.build ();
584 if (negate_expr_p (t
))
585 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
586 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
587 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
591 if (negate_expr_p (t
))
592 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
593 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
597 if (!TYPE_OVERFLOW_SANITIZED (type
))
598 return TREE_OPERAND (t
, 0);
602 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
603 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
605 /* -(A + B) -> (-B) - A. */
606 if (negate_expr_p (TREE_OPERAND (t
, 1)))
608 tem
= negate_expr (TREE_OPERAND (t
, 1));
609 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
610 tem
, TREE_OPERAND (t
, 0));
613 /* -(A + B) -> (-A) - B. */
614 if (negate_expr_p (TREE_OPERAND (t
, 0)))
616 tem
= negate_expr (TREE_OPERAND (t
, 0));
617 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
618 tem
, TREE_OPERAND (t
, 1));
624 /* - (A - B) -> B - A */
625 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
626 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
627 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
628 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
632 if (TYPE_UNSIGNED (type
))
638 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
640 tem
= TREE_OPERAND (t
, 1);
641 if (negate_expr_p (tem
))
642 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
643 TREE_OPERAND (t
, 0), negate_expr (tem
));
644 tem
= TREE_OPERAND (t
, 0);
645 if (negate_expr_p (tem
))
646 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
647 negate_expr (tem
), TREE_OPERAND (t
, 1));
654 if (TYPE_UNSIGNED (type
))
656 if (negate_expr_p (TREE_OPERAND (t
, 0)))
657 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
658 negate_expr (TREE_OPERAND (t
, 0)),
659 TREE_OPERAND (t
, 1));
660 /* In general we can't negate B in A / B, because if A is INT_MIN and
661 B is 1, we may turn this into INT_MIN / -1 which is undefined
662 and actually traps on some architectures. */
663 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t
))
664 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
665 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
666 && ! integer_onep (TREE_OPERAND (t
, 1))))
667 && negate_expr_p (TREE_OPERAND (t
, 1)))
668 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
670 negate_expr (TREE_OPERAND (t
, 1)));
674 /* Convert -((double)float) into (double)(-float). */
675 if (TREE_CODE (type
) == REAL_TYPE
)
677 tem
= strip_float_extensions (t
);
678 if (tem
!= t
&& negate_expr_p (tem
))
679 return fold_convert_loc (loc
, type
, negate_expr (tem
));
684 /* Negate -f(x) as f(-x). */
685 if (negate_mathfn_p (get_call_combined_fn (t
))
686 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
690 fndecl
= get_callee_fndecl (t
);
691 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
692 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
697 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
698 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
700 tree op1
= TREE_OPERAND (t
, 1);
701 if (wi::to_wide (op1
) == TYPE_PRECISION (type
) - 1)
703 tree ntype
= TYPE_UNSIGNED (type
)
704 ? signed_type_for (type
)
705 : unsigned_type_for (type
);
706 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
707 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
708 return fold_convert_loc (loc
, type
, temp
);
720 /* A wrapper for fold_negate_expr_1. */
723 fold_negate_expr (location_t loc
, tree t
)
725 tree type
= TREE_TYPE (t
);
727 tree tem
= fold_negate_expr_1 (loc
, t
);
728 if (tem
== NULL_TREE
)
730 return fold_convert_loc (loc
, type
, tem
);
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */
746 loc
= EXPR_LOCATION (t
);
747 type
= TREE_TYPE (t
);
750 tem
= fold_negate_expr (loc
, t
);
752 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
753 return fold_convert_loc (loc
, type
, tem
);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expressions.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
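
/* Illustrative example (added commentary): splitting IN = a + 10 with
   CODE == PLUS_EXPR yields *LITP = 10, *CONP = NULL and the variable part
   `a' as the return value, while IN = b - 10 yields *MINUS_LITP = 10 and
   returns `b'; the caller can then recombine the pieces with
   associate_trees.  */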
780 split_tree (tree in
, tree type
, enum tree_code code
,
781 tree
*minus_varp
, tree
*conp
, tree
*minus_conp
,
782 tree
*litp
, tree
*minus_litp
, int negate_p
)
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in
);
794 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
795 || TREE_CODE (in
) == FIXED_CST
)
797 else if (TREE_CODE (in
) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == POINTER_PLUS_EXPR
)
805 || (code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
806 || (code
== MINUS_EXPR
807 && (TREE_CODE (in
) == PLUS_EXPR
808 || TREE_CODE (in
) == POINTER_PLUS_EXPR
)))))
810 tree op0
= TREE_OPERAND (in
, 0);
811 tree op1
= TREE_OPERAND (in
, 1);
812 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
813 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
815 /* First see if either of the operands is a literal, then a constant. */
816 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
817 || TREE_CODE (op0
) == FIXED_CST
)
818 *litp
= op0
, op0
= 0;
819 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
820 || TREE_CODE (op1
) == FIXED_CST
)
821 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
823 if (op0
!= 0 && TREE_CONSTANT (op0
))
824 *conp
= op0
, op0
= 0;
825 else if (op1
!= 0 && TREE_CONSTANT (op1
))
826 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
828 /* If we haven't dealt with either operand, this is not a case we can
829 decompose. Otherwise, VAR is either of the ones remaining, if any. */
830 if (op0
!= 0 && op1
!= 0)
835 var
= op1
, neg_var_p
= neg1_p
;
837 /* Now do any needed negations. */
839 *minus_litp
= *litp
, *litp
= 0;
840 if (neg_conp_p
&& *conp
)
841 *minus_conp
= *conp
, *conp
= 0;
842 if (neg_var_p
&& var
)
843 *minus_varp
= var
, var
= 0;
845 else if (TREE_CONSTANT (in
))
847 else if (TREE_CODE (in
) == BIT_NOT_EXPR
848 && code
== PLUS_EXPR
)
850 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
851 when IN is constant. */
852 *litp
= build_minus_one_cst (type
);
853 *minus_varp
= TREE_OPERAND (in
, 0);
861 *minus_litp
= *litp
, *litp
= 0;
862 else if (*minus_litp
)
863 *litp
= *minus_litp
, *minus_litp
= 0;
865 *minus_conp
= *conp
, *conp
= 0;
866 else if (*minus_conp
)
867 *conp
= *minus_conp
, *minus_conp
= 0;
869 *minus_varp
= var
, var
= 0;
870 else if (*minus_varp
)
871 var
= *minus_varp
, *minus_varp
= 0;
875 && TREE_OVERFLOW_P (*litp
))
876 *litp
= drop_tree_overflow (*litp
);
878 && TREE_OVERFLOW_P (*minus_litp
))
879 *minus_litp
= drop_tree_overflow (*minus_litp
);
884 /* Re-associate trees split by the above function. T1 and T2 are
885 either expressions to associate or null. Return the new
886 expression, if any. LOC is the location of the new expression. If
887 we build an operation, do it in TYPE and with CODE. */
890 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
894 gcc_assert (t2
== 0 || code
!= MINUS_EXPR
);
900 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
901 try to fold this since we will have infinite recursion. But do
902 deal with any NEGATE_EXPRs. */
903 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
904 || TREE_CODE (t1
) == PLUS_EXPR
|| TREE_CODE (t2
) == PLUS_EXPR
905 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
907 if (code
== PLUS_EXPR
)
909 if (TREE_CODE (t1
) == NEGATE_EXPR
)
910 return build2_loc (loc
, MINUS_EXPR
, type
,
911 fold_convert_loc (loc
, type
, t2
),
912 fold_convert_loc (loc
, type
,
913 TREE_OPERAND (t1
, 0)));
914 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
915 return build2_loc (loc
, MINUS_EXPR
, type
,
916 fold_convert_loc (loc
, type
, t1
),
917 fold_convert_loc (loc
, type
,
918 TREE_OPERAND (t2
, 0)));
919 else if (integer_zerop (t2
))
920 return fold_convert_loc (loc
, type
, t1
);
922 else if (code
== MINUS_EXPR
)
924 if (integer_zerop (t2
))
925 return fold_convert_loc (loc
, type
, t1
);
928 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
929 fold_convert_loc (loc
, type
, t2
));
932 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
933 fold_convert_loc (loc
, type
, t2
));
936 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
937 for use in int_const_binop, size_binop and size_diffop. */
940 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
942 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
944 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
959 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
960 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
961 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
965 /* Combine two integer constants PARG1 and PARG2 under operation CODE
966 to produce a new constant. Return NULL_TREE if we don't know how
967 to evaluate CODE at compile-time. */
970 int_const_binop_1 (enum tree_code code
, const_tree parg1
, const_tree parg2
,
975 tree type
= TREE_TYPE (parg1
);
976 signop sign
= TYPE_SIGN (type
);
977 bool overflow
= false;
979 wi::tree_to_wide_ref arg1
= wi::to_wide (parg1
);
980 wide_int arg2
= wi::to_wide (parg2
, TYPE_PRECISION (type
));
985 res
= wi::bit_or (arg1
, arg2
);
989 res
= wi::bit_xor (arg1
, arg2
);
993 res
= wi::bit_and (arg1
, arg2
);
998 if (wi::neg_p (arg2
))
1001 if (code
== RSHIFT_EXPR
)
1007 if (code
== RSHIFT_EXPR
)
1008 /* It's unclear from the C standard whether shifts can overflow.
1009 The following code ignores overflow; perhaps a C standard
1010 interpretation ruling is needed. */
1011 res
= wi::rshift (arg1
, arg2
, sign
);
1013 res
= wi::lshift (arg1
, arg2
);
1018 if (wi::neg_p (arg2
))
1021 if (code
== RROTATE_EXPR
)
1022 code
= LROTATE_EXPR
;
1024 code
= RROTATE_EXPR
;
1027 if (code
== RROTATE_EXPR
)
1028 res
= wi::rrotate (arg1
, arg2
);
1030 res
= wi::lrotate (arg1
, arg2
);
1034 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1038 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1042 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1045 case MULT_HIGHPART_EXPR
:
1046 res
= wi::mul_high (arg1
, arg2
, sign
);
1049 case TRUNC_DIV_EXPR
:
1050 case EXACT_DIV_EXPR
:
1053 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1056 case FLOOR_DIV_EXPR
:
1059 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1065 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1068 case ROUND_DIV_EXPR
:
1071 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1074 case TRUNC_MOD_EXPR
:
1077 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1080 case FLOOR_MOD_EXPR
:
1083 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1089 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1092 case ROUND_MOD_EXPR
:
1095 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1099 res
= wi::min (arg1
, arg2
, sign
);
1103 res
= wi::max (arg1
, arg2
, sign
);
1110 t
= force_fit_type (type
, res
, overflowable
,
1111 (((sign
== SIGNED
|| overflowable
== -1)
1113 | TREE_OVERFLOW (parg1
) | TREE_OVERFLOW (parg2
)));
1119 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1121 return int_const_binop_1 (code
, arg1
, arg2
, 1);
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

distributes_over_addition_p (tree_code op, int opno)
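
/* Example (illustrative): left shift by a constant distributes over
   addition in operand 1, since (a + b) << 2 == (a << 2) + (b << 2) in a
   wrapping type; this is what allows a stepped vector encoding to be
   shifted element-wise without expanding all of its elements.  */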
1145 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1146 constant. We assume ARG1 and ARG2 have the same data type, or at least
1147 are the same kind of constant and the same machine mode. Return zero if
1148 combining the constants is not allowed in the current operating mode. */
1151 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1153 /* Sanity check for the recursive cases. */
1160 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1162 if (code
== POINTER_PLUS_EXPR
)
1163 return int_const_binop (PLUS_EXPR
,
1164 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1166 return int_const_binop (code
, arg1
, arg2
);
1169 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1174 REAL_VALUE_TYPE value
;
1175 REAL_VALUE_TYPE result
;
1179 /* The following codes are handled by real_arithmetic. */
1194 d1
= TREE_REAL_CST (arg1
);
1195 d2
= TREE_REAL_CST (arg2
);
1197 type
= TREE_TYPE (arg1
);
1198 mode
= TYPE_MODE (type
);
1200 /* Don't perform operation if we honor signaling NaNs and
1201 either operand is a signaling NaN. */
1202 if (HONOR_SNANS (mode
)
1203 && (REAL_VALUE_ISSIGNALING_NAN (d1
)
1204 || REAL_VALUE_ISSIGNALING_NAN (d2
)))
1207 /* Don't perform operation if it would raise a division
1208 by zero exception. */
1209 if (code
== RDIV_EXPR
1210 && real_equal (&d2
, &dconst0
)
1211 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1214 /* If either operand is a NaN, just return it. Otherwise, set up
1215 for floating-point trap; we return an overflow. */
1216 if (REAL_VALUE_ISNAN (d1
))
1218 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1221 t
= build_real (type
, d1
);
1224 else if (REAL_VALUE_ISNAN (d2
))
1226 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1229 t
= build_real (type
, d2
);
1233 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1234 real_convert (&result
, mode
, &value
);
1236 /* Don't constant fold this floating point operation if
1237 the result has overflowed and flag_trapping_math. */
1238 if (flag_trapping_math
1239 && MODE_HAS_INFINITIES (mode
)
1240 && REAL_VALUE_ISINF (result
)
1241 && !REAL_VALUE_ISINF (d1
)
1242 && !REAL_VALUE_ISINF (d2
))
      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
1249 if ((flag_rounding_math
1250 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1251 && (inexact
|| !real_identical (&result
, &value
)))
1254 t
= build_real (type
, result
);
1256 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1260 if (TREE_CODE (arg1
) == FIXED_CST
)
1262 FIXED_VALUE_TYPE f1
;
1263 FIXED_VALUE_TYPE f2
;
1264 FIXED_VALUE_TYPE result
;
1269 /* The following codes are handled by fixed_arithmetic. */
1275 case TRUNC_DIV_EXPR
:
1276 if (TREE_CODE (arg2
) != FIXED_CST
)
1278 f2
= TREE_FIXED_CST (arg2
);
1284 if (TREE_CODE (arg2
) != INTEGER_CST
)
1286 wi::tree_to_wide_ref w2
= wi::to_wide (arg2
);
1287 f2
.data
.high
= w2
.elt (1);
1288 f2
.data
.low
= w2
.ulow ();
1297 f1
= TREE_FIXED_CST (arg1
);
1298 type
= TREE_TYPE (arg1
);
1299 sat_p
= TYPE_SATURATING (type
);
1300 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1301 t
= build_fixed (type
, result
);
1302 /* Propagate overflow flags. */
1303 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1304 TREE_OVERFLOW (t
) = 1;
1308 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1310 tree type
= TREE_TYPE (arg1
);
1311 tree r1
= TREE_REALPART (arg1
);
1312 tree i1
= TREE_IMAGPART (arg1
);
1313 tree r2
= TREE_REALPART (arg2
);
1314 tree i2
= TREE_IMAGPART (arg2
);
1321 real
= const_binop (code
, r1
, r2
);
1322 imag
= const_binop (code
, i1
, i2
);
1326 if (COMPLEX_FLOAT_TYPE_P (type
))
1327 return do_mpc_arg2 (arg1
, arg2
, type
,
1328 /* do_nonfinite= */ folding_initializer
,
1331 real
= const_binop (MINUS_EXPR
,
1332 const_binop (MULT_EXPR
, r1
, r2
),
1333 const_binop (MULT_EXPR
, i1
, i2
));
1334 imag
= const_binop (PLUS_EXPR
,
1335 const_binop (MULT_EXPR
, r1
, i2
),
1336 const_binop (MULT_EXPR
, i1
, r2
));
1340 if (COMPLEX_FLOAT_TYPE_P (type
))
1341 return do_mpc_arg2 (arg1
, arg2
, type
,
1342 /* do_nonfinite= */ folding_initializer
,
1345 case TRUNC_DIV_EXPR
:
1347 case FLOOR_DIV_EXPR
:
1348 case ROUND_DIV_EXPR
:
1349 if (flag_complex_method
== 0)
            /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_straight().

               Expand complex division to scalars, straightforward algorithm.
               a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
               with t = br*br + bi*bi  */
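
            /* Worked example (illustrative): for a = 3 + 4i and b = 1 + 2i,
               t = 1*1 + 2*2 = 5, the real part is (3*1 + 4*2)/5 = 11/5 and
               the imaginary part is (4*1 - 3*2)/5 = -2/5, i.e.
               (3+4i)/(1+2i) = 2.2 - 0.4i.  */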
1359 = const_binop (PLUS_EXPR
,
1360 const_binop (MULT_EXPR
, r2
, r2
),
1361 const_binop (MULT_EXPR
, i2
, i2
));
1363 = const_binop (PLUS_EXPR
,
1364 const_binop (MULT_EXPR
, r1
, r2
),
1365 const_binop (MULT_EXPR
, i1
, i2
));
1367 = const_binop (MINUS_EXPR
,
1368 const_binop (MULT_EXPR
, i1
, r2
),
1369 const_binop (MULT_EXPR
, r1
, i2
));
1371 real
= const_binop (code
, t1
, magsquared
);
1372 imag
= const_binop (code
, t2
, magsquared
);
1376 /* Keep this algorithm in sync with
1377 tree-complex.c:expand_complex_div_wide().
1379 Expand complex division to scalars, modified algorithm to minimize
1380 overflow with wide input ranges. */
1381 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1382 fold_abs_const (r2
, TREE_TYPE (type
)),
1383 fold_abs_const (i2
, TREE_TYPE (type
)));
1385 if (integer_nonzerop (compare
))
              /* In the TRUE branch, we compute
                 ratio = br/bi;
                 div = (br * ratio) + bi;
                 tr = (ar * ratio) + ai;
                 ti = (ai * ratio) - ar;
                 tr = tr / div;
                 ti = ti / div;  */
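
              /* Worked example of this branch (illustrative): dividing
                 3 + 4i by 1 + 2i gives ratio = 1/2, div = 1*(1/2) + 2 = 5/2,
                 tr = 3*(1/2) + 4 = 11/2 and ti = 4*(1/2) - 3 = -1, so
                 tr/div = 11/5 and ti/div = -2/5 -- the same result as the
                 straightforward algorithm, but with smaller intermediate
                 magnitudes.  */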
1394 tree ratio
= const_binop (code
, r2
, i2
);
1395 tree div
= const_binop (PLUS_EXPR
, i2
,
1396 const_binop (MULT_EXPR
, r2
, ratio
));
1397 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1398 real
= const_binop (PLUS_EXPR
, real
, i1
);
1399 real
= const_binop (code
, real
, div
);
1401 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1402 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1403 imag
= const_binop (code
, imag
, div
);
1407 /* In the FALSE branch, we compute
1409 divisor = (d * ratio) + c;
1410 tr = (b * ratio) + a;
1411 ti = b - (a * ratio);
1414 tree ratio
= const_binop (code
, i2
, r2
);
1415 tree div
= const_binop (PLUS_EXPR
, r2
,
1416 const_binop (MULT_EXPR
, i2
, ratio
));
1418 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1419 real
= const_binop (PLUS_EXPR
, real
, r1
);
1420 real
= const_binop (code
, real
, div
);
1422 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1423 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1424 imag
= const_binop (code
, imag
, div
);
1434 return build_complex (type
, real
, imag
);
1437 if (TREE_CODE (arg1
) == VECTOR_CST
1438 && TREE_CODE (arg2
) == VECTOR_CST
1439 && (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
))
1440 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
))))
1442 tree type
= TREE_TYPE (arg1
);
1444 if (VECTOR_CST_STEPPED_P (arg1
)
1445 && VECTOR_CST_STEPPED_P (arg2
))
1446 /* We can operate directly on the encoding if:
1448 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1450 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1452 Addition and subtraction are the supported operators
1453 for which this is true. */
1454 step_ok_p
= (code
== PLUS_EXPR
|| code
== MINUS_EXPR
);
1455 else if (VECTOR_CST_STEPPED_P (arg1
))
1456 /* We can operate directly on stepped encodings if:
1460 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1462 which is true if (x -> x op c) distributes over addition. */
1463 step_ok_p
= distributes_over_addition_p (code
, 1);
1465 /* Similarly in reverse. */
1466 step_ok_p
= distributes_over_addition_p (code
, 2);
1467 tree_vector_builder elts
;
1468 if (!elts
.new_binary_operation (type
, arg1
, arg2
, step_ok_p
))
1470 unsigned int count
= elts
.encoded_nelts ();
1471 for (unsigned int i
= 0; i
< count
; ++i
)
1473 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1474 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1476 tree elt
= const_binop (code
, elem1
, elem2
);
          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
1480 if (elt
== NULL_TREE
)
1482 elts
.quick_push (elt
);
1485 return elts
.build ();
1488 /* Shifts allow a scalar offset for a vector. */
1489 if (TREE_CODE (arg1
) == VECTOR_CST
1490 && TREE_CODE (arg2
) == INTEGER_CST
)
1492 tree type
= TREE_TYPE (arg1
);
1493 bool step_ok_p
= distributes_over_addition_p (code
, 1);
1494 tree_vector_builder elts
;
1495 if (!elts
.new_unary_operation (type
, arg1
, step_ok_p
))
1497 unsigned int count
= elts
.encoded_nelts ();
1498 for (unsigned int i
= 0; i
< count
; ++i
)
1500 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1502 tree elt
= const_binop (code
, elem1
, arg2
);
          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
1506 if (elt
== NULL_TREE
)
1508 elts
.quick_push (elt
);
1511 return elts
.build ();
1516 /* Overload that adds a TYPE parameter to be able to dispatch
1517 to fold_relational_const. */
1520 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1522 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1523 return fold_relational_const (code
, type
, arg1
, arg2
);
  /* ??? Until we make the const_binop worker take the type of the
     result as an argument, put those cases that need it here.  */
1529 case VEC_SERIES_EXPR
:
1530 if (CONSTANT_CLASS_P (arg1
)
1531 && CONSTANT_CLASS_P (arg2
))
1532 return build_vec_series (type
, arg1
, arg2
);
1536 if ((TREE_CODE (arg1
) == REAL_CST
1537 && TREE_CODE (arg2
) == REAL_CST
)
1538 || (TREE_CODE (arg1
) == INTEGER_CST
1539 && TREE_CODE (arg2
) == INTEGER_CST
))
1540 return build_complex (type
, arg1
, arg2
);
1543 case POINTER_DIFF_EXPR
:
1544 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1546 offset_int res
= wi::sub (wi::to_offset (arg1
),
1547 wi::to_offset (arg2
));
1548 return force_fit_type (type
, res
, 1,
1549 TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1553 case VEC_PACK_TRUNC_EXPR
:
1554 case VEC_PACK_FIX_TRUNC_EXPR
:
1556 unsigned int out_nelts
, in_nelts
, i
;
1558 if (TREE_CODE (arg1
) != VECTOR_CST
1559 || TREE_CODE (arg2
) != VECTOR_CST
)
1562 in_nelts
= VECTOR_CST_NELTS (arg1
);
1563 out_nelts
= in_nelts
* 2;
1564 gcc_assert (in_nelts
== VECTOR_CST_NELTS (arg2
)
1565 && out_nelts
== TYPE_VECTOR_SUBPARTS (type
));
1567 tree_vector_builder
elts (type
, out_nelts
, 1);
1568 for (i
= 0; i
< out_nelts
; i
++)
1570 tree elt
= (i
< in_nelts
1571 ? VECTOR_CST_ELT (arg1
, i
)
1572 : VECTOR_CST_ELT (arg2
, i
- in_nelts
));
1573 elt
= fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1574 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1575 TREE_TYPE (type
), elt
);
1576 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1578 elts
.quick_push (elt
);
1581 return elts
.build ();
1584 case VEC_WIDEN_MULT_LO_EXPR
:
1585 case VEC_WIDEN_MULT_HI_EXPR
:
1586 case VEC_WIDEN_MULT_EVEN_EXPR
:
1587 case VEC_WIDEN_MULT_ODD_EXPR
:
1589 unsigned int out_nelts
, in_nelts
, out
, ofs
, scale
;
1591 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1594 in_nelts
= VECTOR_CST_NELTS (arg1
);
1595 out_nelts
= in_nelts
/ 2;
1596 gcc_assert (in_nelts
== VECTOR_CST_NELTS (arg2
)
1597 && out_nelts
== TYPE_VECTOR_SUBPARTS (type
));
1599 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1600 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? out_nelts
: 0;
1601 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1602 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : out_nelts
;
1603 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1605 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1608 tree_vector_builder
elts (type
, out_nelts
, 1);
1609 for (out
= 0; out
< out_nelts
; out
++)
1611 unsigned int in
= (out
<< scale
) + ofs
;
1612 tree t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1613 VECTOR_CST_ELT (arg1
, in
));
1614 tree t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1615 VECTOR_CST_ELT (arg2
, in
));
1617 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1619 tree elt
= const_binop (MULT_EXPR
, t1
, t2
);
1620 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1622 elts
.quick_push (elt
);
1625 return elts
.build ();
1631 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1634 /* Make sure type and arg0 have the same saturating flag. */
1635 gcc_checking_assert (TYPE_SATURATING (type
)
1636 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1638 return const_binop (code
, arg1
, arg2
);
1641 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1642 Return zero if computing the constants is not possible. */
1645 const_unop (enum tree_code code
, tree type
, tree arg0
)
1647 /* Don't perform the operation, other than NEGATE and ABS, if
1648 flag_signaling_nans is on and the operand is a signaling NaN. */
1649 if (TREE_CODE (arg0
) == REAL_CST
1650 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
1651 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0
))
1652 && code
!= NEGATE_EXPR
1653 && code
!= ABS_EXPR
)
1660 case FIX_TRUNC_EXPR
:
1661 case FIXED_CONVERT_EXPR
:
1662 return fold_convert_const (code
, type
, arg0
);
1664 case ADDR_SPACE_CONVERT_EXPR
:
1665 /* If the source address is 0, and the source address space
1666 cannot have a valid object at 0, fold to dest type null. */
1667 if (integer_zerop (arg0
)
1668 && !(targetm
.addr_space
.zero_address_valid
1669 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
))))))
1670 return fold_convert_const (code
, type
, arg0
);
1673 case VIEW_CONVERT_EXPR
:
1674 return fold_view_convert_expr (type
, arg0
);
1678 /* Can't call fold_negate_const directly here as that doesn't
1679 handle all cases and we might not be able to negate some
1681 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1682 if (tem
&& CONSTANT_CLASS_P (tem
))
1688 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1689 return fold_abs_const (arg0
, type
);
1693 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1695 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1697 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1702 if (TREE_CODE (arg0
) == INTEGER_CST
)
1703 return fold_not_const (arg0
, type
);
1704 /* Perform BIT_NOT_EXPR on each element individually. */
1705 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1709 /* This can cope with stepped encodings because ~x == -1 - x. */
1710 tree_vector_builder elements
;
1711 elements
.new_unary_operation (type
, arg0
, true);
1712 unsigned int i
, count
= elements
.encoded_nelts ();
1713 for (i
= 0; i
< count
; ++i
)
1715 elem
= VECTOR_CST_ELT (arg0
, i
);
1716 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1717 if (elem
== NULL_TREE
)
1719 elements
.quick_push (elem
);
1722 return elements
.build ();
1726 case TRUTH_NOT_EXPR
:
1727 if (TREE_CODE (arg0
) == INTEGER_CST
)
1728 return constant_boolean_node (integer_zerop (arg0
), type
);
1732 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1733 return fold_convert (type
, TREE_REALPART (arg0
));
1737 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1738 return fold_convert (type
, TREE_IMAGPART (arg0
));
1741 case VEC_UNPACK_LO_EXPR
:
1742 case VEC_UNPACK_HI_EXPR
:
1743 case VEC_UNPACK_FLOAT_LO_EXPR
:
1744 case VEC_UNPACK_FLOAT_HI_EXPR
:
1746 unsigned int out_nelts
, in_nelts
, i
;
1747 enum tree_code subcode
;
1749 if (TREE_CODE (arg0
) != VECTOR_CST
)
1752 in_nelts
= VECTOR_CST_NELTS (arg0
);
1753 out_nelts
= in_nelts
/ 2;
1754 gcc_assert (out_nelts
== TYPE_VECTOR_SUBPARTS (type
));
1756 unsigned int offset
= 0;
1757 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1758 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1761 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1764 subcode
= FLOAT_EXPR
;
1766 tree_vector_builder
elts (type
, out_nelts
, 1);
1767 for (i
= 0; i
< out_nelts
; i
++)
1769 tree elt
= fold_convert_const (subcode
, TREE_TYPE (type
),
1770 VECTOR_CST_ELT (arg0
, i
+ offset
));
1771 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1773 elts
.quick_push (elt
);
1776 return elts
.build ();
1779 case VEC_DUPLICATE_EXPR
:
1780 if (CONSTANT_CLASS_P (arg0
))
1781 return build_vector_from_val (type
, arg0
);
1791 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1792 indicates which particular sizetype to create. */
1795 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1797 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1800 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1801 is a tree code. The type of the result is taken from the operands.
1802 Both must be equivalent integer types, ala int_binop_types_match_p.
1803 If the operands are constant, so is the result. */
1806 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1808 tree type
= TREE_TYPE (arg0
);
1810 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1811 return error_mark_node
;
1813 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1816 /* Handle the special case of two integer constants faster. */
1817 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1819 /* And some specific cases even faster than that. */
1820 if (code
== PLUS_EXPR
)
1822 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1824 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1827 else if (code
== MINUS_EXPR
)
1829 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1832 else if (code
== MULT_EXPR
)
1834 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1838 /* Handle general case of two integer constants. For sizetype
1839 constant calculations we always want to know about overflow,
1840 even in the unsigned case. */
1841 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1844 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1847 /* Given two values, either both of sizetype or both of bitsizetype,
1848 compute the difference between the two values. Return the value
1849 in signed type corresponding to the type of the operands. */
1852 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1854 tree type
= TREE_TYPE (arg0
);
1857 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1860 /* If the type is already signed, just do the simple thing. */
1861 if (!TYPE_UNSIGNED (type
))
1862 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1864 if (type
== sizetype
)
1866 else if (type
== bitsizetype
)
1867 ctype
= sbitsizetype
;
1869 ctype
= signed_type_for (type
);
1871 /* If either operand is not a constant, do the conversions to the signed
1872 type and subtract. The hardware will do the right thing with any
1873 overflow in the subtraction. */
1874 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1875 return size_binop_loc (loc
, MINUS_EXPR
,
1876 fold_convert_loc (loc
, ctype
, arg0
),
1877 fold_convert_loc (loc
, ctype
, arg1
));
1879 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1880 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1881 overflow) and negate (which can't either). Special-case a result
1882 of zero while we're here. */
1883 if (tree_int_cst_equal (arg0
, arg1
))
1884 return build_int_cst (ctype
, 0);
1885 else if (tree_int_cst_lt (arg1
, arg0
))
1886 return fold_convert_loc (loc
, ctype
,
1887 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1889 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1890 fold_convert_loc (loc
, ctype
,
1891 size_binop_loc (loc
,
1896 /* A subroutine of fold_convert_const handling conversions of an
1897 INTEGER_CST to another integer type. */
1900 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
  /* Given an integer constant, make a new constant with the new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
1905 return force_fit_type (type
, wi::to_widest (arg1
),
1906 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1907 TREE_OVERFLOW (arg1
));
1910 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1911 to an integer type. */
1914 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1916 bool overflow
= false;
  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
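
  /* Worked example (illustrative): converting the REAL_CST 1.0e30 to a
     32-bit int saturates to INT_MAX (2147483647) and sets TREE_OVERFLOW on
     the result, converting -1.0e30 saturates to INT_MIN, and converting a
     NaN yields 0 with the overflow flag set.  */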
1930 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1934 case FIX_TRUNC_EXPR
:
1935 real_trunc (&r
, VOIDmode
, &x
);
1942 /* If R is NaN, return zero and show we have an overflow. */
1943 if (REAL_VALUE_ISNAN (r
))
1946 val
= wi::zero (TYPE_PRECISION (type
));
1949 /* See if R is less than the lower bound or greater than the
1954 tree lt
= TYPE_MIN_VALUE (type
);
1955 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1956 if (real_less (&r
, &l
))
1959 val
= wi::to_wide (lt
);
1965 tree ut
= TYPE_MAX_VALUE (type
);
1968 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1969 if (real_less (&u
, &r
))
1972 val
= wi::to_wide (ut
);
1978 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1980 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1984 /* A subroutine of fold_convert_const handling conversions of a
1985 FIXED_CST to an integer type. */
1988 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1991 double_int temp
, temp_trunc
;
1994 /* Right shift FIXED_CST to temp by fbit. */
1995 temp
= TREE_FIXED_CST (arg1
).data
;
1996 mode
= TREE_FIXED_CST (arg1
).mode
;
1997 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1999 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
2000 HOST_BITS_PER_DOUBLE_INT
,
2001 SIGNED_FIXED_POINT_MODE_P (mode
));
2003 /* Left shift temp to temp_trunc by fbit. */
2004 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
2005 HOST_BITS_PER_DOUBLE_INT
,
2006 SIGNED_FIXED_POINT_MODE_P (mode
));
2010 temp
= double_int_zero
;
2011 temp_trunc
= double_int_zero
;
  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP when the truncation discarded
     nonzero fractional bits.  */
2016 if (SIGNED_FIXED_POINT_MODE_P (mode
)
2017 && temp_trunc
.is_negative ()
2018 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
2019 temp
+= double_int_one
;
2021 /* Given a fixed-point constant, make new constant with new type,
2022 appropriately sign-extended or truncated. */
2023 t
= force_fit_type (type
, temp
, -1,
2024 (temp
.is_negative ()
2025 && (TYPE_UNSIGNED (type
)
2026 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
2027 | TREE_OVERFLOW (arg1
));
2032 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2033 to another floating point type. */
2036 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
2038 REAL_VALUE_TYPE value
;
2041 /* Don't perform the operation if flag_signaling_nans is on
2042 and the operand is a signaling NaN. */
2043 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
2044 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1
)))
2047 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
2048 t
= build_real (type
, value
);
2050 /* If converting an infinity or NAN to a representation that doesn't
2051 have one, set the overflow bit so that we can produce some kind of
2052 error message at the appropriate point if necessary. It's not the
2053 most user-friendly message, but it's better than nothing. */
2054 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
2055 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
2056 TREE_OVERFLOW (t
) = 1;
2057 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
2058 && !MODE_HAS_NANS (TYPE_MODE (type
)))
2059 TREE_OVERFLOW (t
) = 1;
2060 /* Regular overflow, conversion produced an infinity in a mode that
2061 can't represent them. */
2062 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
2063 && REAL_VALUE_ISINF (value
)
2064 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2065 TREE_OVERFLOW (t
) = 1;
2067 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2071 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2072 to a floating point type. */
2075 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2077 REAL_VALUE_TYPE value
;
2080 real_convert_from_fixed (&value
, SCALAR_FLOAT_TYPE_MODE (type
),
2081 &TREE_FIXED_CST (arg1
));
2082 t
= build_real (type
, value
);
2084 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2088 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2089 to another fixed-point type. */
2092 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2094 FIXED_VALUE_TYPE value
;
2098 overflow_p
= fixed_convert (&value
, SCALAR_TYPE_MODE (type
),
2099 &TREE_FIXED_CST (arg1
), TYPE_SATURATING (type
));
2100 t
= build_fixed (type
, value
);
2102 /* Propagate overflow flags. */
2103 if (overflow_p
| TREE_OVERFLOW (arg1
))
2104 TREE_OVERFLOW (t
) = 1;
2108 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2109 to a fixed-point type. */
2112 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2114 FIXED_VALUE_TYPE value
;
2119 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2121 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2122 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2123 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? HOST_WIDE_INT_M1
: 0;
2125 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2127 overflow_p
= fixed_convert_from_int (&value
, SCALAR_TYPE_MODE (type
), di
,
2128 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2129 TYPE_SATURATING (type
));
2130 t
= build_fixed (type
, value
);
2132 /* Propagate overflow flags. */
2133 if (overflow_p
| TREE_OVERFLOW (arg1
))
2134 TREE_OVERFLOW (t
) = 1;
2138 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2139 to a fixed-point type. */
2142 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2144 FIXED_VALUE_TYPE value
;
2148 overflow_p
= fixed_convert_from_real (&value
, SCALAR_TYPE_MODE (type
),
2149 &TREE_REAL_CST (arg1
),
2150 TYPE_SATURATING (type
));
2151 t
= build_fixed (type
, value
);
2153 /* Propagate overflow flags. */
2154 if (overflow_p
| TREE_OVERFLOW (arg1
))
2155 TREE_OVERFLOW (t
) = 1;
2159 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2160 type TYPE. If no simplification can be done return NULL_TREE. */
2163 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2165 if (TREE_TYPE (arg1
) == type
)
2168 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2169 || TREE_CODE (type
) == OFFSET_TYPE
)
2171 if (TREE_CODE (arg1
) == INTEGER_CST
)
2172 return fold_convert_const_int_from_int (type
, arg1
);
2173 else if (TREE_CODE (arg1
) == REAL_CST
)
2174 return fold_convert_const_int_from_real (code
, type
, arg1
);
2175 else if (TREE_CODE (arg1
) == FIXED_CST
)
2176 return fold_convert_const_int_from_fixed (type
, arg1
);
2178 else if (TREE_CODE (type
) == REAL_TYPE
)
2180 if (TREE_CODE (arg1
) == INTEGER_CST
)
2181 return build_real_from_int_cst (type
, arg1
);
2182 else if (TREE_CODE (arg1
) == REAL_CST
)
2183 return fold_convert_const_real_from_real (type
, arg1
);
2184 else if (TREE_CODE (arg1
) == FIXED_CST
)
2185 return fold_convert_const_real_from_fixed (type
, arg1
);
2187 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2189 if (TREE_CODE (arg1
) == FIXED_CST
)
2190 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2191 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2192 return fold_convert_const_fixed_from_int (type
, arg1
);
2193 else if (TREE_CODE (arg1
) == REAL_CST
)
2194 return fold_convert_const_fixed_from_real (type
, arg1
);
2196 else if (TREE_CODE (type
) == VECTOR_TYPE
)
2198 if (TREE_CODE (arg1
) == VECTOR_CST
2199 && TYPE_VECTOR_SUBPARTS (type
) == VECTOR_CST_NELTS (arg1
))
2201 tree elttype
= TREE_TYPE (type
);
2202 tree arg1_elttype
= TREE_TYPE (TREE_TYPE (arg1
));
2203 /* We can't handle steps directly when extending, since the
2204 values need to wrap at the original precision first. */
2206 = (INTEGRAL_TYPE_P (elttype
)
2207 && INTEGRAL_TYPE_P (arg1_elttype
)
2208 && TYPE_PRECISION (elttype
) <= TYPE_PRECISION (arg1_elttype
));
2209 tree_vector_builder v
;
2210 if (!v
.new_unary_operation (type
, arg1
, step_ok_p
))
2212 unsigned int len
= v
.encoded_nelts ();
2213 for (unsigned int i
= 0; i
< len
; ++i
)
2215 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2216 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2217 if (cvt
== NULL_TREE
)
2227 /* Construct a vector of zero elements of vector type TYPE. */
2230 build_zero_vector (tree type
)
2234 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2235 return build_vector_from_val (type
, t
);
2238 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2241 fold_convertible_p (const_tree type
, const_tree arg
)
2243 tree orig
= TREE_TYPE (arg
);
2248 if (TREE_CODE (arg
) == ERROR_MARK
2249 || TREE_CODE (type
) == ERROR_MARK
2250 || TREE_CODE (orig
) == ERROR_MARK
)
2253 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2256 switch (TREE_CODE (type
))
2258 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2259 case POINTER_TYPE
: case REFERENCE_TYPE
:
2261 return (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2262 || TREE_CODE (orig
) == OFFSET_TYPE
);
2265 case FIXED_POINT_TYPE
:
2268 return TREE_CODE (type
) == TREE_CODE (orig
);
2275 /* Convert expression ARG to type TYPE. Used by the middle-end for
2276 simple conversions in preference to calling the front-end's convert. */
2279 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2281 tree orig
= TREE_TYPE (arg
);
2287 if (TREE_CODE (arg
) == ERROR_MARK
2288 || TREE_CODE (type
) == ERROR_MARK
2289 || TREE_CODE (orig
) == ERROR_MARK
)
2290 return error_mark_node
;
2292 switch (TREE_CODE (type))
2295 case REFERENCE_TYPE:
2296 /* Handle conversions between pointers to different address spaces. */
2297 if (POINTER_TYPE_P (orig)
2298 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2299 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2300 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2303 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2305 if (TREE_CODE (arg
) == INTEGER_CST
)
2307 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2308 if (tem
!= NULL_TREE
)
2311 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2312 || TREE_CODE (orig
) == OFFSET_TYPE
)
2313 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2314 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2315 return fold_convert_loc (loc
, type
,
2316 fold_build1_loc (loc
, REALPART_EXPR
,
2317 TREE_TYPE (orig
), arg
));
2318 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2319 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2320 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2323 if (TREE_CODE (arg
) == INTEGER_CST
)
2325 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2326 if (tem
!= NULL_TREE
)
2329 else if (TREE_CODE (arg
) == REAL_CST
)
2331 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2332 if (tem
!= NULL_TREE
)
2335 else if (TREE_CODE (arg
) == FIXED_CST
)
2337 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2338 if (tem
!= NULL_TREE
)
2342 switch (TREE_CODE (orig
))
2345 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2346 case POINTER_TYPE
: case REFERENCE_TYPE
:
2347 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2350 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2352 case FIXED_POINT_TYPE
:
2353 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2356 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2357 return fold_convert_loc (loc
, type
, tem
);
2363 case FIXED_POINT_TYPE
:
2364 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2365 || TREE_CODE (arg
) == REAL_CST
)
2367 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2368 if (tem
!= NULL_TREE
)
2369 goto fold_convert_exit
;
2372 switch (TREE_CODE (orig
))
2374 case FIXED_POINT_TYPE
:
2379 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2382 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2383 return fold_convert_loc (loc
, type
, tem
);
2390 switch (TREE_CODE (orig
))
2393 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2394 case POINTER_TYPE
: case REFERENCE_TYPE
:
2396 case FIXED_POINT_TYPE
:
2397 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2398 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2399 fold_convert_loc (loc
, TREE_TYPE (type
),
2400 integer_zero_node
));
2405 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2407 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2408 TREE_OPERAND (arg
, 0));
2409 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2410 TREE_OPERAND (arg
, 1));
2411 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2414 arg
= save_expr (arg
);
2415 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2416 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2417 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2418 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2419 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2427 if (integer_zerop (arg
))
2428 return build_zero_vector (type
);
2429 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2430 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2431 || TREE_CODE (orig
) == VECTOR_TYPE
);
2432 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2435 tem
= fold_ignored_result (arg
);
2436 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2439 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2440 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2444 protected_set_expr_location_unshare (tem
, loc
);
2448 /* Return false if expr can be assumed not to be an lvalue, true
2452 maybe_lvalue_p (const_tree x
)
2454 /* We only need to wrap lvalue tree codes. */
2455 switch (TREE_CODE (x
))
2468 case ARRAY_RANGE_REF
:
2474 case PREINCREMENT_EXPR
:
2475 case PREDECREMENT_EXPR
:
2477 case TRY_CATCH_EXPR
:
2478 case WITH_CLEANUP_EXPR
:
2487 /* Assume the worst for front-end tree codes. */
2488 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2496 /* Return an expr equal to X but certainly not valid as an lvalue. */
2499 non_lvalue_loc (location_t loc
, tree x
)
2501 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2506 if (! maybe_lvalue_p (x
))
2508 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2511 /* When pedantic, return an expr equal to X but certainly not valid as a
2512 pedantic lvalue. Otherwise, return X. */
2515 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2517 return protected_set_expr_location_unshare (x
, loc
);
2520 /* Given a tree comparison code, return the code that is the logical inverse.
2521 It is generally not safe to do this for floating-point comparisons, except
2522 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2523 ERROR_MARK in this case. */
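/* Illustrative note (a sketch, not from the original sources): with NaNs
   honored but -ftrapping-math disabled, inverting LT_EXPR yields UNGE_EXPR
   so that the negation also holds when either operand is a NaN; with
   trapping math enabled the inversion of such an ordered comparison is
   refused, because the unordered form would not trap where the ordered
   one does.  */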
2526 invert_tree_comparison (enum tree_code code, bool honor_nans)
2528 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2529 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2539 return honor_nans ? UNLE_EXPR : LE_EXPR;
2541 return honor_nans ? UNLT_EXPR : LT_EXPR;
2543 return honor_nans ? UNGE_EXPR : GE_EXPR;
2545 return honor_nans ? UNGT_EXPR : GT_EXPR;
2559 return UNORDERED_EXPR;
2560 case UNORDERED_EXPR:
2561 return ORDERED_EXPR;
2567 /* Similar, but return the comparison that results if the operands are
2568 swapped. This is safe for floating-point. */
2571 swap_tree_comparison (enum tree_code code
)
2578 case UNORDERED_EXPR
:
2604 /* Convert a comparison tree code from an enum tree_code representation
2605 into a compcode bit-based encoding. This function is the inverse of
2606 compcode_to_comparison. */
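/* Illustrative note (a sketch, assuming the usual GCC encoding of enum
   comparison_code): "less", "equal", "greater" and "unordered" each occupy
   one bit, so for instance COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ and
   COMPCODE_NE is COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT.  This is what
   lets combine_comparisons below AND and OR comparison codes directly.  */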
2608 static enum comparison_code
2609 comparison_to_compcode (enum tree_code code)
2626 return COMPCODE_ORD;
2627 case UNORDERED_EXPR:
2628 return COMPCODE_UNORD;
2630 return COMPCODE_UNLT;
2632 return COMPCODE_UNEQ;
2634 return COMPCODE_UNLE;
2636 return COMPCODE_UNGT;
2638 return COMPCODE_LTGT;
2640 return COMPCODE_UNGE;
2646 /* Convert a compcode bit-based encoding of a comparison operator back
2647 to GCC's enum tree_code representation. This function is the
2648 inverse of comparison_to_compcode. */
2650 static enum tree_code
2651 compcode_to_comparison (enum comparison_code code)
2668 return ORDERED_EXPR;
2669 case COMPCODE_UNORD:
2670 return UNORDERED_EXPR;
2688 /* Return a tree for the comparison which is the combination of
2689 doing the AND or OR (depending on CODE) of the two operations LCODE
2690 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2691 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2692 if this makes the transformation invalid. */
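/* Illustrative example (a sketch, not from the original sources): for a
   TRUTH_ORIF_EXPR of LCODE LT_EXPR and RCODE EQ_EXPR on identical
   operands, the bit-based codes are ORed, COMPCODE_LT | COMPCODE_EQ ==
   COMPCODE_LE, so (x < y) || (x == y) folds to x <= y, provided the NaN
   and trapping checks below do not veto the transformation.  */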
2695 combine_comparisons (location_t loc,
2696 enum tree_code code, enum tree_code lcode,
2697 enum tree_code rcode, tree truth_type,
2698 tree ll_arg, tree lr_arg)
2700 bool honor_nans = HONOR_NANS (ll_arg);
2701 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2702 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2707 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2708 compcode = lcompcode & rcompcode;
2711 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2712 compcode = lcompcode | rcompcode;
2721 /* Eliminate unordered comparisons, as well as LTGT and ORD
2722 which are not used unless the mode has NaNs. */
2723 compcode &= ~COMPCODE_UNORD;
2724 if (compcode == COMPCODE_LTGT)
2725 compcode = COMPCODE_NE;
2726 else if (compcode == COMPCODE_ORD)
2727 compcode = COMPCODE_TRUE;
2729 else if (flag_trapping_math
)
2731 /* Check that the original operation and the optimized ones will trap
2732 under the same condition. */
2733 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2734 && (lcompcode
!= COMPCODE_EQ
)
2735 && (lcompcode
!= COMPCODE_ORD
);
2736 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2737 && (rcompcode
!= COMPCODE_EQ
)
2738 && (rcompcode
!= COMPCODE_ORD
);
2739 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2740 && (compcode
!= COMPCODE_EQ
)
2741 && (compcode
!= COMPCODE_ORD
);
2743 /* In a short-circuited boolean expression the LHS might be
2744 such that the RHS, if evaluated, will never trap. For
2745 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2746 if neither x nor y is NaN. (This is a mixed blessing: for
2747 example, the expression above will never trap, hence
2748 optimizing it to x < y would be invalid). */
2749 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2750 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2753 /* If the comparison was short-circuited, and only the RHS
2754 trapped, we may now generate a spurious trap. */
2756 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2759 /* If we changed the conditions that cause a trap, we lose. */
2760 if ((ltrap
|| rtrap
) != trap
)
2764 if (compcode == COMPCODE_TRUE)
2765 return constant_boolean_node (true, truth_type);
2766 else if (compcode == COMPCODE_FALSE)
2767 return constant_boolean_node (false, truth_type);
2770 enum tree_code tcode;
2772 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2773 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2777 /* Return nonzero if two operands (typically of the same tree node)
2778 are necessarily equal. FLAGS modifies behavior as follows:
2780 If OEP_ONLY_CONST is set, only return nonzero for constants.
2781 This function tests whether the operands are indistinguishable;
2782 it does not test whether they are equal using C's == operation.
2783 The distinction is important for IEEE floating point, because
2784 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2785 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2787 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2788 even though it may hold multiple values during a function.
2789 This is because a GCC tree node guarantees that nothing else is
2790 executed between the evaluation of its "operands" (which may often
2791 be evaluated in arbitrary order). Hence if the operands themselves
2792 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2793 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2794 unset means assuming isochronic (or instantaneous) tree equivalence.
2795 Unless comparing arbitrary expression trees, such as from different
2796 statements, this flag can usually be left unset.
2798 If OEP_PURE_SAME is set, then pure functions with identical arguments
2799 are considered the same. It is used when the caller has other ways
2800 to ensure that global memory is unchanged in between.
2802 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2803 not values of expressions.
2805 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2806 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2808 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2809 any operand with side effects. This is unnecessarily conservative in the
2810 case we know that arg0 and arg1 are in disjoint code paths (such as in
2811 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2812 addresses with TREE_CONSTANT flag set so we know that &var == &var
2813 even if var is volatile. */
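/* Illustrative example (a sketch, not from the original sources): two
   structurally identical, side-effect-free trees such as a + b and a + b
   compare equal with FLAGS == 0, whereas with OEP_ONLY_CONST only
   constants (e.g. two INTEGER_CSTs with the same value) are considered
   equal.  */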
2816 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2818 /* When checking, verify at the outermost operand_equal_p call that
2819 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2821 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2823 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2827 inchash::hash hstate0 (0), hstate1 (0);
2828 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2829 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2830 hashval_t h0 = hstate0.end ();
2831 hashval_t h1 = hstate1.end ();
2832 gcc_assert (h0 == h1);
2840 /* If either is ERROR_MARK, they aren't equal. */
2841 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2842 || TREE_TYPE (arg0
) == error_mark_node
2843 || TREE_TYPE (arg1
) == error_mark_node
)
2846 /* Similar, if either does not have a type (like a released SSA name),
2847 they aren't equal. */
2848 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2851 /* We cannot consider pointers to different address space equal. */
2852 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2853 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2854 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2855 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2858 /* Check equality of integer constants before bailing out due to
2859 precision differences. */
2860 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2862 /* Address of INTEGER_CST is not defined; check that we did not forget
2863 to drop the OEP_ADDRESS_OF flags. */
2864 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2865 return tree_int_cst_equal (arg0
, arg1
);
2868 if (!(flags
& OEP_ADDRESS_OF
))
2870 /* If both types don't have the same signedness, then we can't consider
2871 them equal. We must check this before the STRIP_NOPS calls
2872 because they may change the signedness of the arguments. As pointers
2873 strictly don't have a signedness, require either two pointers or
2874 two non-pointers as well. */
2875 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2876 || POINTER_TYPE_P (TREE_TYPE (arg0
))
2877 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2880 /* If both types don't have the same precision, then it is not safe
2882 if (element_precision (TREE_TYPE (arg0
))
2883 != element_precision (TREE_TYPE (arg1
)))
2890 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2891 sanity check once the issue is solved. */
2893 /* Addresses of conversions and SSA_NAMEs (and many other things)
2894 are not defined. Check that we did not forget to drop the
2895 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2896 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
2897 && TREE_CODE (arg0
) != SSA_NAME
);
2900 /* In case both args are comparisons but with different comparison
2901 code, try to swap the comparison operands of one arg to produce
2902 a match and compare that variant. */
2903 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2904 && COMPARISON_CLASS_P (arg0
)
2905 && COMPARISON_CLASS_P (arg1
))
2907 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2909 if (TREE_CODE (arg0
) == swap_code
)
2910 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2911 TREE_OPERAND (arg1
, 1), flags
)
2912 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2913 TREE_OPERAND (arg1
, 0), flags
);
2916 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
2918 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2919 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
2921 else if (flags
& OEP_ADDRESS_OF
)
2923 /* If we are interested in comparing addresses ignore
2924 MEM_REF wrappings of the base that can appear just for
2926 if (TREE_CODE (arg0
) == MEM_REF
2928 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
2929 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
2930 && integer_zerop (TREE_OPERAND (arg0
, 1)))
2932 else if (TREE_CODE (arg1
) == MEM_REF
2934 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
2935 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
2936 && integer_zerop (TREE_OPERAND (arg1
, 1)))
2944 /* When not checking addresses, this is needed for conversions and for
2945 COMPONENT_REF. Might as well play it safe and always test this. */
2946 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2947 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2948 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
2949 && !(flags
& OEP_ADDRESS_OF
)))
2952 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2953 We don't care about side effects in that case because the SAVE_EXPR
2954 takes care of that for us. In all other cases, two expressions are
2955 equal if they have no side effects. If we have two identical
2956 expressions with side effects that should be treated the same due
2957 to the only side effects being identical SAVE_EXPR's, that will
2958 be detected in the recursive calls below.
2959 If we are taking an invariant address of two identical objects
2960 they are necessarily equal as well. */
2961 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2962 && (TREE_CODE (arg0
) == SAVE_EXPR
2963 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
2964 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2967 /* Next handle constant cases, those for which we can return 1 even
2968 if ONLY_CONST is set. */
2969 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2970 switch (TREE_CODE (arg0
))
2973 return tree_int_cst_equal (arg0
, arg1
);
2976 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2977 TREE_FIXED_CST (arg1
));
2980 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
2984 if (!HONOR_SIGNED_ZEROS (arg0
))
2986 /* If we do not distinguish between signed and unsigned zero,
2987 consider them equal. */
2988 if (real_zerop (arg0
) && real_zerop (arg1
))
2995 if (VECTOR_CST_LOG2_NPATTERNS (arg0
)
2996 != VECTOR_CST_LOG2_NPATTERNS (arg1
))
2999 if (VECTOR_CST_NELTS_PER_PATTERN (arg0
)
3000 != VECTOR_CST_NELTS_PER_PATTERN (arg1
))
3003 unsigned int count
= vector_cst_encoded_nelts (arg0
);
3004 for (unsigned int i
= 0; i
< count
; ++i
)
3005 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0
, i
),
3006 VECTOR_CST_ENCODED_ELT (arg1
, i
), flags
))
3012 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
3014 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
3018 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
3019 && ! memcmp (TREE_STRING_POINTER (arg0
),
3020 TREE_STRING_POINTER (arg1
),
3021 TREE_STRING_LENGTH (arg0
)));
3024 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3025 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
3026 flags
| OEP_ADDRESS_OF
3027 | OEP_MATCH_SIDE_EFFECTS
);
3029 /* In GIMPLE empty constructors are allowed in initializers of
3031 return !CONSTRUCTOR_NELTS (arg0
) && !CONSTRUCTOR_NELTS (arg1
);
3036 if (flags
& OEP_ONLY_CONST
)
3039 /* Define macros to test an operand from arg0 and arg1 for equality and a
3040 variant that allows null and views null as being different from any
3041 non-null value. In the latter case, if either is null, both
3042 must be; otherwise, do the normal comparison. */
3043 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3044 TREE_OPERAND (arg1, N), flags)
3046 #define OP_SAME_WITH_NULL(N) \
3047 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3048 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3050 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
3053 /* Two conversions are equal only if signedness and modes match. */
3054 switch (TREE_CODE (arg0
))
3057 case FIX_TRUNC_EXPR
:
3058 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3059 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3069 case tcc_comparison
:
3071 if (OP_SAME (0) && OP_SAME (1))
3074 /* For commutative ops, allow the other order. */
3075 return (commutative_tree_code (TREE_CODE (arg0
))
3076 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3077 TREE_OPERAND (arg1
, 1), flags
)
3078 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3079 TREE_OPERAND (arg1
, 0), flags
));
3082 /* If either of the pointer (or reference) expressions we are
3083 dereferencing contain a side effect, these cannot be equal,
3084 but their addresses can be. */
3085 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3086 && (TREE_SIDE_EFFECTS (arg0
)
3087 || TREE_SIDE_EFFECTS (arg1
)))
3090 switch (TREE_CODE (arg0
))
3093 if (!(flags
& OEP_ADDRESS_OF
)
3094 && (TYPE_ALIGN (TREE_TYPE (arg0
))
3095 != TYPE_ALIGN (TREE_TYPE (arg1
))))
3097 flags
&= ~OEP_ADDRESS_OF
;
3101 /* Require the same offset. */
3102 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3103 TYPE_SIZE (TREE_TYPE (arg1
)),
3104 flags
& ~OEP_ADDRESS_OF
))
3109 case VIEW_CONVERT_EXPR
:
3112 case TARGET_MEM_REF
:
3114 if (!(flags
& OEP_ADDRESS_OF
))
3116 /* Require equal access sizes */
3117 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3118 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3119 || !TYPE_SIZE (TREE_TYPE (arg1
))
3120 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3121 TYPE_SIZE (TREE_TYPE (arg1
)),
3124 /* Verify that access happens in similar types. */
3125 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3127 /* Verify that accesses are TBAA compatible. */
3128 if (!alias_ptr_types_compatible_p
3129 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3130 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3131 || (MR_DEPENDENCE_CLIQUE (arg0
)
3132 != MR_DEPENDENCE_CLIQUE (arg1
))
3133 || (MR_DEPENDENCE_BASE (arg0
)
3134 != MR_DEPENDENCE_BASE (arg1
)))
3136 /* Verify that alignment is compatible. */
3137 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3138 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3141 flags
&= ~OEP_ADDRESS_OF
;
3142 return (OP_SAME (0) && OP_SAME (1)
3143 /* TARGET_MEM_REF requires equal extra operands. */
3144 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3145 || (OP_SAME_WITH_NULL (2)
3146 && OP_SAME_WITH_NULL (3)
3147 && OP_SAME_WITH_NULL (4))));
3150 case ARRAY_RANGE_REF
:
3153 flags
&= ~OEP_ADDRESS_OF
;
3154 /* Compare the array index by value if it is constant first as we
3155 may have different types but same value here. */
3156 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3157 TREE_OPERAND (arg1
, 1))
3159 && OP_SAME_WITH_NULL (2)
3160 && OP_SAME_WITH_NULL (3)
3161 /* Compare low bound and element size as with OEP_ADDRESS_OF
3162 we have to account for the offset of the ref. */
3163 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3164 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3165 || (operand_equal_p (array_ref_low_bound
3166 (CONST_CAST_TREE (arg0
)),
3168 (CONST_CAST_TREE (arg1
)), flags
)
3169 && operand_equal_p (array_ref_element_size
3170 (CONST_CAST_TREE (arg0
)),
3171 array_ref_element_size
3172 (CONST_CAST_TREE (arg1
)),
3176 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3177 may be NULL when we're called to compare MEM_EXPRs. */
3178 if (!OP_SAME_WITH_NULL (0)
3181 flags
&= ~OEP_ADDRESS_OF
;
3182 return OP_SAME_WITH_NULL (2);
3187 flags
&= ~OEP_ADDRESS_OF
;
3188 return OP_SAME (1) && OP_SAME (2);
3194 case tcc_expression
:
3195 switch (TREE_CODE (arg0
))
3198 /* Be sure we pass right ADDRESS_OF flag. */
3199 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3200 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3201 TREE_OPERAND (arg1
, 0),
3202 flags
| OEP_ADDRESS_OF
);
3204 case TRUTH_NOT_EXPR
:
3207 case TRUTH_ANDIF_EXPR
:
3208 case TRUTH_ORIF_EXPR
:
3209 return OP_SAME (0) && OP_SAME (1);
3212 case WIDEN_MULT_PLUS_EXPR
:
3213 case WIDEN_MULT_MINUS_EXPR
:
3216 /* The multiplication operands are commutative. */
3219 case TRUTH_AND_EXPR
:
3221 case TRUTH_XOR_EXPR
:
3222 if (OP_SAME (0) && OP_SAME (1))
3225 /* Otherwise take into account this is a commutative operation. */
3226 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3227 TREE_OPERAND (arg1
, 1), flags
)
3228 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3229 TREE_OPERAND (arg1
, 0), flags
));
3232 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3234 flags
&= ~OEP_ADDRESS_OF
;
3237 case BIT_INSERT_EXPR
:
3238 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3239 of op1. Need to check to make sure they are the same. */
3240 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3241 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3242 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3243 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3249 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3254 case PREDECREMENT_EXPR
:
3255 case PREINCREMENT_EXPR
:
3256 case POSTDECREMENT_EXPR
:
3257 case POSTINCREMENT_EXPR
:
3258 if (flags
& OEP_LEXICOGRAPHIC
)
3259 return OP_SAME (0) && OP_SAME (1);
3262 case CLEANUP_POINT_EXPR
:
3264 if (flags
& OEP_LEXICOGRAPHIC
)
3273 switch (TREE_CODE (arg0
))
3276 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3277 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3278 /* If not both CALL_EXPRs are either internal or normal
3279 functions, then they are not equal. */
3281 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3283 /* If the CALL_EXPRs call different internal functions, then they
3285 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3290 /* If the CALL_EXPRs call different functions, then they are not
3292 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3297 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3299 unsigned int cef
= call_expr_flags (arg0
);
3300 if (flags
& OEP_PURE_SAME
)
3301 cef
&= ECF_CONST
| ECF_PURE
;
3304 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3308 /* Now see if all the arguments are the same. */
3310 const_call_expr_arg_iterator iter0
, iter1
;
3312 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3313 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3315 a0
= next_const_call_expr_arg (&iter0
),
3316 a1
= next_const_call_expr_arg (&iter1
))
3317 if (! operand_equal_p (a0
, a1
, flags
))
3320 /* If we get here and both argument lists are exhausted
3321 then the CALL_EXPRs are equal. */
3322 return ! (a0
|| a1
);
3328 case tcc_declaration
:
3329 /* Consider __builtin_sqrt equal to sqrt. */
3330 return (TREE_CODE (arg0
) == FUNCTION_DECL
3331 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3332 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3333 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3335 case tcc_exceptional
:
3336 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3338 /* In GIMPLE constructors are used only to build vectors from
3339 elements. Individual elements in the constructor must be
3340 indexed in increasing order and form an initial sequence.
3342 We make no effort to compare constructors in GENERIC.
3343 (see sem_variable::equals in ipa-icf which can do so for
3345 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3346 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3349 /* Be sure that vectors constructed have the same representation.
3350 We only tested element precision and modes to match.
3351 Vectors may be BLKmode and thus also check that the number of
3353 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))
3354 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)))
3357 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3358 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3359 unsigned int len
= vec_safe_length (v0
);
3361 if (len
!= vec_safe_length (v1
))
3364 for (unsigned int i
= 0; i
< len
; i
++)
3366 constructor_elt
*c0
= &(*v0
)[i
];
3367 constructor_elt
*c1
= &(*v1
)[i
];
3369 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3370 /* In GIMPLE the indexes can be either NULL or matching i.
3371 Double check this so we won't get false
3372 positives for GENERIC. */
3374 && (TREE_CODE (c0
->index
) != INTEGER_CST
3375 || !compare_tree_int (c0
->index
, i
)))
3377 && (TREE_CODE (c1
->index
) != INTEGER_CST
3378 || !compare_tree_int (c1
->index
, i
))))
3383 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3384 && (flags
& OEP_LEXICOGRAPHIC
))
3386 /* Compare the STATEMENT_LISTs. */
3387 tree_stmt_iterator tsi1
, tsi2
;
3388 tree body1
= CONST_CAST_TREE (arg0
);
3389 tree body2
= CONST_CAST_TREE (arg1
);
3390 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3391 tsi_next (&tsi1
), tsi_next (&tsi2
))
3393 /* The lists don't have the same number of statements. */
3394 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3396 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3398 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3406 switch (TREE_CODE (arg0
))
3409 if (flags
& OEP_LEXICOGRAPHIC
)
3410 return OP_SAME_WITH_NULL (0);
3421 #undef OP_SAME_WITH_NULL
3424 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3425 with a different signedness or a narrower precision. */
3428 operand_equal_for_comparison_p (tree arg0, tree arg1)
3430 if (operand_equal_p (arg0, arg1, 0))
3433 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3434 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3437 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3438 and see if the inner values are the same. This removes any
3439 signedness comparison, which doesn't matter here. */
3444 if (operand_equal_p (op0
, op1
, 0))
3447 /* Discard a single widening conversion from ARG1 and see if the inner
3448 value is the same as ARG0. */
3449 if (CONVERT_EXPR_P (arg1
)
3450 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
3451 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
3452 < TYPE_PRECISION (TREE_TYPE (arg1
))
3453 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
3459 /* See if ARG is an expression that is either a comparison or is performing
3460 arithmetic on comparisons. The comparisons must only be comparing
3461 two different values, which will be stored in *CVAL1 and *CVAL2; if
3462 they are nonzero it means that some operands have already been found.
3463 No variables may be used anywhere else in the expression except in the
3464 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3465 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3467 If this is true, return 1. Otherwise, return zero. */
3470 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3472 enum tree_code code
= TREE_CODE (arg
);
3473 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3475 /* We can handle some of the tcc_expression cases here. */
3476 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3478 else if (tclass
== tcc_expression
3479 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3480 || code
== COMPOUND_EXPR
))
3481 tclass
= tcc_binary
;
3483 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3484 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3486 /* If we've already found a CVAL1 or CVAL2, this expression is
3487 too complex to handle. */
3488 if (*cval1
|| *cval2
)
3498 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3501 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3502 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3503 cval1
, cval2
, save_p
));
3508 case tcc_expression
:
3509 if (code
== COND_EXPR
)
3510 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3511 cval1
, cval2
, save_p
)
3512 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3513 cval1
, cval2
, save_p
)
3514 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3515 cval1
, cval2
, save_p
));
3518 case tcc_comparison
:
3519 /* First see if we can handle the first operand, then the second. For
3520 the second operand, we know *CVAL1 can't be zero. It must be that
3521 one side of the comparison is each of the values; test for the
3522 case where this isn't true by failing if the two operands
3525 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3526 TREE_OPERAND (arg
, 1), 0))
3530 *cval1
= TREE_OPERAND (arg
, 0);
3531 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3533 else if (*cval2
== 0)
3534 *cval2
= TREE_OPERAND (arg
, 0);
3535 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3540 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3542 else if (*cval2
== 0)
3543 *cval2
= TREE_OPERAND (arg
, 1);
3544 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3556 /* ARG is a tree that is known to contain just arithmetic operations and
3557 comparisons. Evaluate the operations in the tree substituting NEW0 for
3558 any occurrence of OLD0 as an operand of a comparison and likewise for
3562 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3563 tree old1
, tree new1
)
3565 tree type
= TREE_TYPE (arg
);
3566 enum tree_code code
= TREE_CODE (arg
);
3567 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3569 /* We can handle some of the tcc_expression cases here. */
3570 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3572 else if (tclass
== tcc_expression
3573 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3574 tclass
= tcc_binary
;
3579 return fold_build1_loc (loc
, code
, type
,
3580 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3581 old0
, new0
, old1
, new1
));
3584 return fold_build2_loc (loc
, code
, type
,
3585 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3586 old0
, new0
, old1
, new1
),
3587 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3588 old0
, new0
, old1
, new1
));
3590 case tcc_expression
:
3594 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3598 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3602 return fold_build3_loc (loc
, code
, type
,
3603 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3604 old0
, new0
, old1
, new1
),
3605 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3606 old0
, new0
, old1
, new1
),
3607 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3608 old0
, new0
, old1
, new1
));
3612 /* Fall through - ??? */
3614 case tcc_comparison
:
3616 tree arg0
= TREE_OPERAND (arg
, 0);
3617 tree arg1
= TREE_OPERAND (arg
, 1);
3619 /* We need to check both for exact equality and tree equality. The
3620 former will be true if the operand has a side-effect. In that
3621 case, we know the operand occurred exactly once. */
3623 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3625 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3628 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3630 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3633 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3641 /* Return a tree for the case when the result of an expression is RESULT
3642 converted to TYPE and OMITTED was previously an operand of the expression
3643 but is now not needed (e.g., we folded OMITTED * 0).
3645 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3646 the conversion of RESULT to TYPE. */
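/* Illustrative example (a sketch, not from the original sources): when
   folding f () * 0 the result is 0 but the call must still be evaluated,
   so this routine returns the COMPOUND_EXPR (f (), 0) converted to TYPE;
   if OMITTED had no side effects it would return just the converted
   RESULT.  */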
3649 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3651 tree t = fold_convert_loc (loc, type, result);
3653 /* If the resulting operand is an empty statement, just return the omitted
3654 statement cast to void. */
3655 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3656 return build1_loc (loc, NOP_EXPR, void_type_node,
3657 fold_ignored_result (omitted));
3659 if (TREE_SIDE_EFFECTS (omitted))
3660 return build2_loc (loc, COMPOUND_EXPR, type,
3661 fold_ignored_result (omitted), t);
3663 return non_lvalue_loc (loc, t);
3666 /* Return a tree for the case when the result of an expression is RESULT
3667 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3668 of the expression but are now not needed.
3670 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3671 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3672 evaluated before OMITTED2. Otherwise, if neither has side effects,
3673 just do the conversion of RESULT to TYPE. */
3676 omit_two_operands_loc (location_t loc, tree type, tree result,
3677 tree omitted1, tree omitted2)
3679 tree t = fold_convert_loc (loc, type, result);
3681 if (TREE_SIDE_EFFECTS (omitted2))
3682 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3683 if (TREE_SIDE_EFFECTS (omitted1))
3684 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3686 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3690 /* Return a simplified tree node for the truth-negation of ARG. This
3691 never alters ARG itself. We assume that ARG is an operation that
3692 returns a truth value (0 or 1).
3694 FIXME: one would think we would fold the result, but it causes
3695 problems with the dominator optimizer. */
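/* Illustrative examples (a sketch, not from the original sources):
   !(a && b) is rewritten as !a || !b, !(x < y) becomes x >= y (or
   x UNGE y when NaNs must be honored), and a double negation !!a simply
   returns a.  */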
3698 fold_truth_not_expr (location_t loc
, tree arg
)
3700 tree type
= TREE_TYPE (arg
);
3701 enum tree_code code
= TREE_CODE (arg
);
3702 location_t loc1
, loc2
;
3704 /* If this is a comparison, we can simply invert it, except for
3705 floating-point non-equality comparisons, in which case we just
3706 enclose a TRUTH_NOT_EXPR around what we have. */
3708 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3710 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3711 if (FLOAT_TYPE_P (op_type
)
3712 && flag_trapping_math
3713 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3714 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3717 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3718 if (code
== ERROR_MARK
)
3721 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3722 TREE_OPERAND (arg
, 1));
3723 if (TREE_NO_WARNING (arg
))
3724 TREE_NO_WARNING (ret
) = 1;
3731 return constant_boolean_node (integer_zerop (arg
), type
);
3733 case TRUTH_AND_EXPR
:
3734 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3735 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3736 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3737 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3738 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3741 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3742 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3743 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3744 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3745 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3747 case TRUTH_XOR_EXPR
:
3748 /* Here we can invert either operand. We invert the first operand
3749 unless the second operand is a TRUTH_NOT_EXPR in which case our
3750 result is the XOR of the first operand with the inside of the
3751 negation of the second operand. */
3753 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3754 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3755 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3757 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3758 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3759 TREE_OPERAND (arg
, 1));
3761 case TRUTH_ANDIF_EXPR
:
3762 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3763 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3764 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3765 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3766 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3768 case TRUTH_ORIF_EXPR
:
3769 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3770 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3771 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3772 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3773 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3775 case TRUTH_NOT_EXPR
:
3776 return TREE_OPERAND (arg
, 0);
3780 tree arg1
= TREE_OPERAND (arg
, 1);
3781 tree arg2
= TREE_OPERAND (arg
, 2);
3783 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3784 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3786 /* A COND_EXPR may have a throw as one operand, which
3787 then has void type. Just leave void operands
3789 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3790 VOID_TYPE_P (TREE_TYPE (arg1
))
3791 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3792 VOID_TYPE_P (TREE_TYPE (arg2
))
3793 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3797 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3798 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3799 TREE_OPERAND (arg
, 0),
3800 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3802 case NON_LVALUE_EXPR
:
3803 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3804 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3807 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3808 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3813 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3814 return build1_loc (loc
, TREE_CODE (arg
), type
,
3815 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3818 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3820 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3823 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3825 case CLEANUP_POINT_EXPR
:
3826 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3827 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3828 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3835 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3836 assume that ARG is an operation that returns a truth value (0 or 1
3837 for scalars, 0 or -1 for vectors). Return the folded expression if
3838 folding is successful. Otherwise, return NULL_TREE. */
3841 fold_invert_truthvalue (location_t loc
, tree arg
)
3843 tree type
= TREE_TYPE (arg
);
3844 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3850 /* Return a simplified tree node for the truth-negation of ARG. This
3851 never alters ARG itself. We assume that ARG is an operation that
3852 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3855 invert_truthvalue_loc (location_t loc
, tree arg
)
3857 if (TREE_CODE (arg
) == ERROR_MARK
)
3860 tree type
= TREE_TYPE (arg
);
3861 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3867 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3868 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3869 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3870 is the original memory reference used to preserve the alias set of
3874 make_bit_field_ref (location_t loc
, tree inner
, tree orig_inner
, tree type
,
3875 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
3876 int unsignedp
, int reversep
)
3878 tree result
, bftype
;
3880 /* Attempt not to lose the access path if possible. */
3881 if (TREE_CODE (orig_inner
) == COMPONENT_REF
)
3883 tree ninner
= TREE_OPERAND (orig_inner
, 0);
3885 HOST_WIDE_INT nbitsize
, nbitpos
;
3887 int nunsignedp
, nreversep
, nvolatilep
= 0;
3888 tree base
= get_inner_reference (ninner
, &nbitsize
, &nbitpos
,
3889 &noffset
, &nmode
, &nunsignedp
,
3890 &nreversep
, &nvolatilep
);
3892 && noffset
== NULL_TREE
3893 && nbitsize
>= bitsize
3894 && nbitpos
<= bitpos
3895 && bitpos
+ bitsize
<= nbitpos
+ nbitsize
3905 alias_set_type iset
= get_alias_set (orig_inner
);
3906 if (iset
== 0 && get_alias_set (inner
) != iset
)
3907 inner
= fold_build2 (MEM_REF
, TREE_TYPE (inner
),
3908 build_fold_addr_expr (inner
),
3909 build_int_cst (ptr_type_node
, 0));
3911 if (bitpos
== 0 && !reversep
)
3913 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3914 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3915 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3916 && tree_fits_shwi_p (size
)
3917 && tree_to_shwi (size
) == bitsize
)
3918 return fold_convert_loc (loc
, type
, inner
);
3922 if (TYPE_PRECISION (bftype
) != bitsize
3923 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3924 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3926 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3927 bitsize_int (bitsize
), bitsize_int (bitpos
));
3928 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
3931 result
= fold_convert_loc (loc
, type
, result
);
3936 /* Optimize a bit-field compare.
3938 There are two cases: First is a compare against a constant and the
3939 second is a comparison of two items where the fields are at the same
3940 bit position relative to the start of a chunk (byte, halfword, word)
3941 large enough to contain it. In these cases we can avoid the shift
3942 implicit in bitfield extractions.
3944 For constants, we emit a compare of the shifted constant with the
3945 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3946 compared. For two fields at the same position, we do the ANDs with the
3947 similar mask and compare the result of the ANDs.
3949 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3950 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3951 are the left and right operands of the comparison, respectively.
3953 If the optimization described above can be done, we return the resulting
3954 tree. Otherwise we return zero. */
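/* Illustrative example (a sketch under the assumptions above, not from
   the original sources): for struct { unsigned a : 3; } s, the test
   s.a == 5 can be done as (WORD & MASK) == (5 << SHIFT), where WORD is a
   mode-sized load covering the bit-field, avoiding the extract-and-shift
   sequence; a constant that cannot fit in the field folds directly to
   false (with a warning).  */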
3957 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3958 tree compare_type
, tree lhs
, tree rhs
)
3960 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3961 tree type
= TREE_TYPE (lhs
);
3963 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3964 machine_mode lmode
, rmode
;
3965 scalar_int_mode nmode
;
3966 int lunsignedp
, runsignedp
;
3967 int lreversep
, rreversep
;
3968 int lvolatilep
= 0, rvolatilep
= 0;
3969 tree linner
, rinner
= NULL_TREE
;
3973 /* Get all the information about the extractions being done. If the bit size
3974 if the same as the size of the underlying object, we aren't doing an
3975 extraction at all and so can do nothing. We also don't want to
3976 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3977 then will no longer be able to replace it. */
3978 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3979 &lunsignedp
, &lreversep
, &lvolatilep
);
3980 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3981 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3985 rreversep
= lreversep
;
3988 /* If this is not a constant, we can only do something if bit positions,
3989 sizes, signedness and storage order are the same. */
3991 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3992 &runsignedp
, &rreversep
, &rvolatilep
);
3994 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3995 || lunsignedp
!= runsignedp
|| lreversep
!= rreversep
|| offset
!= 0
3996 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
4000 /* Honor the C++ memory model and mimic what RTL expansion does. */
4001 unsigned HOST_WIDE_INT bitstart
= 0;
4002 unsigned HOST_WIDE_INT bitend
= 0;
4003 if (TREE_CODE (lhs
) == COMPONENT_REF
)
4005 get_bit_range (&bitstart
, &bitend
, lhs
, &lbitpos
, &offset
);
4006 if (offset
!= NULL_TREE
)
4010 /* See if we can find a mode to refer to this field. We should be able to,
4011 but fail if we can't. */
4012 if (!get_best_mode (lbitsize
, lbitpos
, bitstart
, bitend
,
4013 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
4014 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
4015 TYPE_ALIGN (TREE_TYPE (rinner
))),
4016 BITS_PER_WORD
, false, &nmode
))
4019 /* Set signed and unsigned types of the precision of this mode for the
4021 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
4023 /* Compute the bit position and size for the new reference and our offset
4024 within it. If the new reference is the same size as the original, we
4025 won't optimize anything, so return zero. */
4026 nbitsize
= GET_MODE_BITSIZE (nmode
);
4027 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
4029 if (nbitsize
== lbitsize
)
4032 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4033 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
4035 /* Make the mask to be used against the extracted field. */
4036 mask
= build_int_cst_type (unsigned_type
, -1);
4037 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
4038 mask
= const_binop (RSHIFT_EXPR
, mask
,
4039 size_int (nbitsize
- lbitsize
- lbitpos
));
4046 /* If not comparing with constant, just rework the comparison
4048 tree t1
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4049 nbitsize
, nbitpos
, 1, lreversep
);
4050 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t1
, mask
);
4051 tree t2
= make_bit_field_ref (loc
, rinner
, rhs
, unsigned_type
,
4052 nbitsize
, nbitpos
, 1, rreversep
);
4053 t2
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t2
, mask
);
4054 return fold_build2_loc (loc
, code
, compare_type
, t1
, t2
);
4057 /* Otherwise, we are handling the constant case. See if the constant is too
4058 big for the field. Warn and return a tree for 0 (false) if so. We do
4059 this not only for its own sake, but to avoid having to test for this
4060 error case below. If we didn't, we might generate wrong code.
4062 For unsigned fields, the constant shifted right by the field length should
4063 be all zero. For signed fields, the high-order bits should agree with
4068 if (wi::lrshift (wi::to_wide (rhs
), lbitsize
) != 0)
4070 warning (0, "comparison is always %d due to width of bit-field",
4072 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4077 wide_int tem
= wi::arshift (wi::to_wide (rhs
), lbitsize
- 1);
4078 if (tem
!= 0 && tem
!= -1)
4080 warning (0, "comparison is always %d due to width of bit-field",
4082 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4089 /* Single-bit compares should always be against zero. */
4090 if (lbitsize
== 1 && ! integer_zerop (rhs
))
4092 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
4093 rhs
= build_int_cst (type
, 0);
4096 /* Make a new bitfield reference, shift the constant over the
4097 appropriate number of bits and mask it with the computed mask
4098 (in case this was a signed field). If we changed it, make a new one. */
4099 lhs
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4100 nbitsize
, nbitpos
, 1, lreversep
);
4102 rhs
= const_binop (BIT_AND_EXPR
,
4103 const_binop (LSHIFT_EXPR
,
4104 fold_convert_loc (loc
, unsigned_type
, rhs
),
4105 size_int (lbitpos
)),
4108 lhs
= build2_loc (loc
, code
, compare_type
,
4109 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
4113 /* Subroutine for fold_truth_andor_1: decode a field reference.
4115 If EXP is a comparison reference, we return the innermost reference.
4117 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4118 set to the starting bit number.
4120 If the innermost field can be completely contained in a mode-sized
4121 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4123 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4124 otherwise it is not changed.
4126 *PUNSIGNEDP is set to the signedness of the field.
4128 *PREVERSEP is set to the storage order of the field.
4130 *PMASK is set to the mask used. This is either contained in a
4131 BIT_AND_EXPR or derived from the width of the field.
4133 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4135 Return 0 if this is not a component reference or is one that we can't
4136 do anything with. */
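/* Illustrative example (a sketch, not from the original sources): for an
   operand like (s.b & 7), this returns the object containing the
   bit-field, sets *PBITSIZE/*PBITPOS to the field's extent, stores 7 in
   *PAND_MASK, and leaves in *PMASK the AND of 7 with the field-width
   mask.  */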
4139 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4140 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4141 int *punsignedp
, int *preversep
, int *pvolatilep
,
4142 tree
*pmask
, tree
*pand_mask
)
4145 tree outer_type
= 0;
4147 tree mask
, inner
, offset
;
4149 unsigned int precision
;
4151 /* All the optimizations using this function assume integer fields.
4152 There are problems with FP fields since the type_for_size call
4153 below can fail for, e.g., XFmode. */
4154 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
4157 /* We are interested in the bare arrangement of bits, so strip everything
4158 that doesn't affect the machine mode. However, record the type of the
4159 outermost expression if it may matter below. */
4160 if (CONVERT_EXPR_P (exp
)
4161 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4162 outer_type
= TREE_TYPE (exp
);
4165 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4167 and_mask
= TREE_OPERAND (exp
, 1);
4168 exp
= TREE_OPERAND (exp
, 0);
4169 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4170 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4174 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
4175 punsignedp
, preversep
, pvolatilep
);
4176 if ((inner
== exp
&& and_mask
== 0)
4177 || *pbitsize
< 0 || offset
!= 0
4178 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4179 /* Reject out-of-bound accesses (PR79731). */
4180 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4181 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4182 *pbitpos
+ *pbitsize
) < 0))
4187 /* If the number of bits in the reference is the same as the bitsize of
4188 the outer type, then the outer type gives the signedness. Otherwise
4189 (in case of a small bitfield) the signedness is unchanged. */
4190 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4191 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4193 /* Compute the mask to access the bitfield. */
4194 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4195 precision
= TYPE_PRECISION (unsigned_type
);
4197 mask
= build_int_cst_type (unsigned_type
, -1);
4199 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4200 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4202 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4204 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4205 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4208 *pand_mask
= and_mask
;
4212 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4213 bit positions and MASK is SIGNED. */
4216 all_ones_mask_p (const_tree mask, unsigned int size)
4218 tree type = TREE_TYPE (mask);
4219 unsigned int precision = TYPE_PRECISION (type);
4221 /* If this function returns true when the type of the mask is
4222 UNSIGNED, then there will be errors. In particular see
4223 gcc.c-torture/execute/990326-1.c. There does not appear to be
4224 any documentation paper trail as to why this is so. But the pre
4225 wide-int worked with that restriction and it has been preserved
4227 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4230 return wi::mask (size, false, precision) == wi::to_wide (mask);
4233 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4234 represents the sign bit of EXP's type. If EXP represents a sign
4235 or zero extension, also test VAL against the unextended type.
4236 The return value is the (sub)expression whose sign bit is VAL,
4237 or NULL_TREE otherwise. */
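/* Illustrative example (a sketch, not from the original sources): for a
   32-bit signed EXP, only a VAL with just the sign bit set (0x80000000)
   makes this return non-null; if EXP is a NOP_EXPR widening a narrower
   value, the narrower type's sign bit is accepted as well.  */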
4240 sign_bit_p (tree exp, const_tree val)
4245 /* Tree EXP must have an integral type. */
4246 t = TREE_TYPE (exp);
4247 if (! INTEGRAL_TYPE_P (t))
4250 /* Tree VAL must be an integer constant. */
4251 if (TREE_CODE (val) != INTEGER_CST
4252 || TREE_OVERFLOW (val))
4255 width = TYPE_PRECISION (t);
4256 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4259 /* Handle extension from a narrower type. */
4260 if (TREE_CODE (exp) == NOP_EXPR
4261 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4262 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4267 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4268 to be evaluated unconditionally. */
4271 simple_operand_p (const_tree exp
)
4273 /* Strip any conversions that don't change the machine mode. */
4276 return (CONSTANT_CLASS_P (exp
)
4277 || TREE_CODE (exp
) == SSA_NAME
4279 && ! TREE_ADDRESSABLE (exp
)
4280 && ! TREE_THIS_VOLATILE (exp
)
4281 && ! DECL_NONLOCAL (exp
)
4282 /* Don't regard global variables as simple. They may be
4283 allocated in ways unknown to the compiler (shared memory,
4284 #pragma weak, etc). */
4285 && ! TREE_PUBLIC (exp
)
4286 && ! DECL_EXTERNAL (exp
)
4287 /* Weakrefs are not safe to be read, since they can be NULL.
4288 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4289 have DECL_WEAK flag set. */
4290 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4291 /* Loading a static variable is unduly expensive, but global
4292 registers aren't expensive. */
4293 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4296 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4297 to be evaluated unconditionally.
4298 In addition to simple_operand_p, we assume that comparisons, conversions,
4299 and logic-not operations are simple, if their operands are simple, too. */
4302 simple_operand_p_2 (tree exp)
4304 enum tree_code code;
4306 if (TREE_SIDE_EFFECTS (exp)
4307 || tree_could_trap_p (exp))
4310 while (CONVERT_EXPR_P (exp))
4311 exp = TREE_OPERAND (exp, 0);
4313 code = TREE_CODE (exp);
4315 if (TREE_CODE_CLASS (code) == tcc_comparison)
4316 return (simple_operand_p (TREE_OPERAND (exp, 0))
4317 && simple_operand_p (TREE_OPERAND (exp, 1)));
4319 if (code == TRUTH_NOT_EXPR)
4320 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4322 return simple_operand_p (exp);
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
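/* As a worked illustration of the notation above: X >= 2 && X <= 5 is
   the range "+ [2, 5]"; its negation is "- [2, 5]"; and X > 10 by
   itself would be described as "- [-, 10]", i.e. outside the range
   from the lowest value of the type up to 10.  */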
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
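/* For instance, range_binop (LE_EXPR, integer_type_node, arg0, 1,
   arg1, 0) with ARG0 omitted (a missing upper bound, i.e. +infinity)
   and ARG1 a present lower bound computes SGN0 = 1 and SGN1 = 0, so
   the result is constant false: an unbounded upper end is never below
   a finite bound.  */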
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it
   should stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}
4479 /* If this is an unsigned comparison, we also know that EXP is
4480 greater than or equal to zero. We base the range tests we make
4481 on that fact, so we record it here so we can parse existing
4482 range tests. We test arg0_type since often the return type
4483 of, e.g. EQ_EXPR, is boolean. */
4484 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4486 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4488 build_int_cst (arg0_type
, 0),
4492 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4494 /* If the high bound is missing, but we have a nonzero low
4495 bound, reverse the range so it goes from zero to the low bound
4497 if (high
== 0 && low
&& ! integer_zerop (low
))
4500 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4501 build_int_cst (TREE_TYPE (low
), 1), 0);
4502 low
= build_int_cst (arg0_type
, 0);
4512 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4513 low and high are non-NULL, then normalize will DTRT. */
4514 if (!TYPE_UNSIGNED (arg0_type
)
4515 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4517 if (low
== NULL_TREE
)
4518 low
= TYPE_MIN_VALUE (arg0_type
);
4519 if (high
== NULL_TREE
)
4520 high
= TYPE_MAX_VALUE (arg0_type
);
4523 /* (-x) IN [a,b] -> x in [-b, -a] */
4524 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4525 build_int_cst (exp_type
, 0),
4527 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4528 build_int_cst (exp_type
, 0),
4530 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4536 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4537 build_int_cst (exp_type
, 1));
4541 if (TREE_CODE (arg1
) != INTEGER_CST
)
4544 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4545 move a constant to the other side. */
4546 if (!TYPE_UNSIGNED (arg0_type
)
4547 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4550 /* If EXP is signed, any overflow in the computation is undefined,
4551 so we don't worry about it so long as our computations on
4552 the bounds don't overflow. For unsigned, overflow is defined
4553 and this is exactly the right thing. */
4554 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4555 arg0_type
, low
, 0, arg1
, 0);
4556 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4557 arg0_type
, high
, 1, arg1
, 0);
4558 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4559 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4562 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4563 *strict_overflow_p
= true;
4566 /* Check for an unsigned range which has wrapped around the maximum
4567 value thus making n_high < n_low, and normalize it. */
4568 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4570 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4571 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4572 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4573 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4575 /* If the range is of the form +/- [ x+1, x ], we won't
4576 be able to normalize it. But then, it represents the
4577 whole range or the empty set, so make it
4579 if (tree_int_cst_equal (n_low
, low
)
4580 && tree_int_cst_equal (n_high
, high
))
4586 low
= n_low
, high
= n_high
;
4594 case NON_LVALUE_EXPR
:
4595 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4598 if (! INTEGRAL_TYPE_P (arg0_type
)
4599 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4600 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4603 n_low
= low
, n_high
= high
;
4606 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4609 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4611 /* If we're converting arg0 from an unsigned type, to exp,
4612 a signed type, we will be doing the comparison as unsigned.
4613 The tests above have already verified that LOW and HIGH
4616 So we have to ensure that we will handle large unsigned
4617 values the same way that the current signed bounds treat
4620 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4624 /* For fixed-point modes, we need to pass the saturating flag
4625 as the 2nd parameter. */
4626 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4628 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4629 TYPE_SATURATING (arg0_type
));
4632 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4634 /* A range without an upper bound is, naturally, unbounded.
4635 Since convert would have cropped a very large value, use
4636 the max value for the destination type. */
4638 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4639 : TYPE_MAX_VALUE (arg0_type
);
4641 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4642 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4643 fold_convert_loc (loc
, arg0_type
,
4645 build_int_cst (arg0_type
, 1));
4647 /* If the low bound is specified, "and" the range with the
4648 range for which the original unsigned value will be
4652 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4653 1, fold_convert_loc (loc
, arg0_type
,
4658 in_p
= (n_in_p
== in_p
);
4662 /* Otherwise, "or" the range with the range of the input
4663 that will be interpreted as negative. */
4664 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4665 1, fold_convert_loc (loc
, arg0_type
,
4670 in_p
= (in_p
!= n_in_p
);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
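/* For example, given EXP = (x > 5) for a signed X, make_range returns
   X with *PIN_P = 0, *PLOW = NULL_TREE (no lower bound) and
   *PHIGH = 5, i.e. the range "- [-, 5]": X lies outside everything up
   to and including 5.  */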
/* Returns TRUE if [LOW, HIGH] range check can be optimized to
   a bitwise check i.e. when
     LOW  == 0xXX...X00...0
     HIGH == 0xXX...X11...1
   Return corresponding mask in MASK and stem in VALUE.  */

static bool
maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
		  tree *value)
{
  if (TREE_CODE (low) != INTEGER_CST
      || TREE_CODE (high) != INTEGER_CST)
    return false;

  unsigned prec = TYPE_PRECISION (type);
  wide_int lo = wi::to_wide (low, prec);
  wide_int hi = wi::to_wide (high, prec);

  wide_int end_mask = lo ^ hi;
  if ((end_mask & (end_mask + 1)) != 0
      || (lo & end_mask) != 0)
    return false;

  wide_int stem_mask = ~end_mask;
  wide_int stem = lo & stem_mask;
  if (stem != (hi & stem_mask))
    return false;

  *mask = wide_int_to_tree (type, stem_mask);
  *value = wide_int_to_tree (type, stem);

  return true;
}
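/* Example: for LOW == 16 (0b10000) and HIGH == 31 (0b11111) the xor
   END_MASK is 0b01111, a contiguous run of low bits that LOW does not
   touch, so the check succeeds with *MASK == ~0b01111 and
   *VALUE == 0b10000; callers such as build_range_check can then use
   (EXP & MASK) == VALUE in place of the two comparisons.  */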
/* Helper routine for build_range_check and match.pd.  Return the type to
   perform the check or NULL if it shouldn't be optimized.  */

tree
range_check_type (tree etype)
{
  /* First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return NULL_TREE;
    }

  return etype;
}

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), mask, value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (TREE_CODE (exp) == BIT_AND_EXPR
      && maskable_range_p (low, high, etype, &mask, &value))
    return fold_build2_loc (loc, EQ_EXPR, type,
			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
					     exp, mask),
			    value);

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
  etype = range_check_type (etype);
  if (etype == NULL_TREE)
    return 0;

  if (POINTER_TYPE_P (etype))
    etype = unsigned_type_for (etype);

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
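/* As a concrete instance of the final transformation above: the check
   10 <= c && c <= 19 has VALUE = 19 - 10 = 9 and, assuming the
   recursive call succeeds, becomes the single unsigned comparison
   (unsigned) (c - 10) <= 9, since any C below 10 wraps around to a
   large value that fails the test.  */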
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
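/* E.g. range_successor of 9 in int is 10, while range_successor of
   TYPE_MAX_VALUE (int) is 0, signalling that the bound would
   overflow; merge_ranges below relies on that to punt when splicing
   adjacent ranges.  */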
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
4968 int lowequal
= ((low0
== 0 && low1
== 0)
4969 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4970 low0
, 0, low1
, 0)));
4971 int highequal
= ((high0
== 0 && high1
== 0)
4972 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4973 high0
, 1, high1
, 1)));
4975 /* Make range 0 be the range that starts first, or ends last if they
4976 start at the same value. Swap them if it isn't. */
4977 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4980 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4981 high1
, 1, high0
, 1))))
4983 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4984 tem
= low0
, low0
= low1
, low1
= tem
;
4985 tem
= high0
, high0
= high1
, high1
= tem
;
4988 /* Now flag two cases, whether the ranges are disjoint or whether the
4989 second range is totally subsumed in the first. Note that the tests
4990 below are simplified by the ones above. */
4991 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4992 high0
, 1, low1
, 0));
4993 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4994 high1
, 1, high0
, 1));
4996 /* We now have four cases, depending on whether we are including or
4997 excluding the two ranges. */
5000 /* If they don't overlap, the result is false. If the second range
5001 is a subset it is the result. Otherwise, the range is from the start
5002 of the second to the end of the first. */
5004 in_p
= 0, low
= high
= 0;
5006 in_p
= 1, low
= low1
, high
= high1
;
5008 in_p
= 1, low
= low1
, high
= high0
;
5011 else if (in0_p
&& ! in1_p
)
5013 /* If they don't overlap, the result is the first range. If they are
5014 equal, the result is false. If the second range is a subset of the
5015 first, and the ranges begin at the same place, we go from just after
5016 the end of the second range to the end of the first. If the second
5017 range is not a subset of the first, or if it is a subset and both
5018 ranges end at the same place, the range starts at the start of the
5019 first range and ends just before the second range.
5020 Otherwise, we can't describe this as a single range. */
5022 in_p
= 1, low
= low0
, high
= high0
;
5023 else if (lowequal
&& highequal
)
5024 in_p
= 0, low
= high
= 0;
5025 else if (subset
&& lowequal
)
5027 low
= range_successor (high1
);
5032 /* We are in the weird situation where high0 > high1 but
5033 high1 has no successor. Punt. */
5037 else if (! subset
|| highequal
)
5040 high
= range_predecessor (low1
);
5044 /* low0 < low1 but low1 has no predecessor. Punt. */
5052 else if (! in0_p
&& in1_p
)
5054 /* If they don't overlap, the result is the second range. If the second
5055 is a subset of the first, the result is false. Otherwise,
5056 the range starts just after the first range and ends at the
5057 end of the second. */
5059 in_p
= 1, low
= low1
, high
= high1
;
5060 else if (subset
|| highequal
)
5061 in_p
= 0, low
= high
= 0;
5064 low
= range_successor (high0
);
5069 /* high1 > high0 but high0 has no successor. Punt. */
5077 /* The case where we are excluding both ranges. Here the complex case
5078 is if they don't overlap. In that case, the only time we have a
5079 range is if they are adjacent. If the second is a subset of the
5080 first, the result is the first. Otherwise, the range to exclude
5081 starts at the beginning of the first range and ends at the end of the
5085 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5086 range_successor (high0
),
5088 in_p
= 0, low
= low0
, high
= high1
;
5091 /* Canonicalize - [min, x] into - [-, x]. */
5092 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5093 switch (TREE_CODE (TREE_TYPE (low0
)))
5096 if (TYPE_PRECISION (TREE_TYPE (low0
))
5097 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
5101 if (tree_int_cst_equal (low0
,
5102 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5106 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5107 && integer_zerop (low0
))
5114 /* Canonicalize - [x, max] into - [x, -]. */
5115 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5116 switch (TREE_CODE (TREE_TYPE (high1
)))
5119 if (TYPE_PRECISION (TREE_TYPE (high1
))
5120 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
5124 if (tree_int_cst_equal (high1
,
5125 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5129 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5130 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5132 build_int_cst (TREE_TYPE (high1
), 1),
5140 /* The ranges might be also adjacent between the maximum and
5141 minimum values of the given type. For
5142 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5143 return + [x + 1, y - 1]. */
5144 if (low0
== 0 && high1
== 0)
5146 low
= range_successor (high0
);
5147 high
= range_predecessor (low1
);
5148 if (low
== 0 || high
== 0)
5158 in_p
= 0, low
= low0
, high
= high0
;
5160 in_p
= 0, low
= low0
, high
= high1
;
5163 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
5189 /* If we have A op 0 ? A : -A, consider applying the following
5192 A == 0? A : -A same as -A
5193 A != 0? A : -A same as A
5194 A >= 0? A : -A same as abs (A)
5195 A > 0? A : -A same as abs (A)
5196 A <= 0? A : -A same as -abs (A)
5197 A < 0? A : -A same as -abs (A)
5199 None of these transformations work for modes with signed
5200 zeros. If A is +/-0, the first two transformations will
5201 change the sign of the result (from +0 to -0, or vice
5202 versa). The last four will fix the sign of the result,
5203 even though the original expressions could be positive or
5204 negative, depending on the sign of A.
5206 Note that all these transformations are correct if A is
5207 NaN, since the two alternatives (A and -A) are also NaNs. */
5208 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5209 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5210 ? real_zerop (arg01
)
5211 : integer_zerop (arg01
))
5212 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5213 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5214 /* In the case that A is of the form X-Y, '-A' (arg2) may
5215 have already been folded to Y-X, check for that. */
5216 || (TREE_CODE (arg1
) == MINUS_EXPR
5217 && TREE_CODE (arg2
) == MINUS_EXPR
5218 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5219 TREE_OPERAND (arg2
, 1), 0)
5220 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5221 TREE_OPERAND (arg2
, 0), 0))))
5226 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5227 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5230 return fold_convert_loc (loc
, type
, arg1
);
5233 if (flag_trapping_math
)
5238 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5240 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5241 return fold_convert_loc (loc
, type
, tem
);
5244 if (flag_trapping_math
)
5249 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5251 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5252 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5254 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5258 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5259 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5260 both transformations are correct when A is NaN: A != 0
5261 is then true, and A == 0 is false. */
5263 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5264 && integer_zerop (arg01
) && integer_zerop (arg2
))
5266 if (comp_code
== NE_EXPR
)
5267 return fold_convert_loc (loc
, type
, arg1
);
5268 else if (comp_code
== EQ_EXPR
)
5269 return build_zero_cst (type
);
5272 /* Try some transformations of A op B ? A : B.
5274 A == B? A : B same as B
5275 A != B? A : B same as A
5276 A >= B? A : B same as max (A, B)
5277 A > B? A : B same as max (B, A)
5278 A <= B? A : B same as min (A, B)
5279 A < B? A : B same as min (B, A)
5281 As above, these transformations don't work in the presence
5282 of signed zeros. For example, if A and B are zeros of
5283 opposite sign, the first two transformations will change
5284 the sign of the result. In the last four, the original
5285 expressions give different results for (A=+0, B=-0) and
5286 (A=-0, B=+0), but the transformed expressions do not.
5288 The first two transformations are correct if either A or B
5289 is a NaN. In the first transformation, the condition will
5290 be false, and B will indeed be chosen. In the case of the
5291 second transformation, the condition A != B will be true,
5292 and A will be chosen.
5294 The conversions to max() and min() are not correct if B is
5295 a number and A is not. The conditions in the original
5296 expressions will be false, so all four give B. The min()
5297 and max() versions would give a NaN instead. */
5298 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5299 && operand_equal_for_comparison_p (arg01
, arg2
)
5300 /* Avoid these transformations if the COND_EXPR may be used
5301 as an lvalue in the C++ front-end. PR c++/19199. */
5303 || VECTOR_TYPE_P (type
)
5304 || (! lang_GNU_CXX ()
5305 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5306 || ! maybe_lvalue_p (arg1
)
5307 || ! maybe_lvalue_p (arg2
)))
5309 tree comp_op0
= arg00
;
5310 tree comp_op1
= arg01
;
5311 tree comp_type
= TREE_TYPE (comp_op0
);
5316 return fold_convert_loc (loc
, type
, arg2
);
5318 return fold_convert_loc (loc
, type
, arg1
);
5323 /* In C++ a ?: expression can be an lvalue, so put the
5324 operand which will be used if they are equal first
5325 so that we can convert this back to the
5326 corresponding COND_EXPR. */
5327 if (!HONOR_NANS (arg1
))
5329 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5330 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5331 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5332 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5333 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5334 comp_op1
, comp_op0
);
5335 return fold_convert_loc (loc
, type
, tem
);
5342 if (!HONOR_NANS (arg1
))
5344 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5345 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5346 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5347 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5348 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5349 comp_op1
, comp_op0
);
5350 return fold_convert_loc (loc
, type
, tem
);
5354 if (!HONOR_NANS (arg1
))
5355 return fold_convert_loc (loc
, type
, arg2
);
5358 if (!HONOR_NANS (arg1
))
5359 return fold_convert_loc (loc
, type
, arg1
);
5362 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && !flag_sanitize_coverage
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c),
			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
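/* Worked example (assuming a 32-bit mode): unextend (0x80, 8, 0,
   NULL_TREE) extracts bit 7 (which is 1), shifts it to bit 31 and
   arithmetically back down, giving 0xffffff00, and the final XOR with
   C produces 0xffffff80 -- exactly 0x80 sign-extended from 8 bits --
   whereas unextend (0x7f, 8, 0, NULL_TREE) returns 0x7f unchanged.  */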
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
5605 /* If this is the "or" of two comparisons, we can do something if
5606 the comparisons are NE_EXPR. If this is the "and", we can do something
5607 if the comparisons are EQ_EXPR. I.e.,
5608 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5610 WANTED_CODE is this operation code. For single bit fields, we can
5611 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5612 comparison for one-bit fields. */
5614 enum tree_code wanted_code
;
5615 enum tree_code lcode
, rcode
;
5616 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5617 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5618 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5619 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5620 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5621 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5622 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5623 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
5624 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5625 scalar_int_mode lnmode
, rnmode
;
5626 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5627 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5628 tree l_const
, r_const
;
5629 tree lntype
, rntype
, result
;
5630 HOST_WIDE_INT first_bit
, end_bit
;
5633 /* Start by getting the comparison codes. Fail if anything is volatile.
5634 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5635 it were surrounded with a NE_EXPR. */
5637 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5640 lcode
= TREE_CODE (lhs
);
5641 rcode
= TREE_CODE (rhs
);
5643 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5645 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5646 build_int_cst (TREE_TYPE (lhs
), 0));
5650 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5652 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5653 build_int_cst (TREE_TYPE (rhs
), 0));
5657 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5658 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5661 ll_arg
= TREE_OPERAND (lhs
, 0);
5662 lr_arg
= TREE_OPERAND (lhs
, 1);
5663 rl_arg
= TREE_OPERAND (rhs
, 0);
5664 rr_arg
= TREE_OPERAND (rhs
, 1);
5666 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5667 if (simple_operand_p (ll_arg
)
5668 && simple_operand_p (lr_arg
))
5670 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5671 && operand_equal_p (lr_arg
, rr_arg
, 0))
5673 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5674 truth_type
, ll_arg
, lr_arg
);
5678 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5679 && operand_equal_p (lr_arg
, rl_arg
, 0))
5681 result
= combine_comparisons (loc
, code
, lcode
,
5682 swap_tree_comparison (rcode
),
5683 truth_type
, ll_arg
, lr_arg
);
5689 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5690 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5692 /* If the RHS can be evaluated unconditionally and its operands are
5693 simple, it wins to evaluate the RHS unconditionally on machines
5694 with expensive branches. In this case, this isn't a comparison
5695 that can be merged. */
5697 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5699 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5700 && simple_operand_p (rl_arg
)
5701 && simple_operand_p (rr_arg
))
5703 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5704 if (code
== TRUTH_OR_EXPR
5705 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5706 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5707 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5708 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5709 return build2_loc (loc
, NE_EXPR
, truth_type
,
5710 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5712 build_int_cst (TREE_TYPE (ll_arg
), 0));
5714 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5715 if (code
== TRUTH_AND_EXPR
5716 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5717 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5718 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5719 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5720 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5721 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5723 build_int_cst (TREE_TYPE (ll_arg
), 0));
5726 /* See if the comparisons can be merged. Then get all the parameters for
5729 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5730 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5733 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
5735 ll_inner
= decode_field_reference (loc
, &ll_arg
,
5736 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5737 &ll_unsignedp
, &ll_reversep
, &volatilep
,
5738 &ll_mask
, &ll_and_mask
);
5739 lr_inner
= decode_field_reference (loc
, &lr_arg
,
5740 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5741 &lr_unsignedp
, &lr_reversep
, &volatilep
,
5742 &lr_mask
, &lr_and_mask
);
5743 rl_inner
= decode_field_reference (loc
, &rl_arg
,
5744 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5745 &rl_unsignedp
, &rl_reversep
, &volatilep
,
5746 &rl_mask
, &rl_and_mask
);
5747 rr_inner
= decode_field_reference (loc
, &rr_arg
,
5748 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5749 &rr_unsignedp
, &rr_reversep
, &volatilep
,
5750 &rr_mask
, &rr_and_mask
);
5752 /* It must be true that the inner operation on the lhs of each
5753 comparison must be the same if we are to be able to do anything.
5754 Then see if we have constants. If not, the same must be true for
5757 || ll_reversep
!= rl_reversep
5758 || ll_inner
== 0 || rl_inner
== 0
5759 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5762 if (TREE_CODE (lr_arg
) == INTEGER_CST
5763 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5765 l_const
= lr_arg
, r_const
= rr_arg
;
5766 lr_reversep
= ll_reversep
;
5768 else if (lr_reversep
!= rr_reversep
5769 || lr_inner
== 0 || rr_inner
== 0
5770 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5773 l_const
= r_const
= 0;
5775 /* If either comparison code is not correct for our logical operation,
5776 fail. However, we can convert a one-bit comparison against zero into
5777 the opposite comparison against that bit being set in the field. */
5779 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5780 if (lcode
!= wanted_code
)
5782 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5784 /* Make the left operand unsigned, since we are only interested
5785 in the value of one bit. Otherwise we are doing the wrong
5794 /* This is analogous to the code for l_const above. */
5795 if (rcode
!= wanted_code
)
5797 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5806 /* See if we can find a mode that contains both fields being compared on
5807 the left. If we can't, fail. Otherwise, update all constants and masks
5808 to be relative to a field of that size. */
5809 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5810 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5811 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5812 TYPE_ALIGN (TREE_TYPE (ll_inner
)), BITS_PER_WORD
,
5813 volatilep
, &lnmode
))
5816 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5817 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5818 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5819 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5821 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
5823 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5824 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5827 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5828 size_int (xll_bitpos
));
5829 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5830 size_int (xrl_bitpos
));
5834 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5835 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5836 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5837 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5838 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5841 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5843 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5848 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5849 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5850 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5851 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5852 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5855 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5857 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5861 /* If the right sides are not constant, do the same for it. Also,
5862 disallow this optimization if a size or signedness mismatch occurs
5863 between the left and right sides. */
5866 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5867 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5868 /* Make sure the two fields on the right
5869 correspond to the left without being swapped. */
5870 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5873 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5874 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5875 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5876 TYPE_ALIGN (TREE_TYPE (lr_inner
)), BITS_PER_WORD
,
5877 volatilep
, &rnmode
))
5880 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5881 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5882 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5883 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5885 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
5887 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5888 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5891 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5893 size_int (xlr_bitpos
));
5894 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5896 size_int (xrr_bitpos
));
5898 /* Make a mask that corresponds to both fields being compared.
5899 Do this for both items being compared. If the operands are the
5900 same size and the bits being compared are in the same position
5901 then we can do this by masking both and comparing the masked
5903 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5904 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5905 if (lnbitsize
== rnbitsize
5906 && xll_bitpos
== xlr_bitpos
5910 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
5911 lntype
, lnbitsize
, lnbitpos
,
5912 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
5913 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5914 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5916 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
5917 rntype
, rnbitsize
, rnbitpos
,
5918 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
5919 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5920 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5922 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5925 /* There is still another way we can do something: If both pairs of
5926 fields being compared are adjacent, we may be able to make a wider
5927 field containing them both.
5929 Note that we still must mask the lhs/rhs expressions. Furthermore,
5930 the mask must be shifted to account for the shift done by
5931 make_bit_field_ref. */
5932 if (((ll_bitsize
+ ll_bitpos
== rl_bitpos
5933 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5934 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5935 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5943 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
5944 ll_bitsize
+ rl_bitsize
,
5945 MIN (ll_bitpos
, rl_bitpos
),
5946 ll_unsignedp
, ll_reversep
);
5947 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
5948 lr_bitsize
+ rr_bitsize
,
5949 MIN (lr_bitpos
, rr_bitpos
),
5950 lr_unsignedp
, lr_reversep
);
5952 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5953 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5954 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5955 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5957 /* Convert to the smaller type before masking out unwanted bits. */
5959 if (lntype
!= rntype
)
5961 if (lnbitsize
> rnbitsize
)
5963 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5964 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5967 else if (lnbitsize
< rnbitsize
)
5969 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5970 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5975 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5976 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5978 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5979 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5981 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5987 /* Handle the case of comparisons with constants. If there is something in
5988 common between the masks, those bits of the constants must be the same.
5989 If not, the condition is always false. Test for this to avoid generating
5990 incorrect code below. */
5991 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5992 if (! integer_zerop (result
)
5993 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5994 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5996 if (wanted_code
== NE_EXPR
)
5998 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5999 return constant_boolean_node (true, truth_type
);
6003 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6004 return constant_boolean_node (false, truth_type
);
6011 /* Construct the expression we will return. First get the component
6012 reference we will make. Unless the mask is all ones the width of
6013 that field, perform the mask operation. Then compare with the
6015 result
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6016 lntype
, lnbitsize
, lnbitpos
,
6017 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6019 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6020 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6021 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
6023 return build2_loc (loc
, wanted_code
, truth_type
, result
,
6024 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
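/* To make the constant case above concrete (an illustrative sketch;
   the exact layout depends on the target's bit-field allocation):
   given
     struct s { unsigned a : 4; unsigned b : 4; } *p;
   the test p->a == 2 && p->b == 4 can be done by loading the byte
   containing both fields once and comparing it masked, roughly
   ((*(unsigned char *) p) & 0xff) == 0x42 on a little-endian target,
   which is the single masked comparison built here.  */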
6027 /* T is an integer expression that is being multiplied, divided, or taken a
6028 modulus (CODE says which and what kind of divide or modulus) by a
6029 constant C. See if we can eliminate that operation by folding it with
6030 other operations already in T. WIDE_TYPE, if non-null, is a type that
6031 should be used for the computation if wider than our type.
6033 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6034 (X * 2) + (Y * 4). We must, however, be assured that either the original
6035 expression would not overflow or that overflow is undefined for the type
6036 in the language in question.
6038 If we return a non-null expression, it is an equivalent form of the
6039 original computation, but need not be in the original type.
6041 We set *STRICT_OVERFLOW_P to true if the return values depends on
6042 signed overflow being undefined. Otherwise we do not change
6043 *STRICT_OVERFLOW_P. */
6046 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6047 bool *strict_overflow_p
)
6049 /* To avoid exponential search depth, refuse to allow recursion past
6050 three levels. Beyond that (1) it's highly unlikely that we'll find
6051 something interesting and (2) we've probably processed it before
6052 when we built the inner expression. */
6061 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6068 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6069 bool *strict_overflow_p
)
6071 tree type
= TREE_TYPE (t
);
6072 enum tree_code tcode
= TREE_CODE (t
);
6073 tree ctype
= (wide_type
!= 0
6074 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type
))
6075 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
)))
6076 ? wide_type
: type
);
6078 int same_p
= tcode
== code
;
6079 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6080 bool sub_strict_overflow_p
;
6082 /* Don't deal with constants of zero here; they confuse the code below. */
6083 if (integer_zerop (c
))
6086 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6087 op0
= TREE_OPERAND (t
, 0);
6089 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6090 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6092 /* Note that we need not handle conditional operations here since fold
6093 already handles those cases. So just do arithmetic here. */
6097 /* For a constant, we can always simplify if we are a multiply
6098 or (for divide and modulus) if it is a multiple of our constant. */
6099 if (code
== MULT_EXPR
6100 || wi::multiple_of_p (wi::to_wide (t
), wi::to_wide (c
),
6103 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6104 fold_convert (ctype
, c
));
6105 /* If the multiplication overflowed, we lost information on it.
6106 See PR68142 and PR69845. */
6107 if (TREE_OVERFLOW (tem
))
6113 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6114 /* If op0 is an expression ... */
6115 if ((COMPARISON_CLASS_P (op0
)
6116 || UNARY_CLASS_P (op0
)
6117 || BINARY_CLASS_P (op0
)
6118 || VL_EXP_CLASS_P (op0
)
6119 || EXPRESSION_CLASS_P (op0
))
6120 /* ... and has wrapping overflow, and its type is smaller
6121 than ctype, then we cannot pass through as widening. */
6122 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6123 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6124 && (TYPE_PRECISION (ctype
)
6125 > TYPE_PRECISION (TREE_TYPE (op0
))))
6126 /* ... or this is a truncation (t is narrower than op0),
6127 then we cannot pass through this narrowing. */
6128 || (TYPE_PRECISION (type
)
6129 < TYPE_PRECISION (TREE_TYPE (op0
)))
6130 /* ... or signedness changes for division or modulus,
6131 then we cannot pass through this conversion. */
6132 || (code
!= MULT_EXPR
6133 && (TYPE_UNSIGNED (ctype
)
6134 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6135 /* ... or has undefined overflow while the converted to
6136 type has not, we cannot do the operation in the inner type
6137 as that would introduce undefined overflow. */
6138 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6139 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6140 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6143 /* Pass the constant down and see if we can make a simplification. If
6144 we can, replace this expression with the inner simplification for
6145 possible later conversion to our or some other type. */
6146 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6147 && TREE_CODE (t2
) == INTEGER_CST
6148 && !TREE_OVERFLOW (t2
)
6149 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6151 ? ctype
: NULL_TREE
,
6152 strict_overflow_p
))))
6157 /* If widening the type changes it from signed to unsigned, then we
6158 must avoid building ABS_EXPR itself as unsigned. */
6159 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6161 tree cstype
= (*signed_type_for
) (ctype
);
6162 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6165 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6166 return fold_convert (ctype
, t1
);
6170 /* If the constant is negative, we cannot simplify this. */
6171 if (tree_int_cst_sgn (c
) == -1)
6175 /* For division and modulus, type can't be unsigned, as e.g.
6176 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6177 For signed types, even with wrapping overflow, this is fine. */
6178 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6180 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6182 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6185 case MIN_EXPR
: case MAX_EXPR
:
6186 /* If widening the type changes the signedness, then we can't perform
6187 this optimization as that changes the result. */
6188 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6191 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6192 sub_strict_overflow_p
= false;
6193 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6194 &sub_strict_overflow_p
)) != 0
6195 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6196 &sub_strict_overflow_p
)) != 0)
6198 if (tree_int_cst_sgn (c
) < 0)
6199 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6200 if (sub_strict_overflow_p
)
6201 *strict_overflow_p
= true;
6202 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6203 fold_convert (ctype
, t2
));
6207 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
6214 if (TREE_CODE (op1
) == INTEGER_CST
6215 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6216 /* const_binop may not detect overflow correctly,
6217 so check for it explicitly here. */
6218 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
6220 && 0 != (t1
= fold_convert (ctype
,
6221 const_binop (LSHIFT_EXPR
,
6224 && !TREE_OVERFLOW (t1
))
6225 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6226 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6228 fold_convert (ctype
, op0
),
6230 c
, code
, wide_type
, strict_overflow_p
);
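      /* A minimal, self-contained illustration (not from the GCC sources) of
	 why the shift case above can reuse the multiply/divide machinery:
	 E << N is E * (1 << N), and for unsigned E, E >> N is E / (1 << N).
	 Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int x = 1234u;
  assert ((x << 3) == x * 8u);   /* left shift is a multiplication       */
  assert ((x >> 3) == x / 8u);   /* unsigned right shift is a floor div  */
  return 0;
}
#endif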
6233 case PLUS_EXPR
: case MINUS_EXPR
:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
6238 sub_strict_overflow_p
= false;
6239 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6240 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6241 if (t1
!= 0 && t2
!= 0
6242 && TYPE_OVERFLOW_WRAPS (ctype
)
6243 && (code
== MULT_EXPR
6244 /* If not multiplication, we can only do this if both operands
6245 are divisible by c. */
6246 || (multiple_of_p (ctype
, op0
, c
)
6247 && multiple_of_p (ctype
, op1
, c
))))
6249 if (sub_strict_overflow_p
)
6250 *strict_overflow_p
= true;
6251 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6252 fold_convert (ctype
, t2
));
6255 /* If this was a subtraction, negate OP1 and set it to be an addition.
6256 This simplifies the logic below. */
6257 if (tcode
== MINUS_EXPR
)
6259 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6260 /* If OP1 was not easily negatable, the constant may be OP0. */
6261 if (TREE_CODE (op0
) == INTEGER_CST
)
6263 std::swap (op0
, op1
);
6268 if (TREE_CODE (op1
) != INTEGER_CST
)
      /* If either OP1 or C is negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
6274 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6276 if (code
== CEIL_DIV_EXPR
)
6277 code
= FLOOR_DIV_EXPR
;
6278 else if (code
== FLOOR_DIV_EXPR
)
6279 code
= CEIL_DIV_EXPR
;
6280 else if (code
!= MULT_EXPR
6281 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
6287 if (code
== MULT_EXPR
6288 || wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6291 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6292 fold_convert (ctype
, c
));
6293 /* We allow the constant to overflow with wrapping semantics. */
6295 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6301 /* If we have an unsigned type, we cannot widen the operation since it
6302 will change the result if the original computation overflowed. */
6303 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6311 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6312 return fold_build2 (tcode
, ctype
,
6313 fold_build2 (code
, ctype
,
6314 fold_convert (ctype
, op0
),
6315 fold_convert (ctype
, c
)),
6321 /* We have a special case here if we are doing something like
6322 (C * 8) % 4 since we know that's zero. */
6323 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6324 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6325 /* If the multiplication can overflow we cannot optimize this. */
6326 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6327 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6328 && wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6331 *strict_overflow_p
= true;
6332 return omit_one_operand (type
, integer_zero_node
, op0
);
6335 /* ... fall through ... */
6337 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6338 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
6343 && TYPE_OVERFLOW_WRAPS (ctype
)
6344 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6345 strict_overflow_p
)) != 0)
6346 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6347 fold_convert (ctype
, op1
));
6348 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6349 && TYPE_OVERFLOW_WRAPS (ctype
)
6350 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6351 strict_overflow_p
)) != 0)
6352 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6353 fold_convert (ctype
, t1
));
6354 else if (TREE_CODE (op1
) != INTEGER_CST
)
6357 /* If these are the same operation types, we can associate them
6358 assuming no overflow. */
6361 bool overflow_p
= false;
6362 bool overflow_mul_p
;
6363 signop sign
= TYPE_SIGN (ctype
);
6364 unsigned prec
= TYPE_PRECISION (ctype
);
6365 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
6366 wi::to_wide (c
, prec
),
6367 sign
, &overflow_mul_p
);
6368 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6370 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6373 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6374 wide_int_to_tree (ctype
, mul
));
      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
6384 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6385 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6386 || (tcode
== MULT_EXPR
6387 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6388 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6389 && code
!= MULT_EXPR
)))
6391 if (wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6394 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6395 *strict_overflow_p
= true;
6396 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6397 fold_convert (ctype
,
6398 const_binop (TRUNC_DIV_EXPR
,
6401 else if (wi::multiple_of_p (wi::to_wide (c
), wi::to_wide (op1
),
6404 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6405 *strict_overflow_p
= true;
6406 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6407 fold_convert (ctype
,
6408 const_binop (TRUNC_DIV_EXPR
,
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;

  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;

  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */
6451 fold_binary_op_with_conditional_arg (location_t loc
,
6452 enum tree_code code
,
6453 tree type
, tree op0
, tree op1
,
6454 tree cond
, tree arg
, int cond_first_p
)
6456 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6457 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6458 tree test
, true_value
, false_value
;
6459 tree lhs
= NULL_TREE
;
6460 tree rhs
= NULL_TREE
;
6461 enum tree_code cond_code
= COND_EXPR
;
6463 if (TREE_CODE (cond
) == COND_EXPR
6464 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6466 test
= TREE_OPERAND (cond
, 0);
6467 true_value
= TREE_OPERAND (cond
, 1);
6468 false_value
= TREE_OPERAND (cond
, 2);
6469 /* If this operand throws an expression, then it does not make
6470 sense to try to perform a logical or arithmetic operation
6472 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6474 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6477 else if (!(TREE_CODE (type
) != VECTOR_TYPE
6478 && TREE_CODE (TREE_TYPE (cond
)) == VECTOR_TYPE
))
6480 tree testtype
= TREE_TYPE (cond
);
6482 true_value
= constant_boolean_node (true, testtype
);
6483 false_value
= constant_boolean_node (false, testtype
);
6486 /* Detect the case of mixing vector and scalar types - bail out. */
6489 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6490 cond_code
= VEC_COND_EXPR
;
6492 /* This transformation is only worthwhile if we don't have to wrap ARG
6493 in a SAVE_EXPR and the operation can be simplified without recursing
6494 on at least one of the branches once its pushed inside the COND_EXPR. */
6495 if (!TREE_CONSTANT (arg
)
6496 && (TREE_SIDE_EFFECTS (arg
)
6497 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6498 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6501 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6504 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6506 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6508 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6512 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6514 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6516 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6519 /* Check that we have simplified at least one of the branches. */
6520 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6523 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
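/* A minimal, self-contained illustration (not from the GCC sources) of the
   source-level effect of the transformation above: an operation is pushed
   into both arms of a conditional.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a = 10, x = 3, y = 7;
  int b = (x < y);
  /* a + (b ? x : y)  ==>  b ? (a + x) : (a + y)  */
  assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
  /* a + (x < y)      ==>  (x < y) ? (a + 1) : (a + 0)  */
  assert (a + (x < y) == ((x < y) ? (a + 1) : (a + 0)));
  return 0;
}
#endif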
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6540 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6542 if (!real_zerop (addend
))
6545 /* Don't allow the fold with -fsignaling-nans. */
6546 if (HONOR_SNANS (element_mode (type
)))
6549 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6550 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6553 /* In a vector or complex, we would need to check the sign of all zeros. */
6554 if (TREE_CODE (addend
) != REAL_CST
)
6557 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6558 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6561 /* The mode has signed zeros, and we have to honor their sign.
6562 In this situation, there is only one case we can return true for.
6563 X - 0 is the same as X unless rounding towards -infinity is
6565 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
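/* A minimal, self-contained illustration (not from the GCC sources) of why
   X + 0.0 cannot be folded to X when signed zeros are honored: for
   X == -0.0, X + 0.0 is +0.0 under the default rounding mode, while
   X - 0.0 stays -0.0.  Kept out of the build with #if 0; assumes IEEE
   floating point.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  assert (signbit (x));            /* x is the negative zero         */
  assert (!signbit (x + 0.0));     /* -0.0 + 0.0 is +0.0, not x      */
  assert (signbit (x - 0.0));      /* -0.0 - 0.0 remains -0.0        */
  return 0;
}
#endif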
/* Subroutine of match.pd that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  ARG01 and ARG1 must be an INTEGER_CST.  */
6576 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
6577 tree
*hi
, bool *neg_overflow
)
6579 tree prod
, tmp
, type
= TREE_TYPE (c1
);
6580 signop sign
= TYPE_SIGN (type
);
6583 /* We have to do this the hard way to detect unsigned overflow.
6584 prod = int_const_binop (MULT_EXPR, c1, c2); */
6585 wide_int val
= wi::mul (wi::to_wide (c1
), wi::to_wide (c2
), sign
, &overflow
);
6586 prod
= force_fit_type (type
, val
, -1, overflow
);
6587 *neg_overflow
= false;
6589 if (sign
== UNSIGNED
)
6591 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
6594 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6595 val
= wi::add (wi::to_wide (prod
), wi::to_wide (tmp
), sign
, &overflow
);
6596 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
6598 else if (tree_int_cst_sgn (c1
) >= 0)
6600 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
6601 switch (tree_int_cst_sgn (c2
))
6604 *neg_overflow
= true;
6605 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6610 *lo
= fold_negate_const (tmp
, type
);
6615 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6625 /* A negative divisor reverses the relational operators. */
6626 code
= swap_tree_comparison (code
);
6628 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
6629 switch (tree_int_cst_sgn (c2
))
6632 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6637 *hi
= fold_negate_const (tmp
, type
);
6642 *neg_overflow
= true;
6643 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6652 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
6655 if (TREE_OVERFLOW (*lo
)
6656 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
6658 if (TREE_OVERFLOW (*hi
)
6659 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
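/* A minimal, self-contained illustration (not from the GCC sources) of the
   range test that the division-vs-constant comparison above boils down to
   for nonnegative values: x / 10 == 4 exactly when 40 <= x <= 49.  Kept out
   of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int x;
  for (x = 0; x < 200; x++)
    assert ((x / 10 == 4) == (x >= 40 && x <= 49));
  return 0;
}
#endif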
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */
6672 fold_single_bit_test_into_sign_test (location_t loc
,
6673 enum tree_code code
, tree arg0
, tree arg1
,
6676 /* If this is testing a single bit, we can optimize the test. */
6677 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6678 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6679 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6681 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6682 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6683 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6685 if (arg00
!= NULL_TREE
6686 /* This is only a win if casting to a signed type is cheap,
6687 i.e. when arg00's type is not a partial mode. */
6688 && type_has_mode_precision_p (TREE_TYPE (arg00
)))
6690 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6691 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6693 fold_convert_loc (loc
, stype
, arg00
),
6694 build_int_cst (stype
, 0));
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */
6707 fold_single_bit_test (location_t loc
, enum tree_code code
,
6708 tree arg0
, tree arg1
, tree result_type
)
6710 /* If this is testing a single bit, we can optimize the test. */
6711 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6712 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6713 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6715 tree inner
= TREE_OPERAND (arg0
, 0);
6716 tree type
= TREE_TYPE (arg0
);
6717 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6718 scalar_int_mode operand_mode
= SCALAR_INT_TYPE_MODE (type
);
6720 tree signed_type
, unsigned_type
, intermediate_type
;
6723 /* First, see if we can fold the single bit test into a sign-bit
6725 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6730 /* Otherwise we have (A & C) != 0 where C is a single bit,
6731 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6732 Similarly for (A & C) == 0. */
6734 /* If INNER is a right shift of a constant and it plus BITNUM does
6735 not overflow, adjust BITNUM and INNER. */
6736 if (TREE_CODE (inner
) == RSHIFT_EXPR
6737 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6738 && bitnum
< TYPE_PRECISION (type
)
6739 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner
, 1)),
6740 TYPE_PRECISION (type
) - bitnum
))
6742 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6743 inner
= TREE_OPERAND (inner
, 0);
6746 /* If we are going to be able to omit the AND below, we must do our
6747 operations as unsigned. If we must use the AND, we have a choice.
6748 Normally unsigned is faster, but for some machines signed is. */
6749 ops_unsigned
= (load_extend_op (operand_mode
) == SIGN_EXTEND
6750 && !flag_syntax_only
) ? 0 : 1;
6752 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6753 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6754 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6755 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6758 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6759 inner
, size_int (bitnum
));
6761 one
= build_int_cst (intermediate_type
, 1);
6763 if (code
== EQ_EXPR
)
6764 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6766 /* Put the AND last so it can combine with more things. */
6767 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6769 /* Make sure to return the proper type. */
6770 inner
= fold_convert_loc (loc
, result_type
, inner
);
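/* A minimal, self-contained illustration (not from the GCC sources) of the
   shift-and-mask form built above for a single-bit mask:
   (A & C) != 0 becomes (A >> log2(C)) & 1.  Kept out of the build with
   #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int a = 0x2c;                        /* bit 3 is set   */
  assert (((a & 8u) != 0) == ((a >> 3) & 1u));
  a = 0x24;                                     /* bit 3 is clear */
  assert (((a & 8u) != 0) == ((a >> 3) & 1u));
  return 0;
}
#endif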
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   is not.  */
6782 tree_swap_operands_p (const_tree arg0
, const_tree arg1
)
6784 if (CONSTANT_CLASS_P (arg1
))
6786 if (CONSTANT_CLASS_P (arg0
))
6792 if (TREE_CONSTANT (arg1
))
6794 if (TREE_CONSTANT (arg0
))
6797 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6798 for commutative and comparison operators. Ensuring a canonical
6799 form allows the optimizers to find additional redundancies without
6800 having to explicitly check for both orderings. */
6801 if (TREE_CODE (arg0
) == SSA_NAME
6802 && TREE_CODE (arg1
) == SSA_NAME
6803 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6806 /* Put SSA_NAMEs last. */
6807 if (TREE_CODE (arg1
) == SSA_NAME
)
6809 if (TREE_CODE (arg0
) == SSA_NAME
)
6812 /* Put variables last. */
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6827 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6829 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6831 if (TREE_CODE (bound
) == LT_EXPR
)
6832 a
= TREE_OPERAND (bound
, 0);
6833 else if (TREE_CODE (bound
) == GT_EXPR
)
6834 a
= TREE_OPERAND (bound
, 1);
6838 typea
= TREE_TYPE (a
);
6839 if (!INTEGRAL_TYPE_P (typea
)
6840 && !POINTER_TYPE_P (typea
))
6843 if (TREE_CODE (ineq
) == LT_EXPR
)
6845 a1
= TREE_OPERAND (ineq
, 1);
6846 y
= TREE_OPERAND (ineq
, 0);
6848 else if (TREE_CODE (ineq
) == GT_EXPR
)
6850 a1
= TREE_OPERAND (ineq
, 0);
6851 y
= TREE_OPERAND (ineq
, 1);
6856 if (TREE_TYPE (a1
) != typea
)
6859 if (POINTER_TYPE_P (typea
))
6861 /* Convert the pointer types into integer before taking the difference. */
6862 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6863 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6864 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6867 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6869 if (!diff
|| !integer_onep (diff
))
6872 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
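/* A minimal, self-contained illustration (not from the GCC sources) of the
   rewrite above: when A is already bounded so that A + 1 cannot wrap,
   A + 1 > Y is the same as A >= Y.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x = 50, y = 20, a;
  for (a = -100; a < 100; a++)
    if (a < x)                       /* the BOUND: A < X, so A + 1 is safe */
      assert ((a + 1 > y) == (a >= y));
  return 0;
}
#endif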
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */
6879 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6880 tree arg0
, tree arg1
)
6882 tree arg00
, arg01
, arg10
, arg11
;
6883 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */
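  /* A minimal, self-contained illustration (not from the GCC sources) of
     the factorings named in the comment above, shown in unsigned (i.e.
     wrapping) arithmetic where they hold unconditionally.  Kept out of the
     build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int a = 123u, b = 456u, c = 7u;
  assert (a * c + b * c == (a + b) * c);   /* (A*C) + (B*C) -> (A+B)*C */
  assert (a * c - a     == a * (c - 1));   /* (A*C) - A     -> A*(C-1) */
  return 0;
}
#endif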
6891 if (TREE_CODE (arg0
) == MULT_EXPR
)
6893 arg00
= TREE_OPERAND (arg0
, 0);
6894 arg01
= TREE_OPERAND (arg0
, 1);
6896 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6898 arg00
= build_one_cst (type
);
6903 /* We cannot generate constant 1 for fract. */
6904 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6907 arg01
= build_one_cst (type
);
6909 if (TREE_CODE (arg1
) == MULT_EXPR
)
6911 arg10
= TREE_OPERAND (arg1
, 0);
6912 arg11
= TREE_OPERAND (arg1
, 1);
6914 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6916 arg10
= build_one_cst (type
);
6917 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6918 the purpose of this canonicalization. */
6919 if (wi::neg_p (wi::to_wide (arg1
), TYPE_SIGN (TREE_TYPE (arg1
)))
6920 && negate_expr_p (arg1
)
6921 && code
== PLUS_EXPR
)
6923 arg11
= negate_expr (arg1
);
6931 /* We cannot generate constant 1 for fract. */
6932 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6935 arg11
= build_one_cst (type
);
6939 /* Prefer factoring a common non-constant. */
6940 if (operand_equal_p (arg00
, arg10
, 0))
6941 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6942 else if (operand_equal_p (arg01
, arg11
, 0))
6943 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6944 else if (operand_equal_p (arg00
, arg11
, 0))
6945 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6946 else if (operand_equal_p (arg01
, arg10
, 0))
6947 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6949 /* No identical multiplicands; see if we can find a common
6950 power-of-two factor in non-power-of-two multiplies. This
6951 can help in multi-dimensional array access. */
6952 else if (tree_fits_shwi_p (arg01
)
6953 && tree_fits_shwi_p (arg11
))
6955 HOST_WIDE_INT int01
, int11
, tmp
;
6958 int01
= tree_to_shwi (arg01
);
6959 int11
= tree_to_shwi (arg11
);
6961 /* Move min of absolute values to int11. */
6962 if (absu_hwi (int01
) < absu_hwi (int11
))
6964 tmp
= int01
, int01
= int11
, int11
= tmp
;
6965 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6972 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6973 /* The remainder should not be a constant, otherwise we
6974 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6975 increased the number of multiplications necessary. */
6976 && TREE_CODE (arg10
) != INTEGER_CST
)
6978 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6979 build_int_cst (TREE_TYPE (arg00
),
6984 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
6991 if (! INTEGRAL_TYPE_P (type
)
6992 || TYPE_OVERFLOW_WRAPS (type
)
6993 /* We are neither factoring zero nor minus one. */
6994 || TREE_CODE (same
) == INTEGER_CST
)
6995 return fold_build2_loc (loc
, MULT_EXPR
, type
,
6996 fold_build2_loc (loc
, code
, type
,
6997 fold_convert_loc (loc
, type
, alt0
),
6998 fold_convert_loc (loc
, type
, alt1
)),
6999 fold_convert_loc (loc
, type
, same
));
7001 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7002 same may be minus one and thus the multiplication may overflow. Perform
7003 the operations in an unsigned type. */
7004 tree utype
= unsigned_type_for (type
);
7005 tree tem
= fold_build2_loc (loc
, code
, utype
,
7006 fold_convert_loc (loc
, utype
, alt0
),
7007 fold_convert_loc (loc
, utype
, alt1
));
7008 /* If the sum evaluated to a constant that is not -INF the multiplication
7010 if (TREE_CODE (tem
) == INTEGER_CST
7011 && (wi::to_wide (tem
)
7012 != wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
7013 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7014 fold_convert (type
, tem
), same
);
7016 return fold_convert_loc (loc
, type
,
7017 fold_build2_loc (loc
, MULT_EXPR
, utype
, tem
,
7018 fold_convert_loc (loc
, utype
, same
)));
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7027 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7029 tree type
= TREE_TYPE (expr
);
7030 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
7031 int byte
, offset
, word
, words
;
7032 unsigned char value
;
7034 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7041 return MIN (len
, total_bytes
- off
);
7043 words
= total_bytes
/ UNITS_PER_WORD
;
7045 for (byte
= 0; byte
< total_bytes
; byte
++)
7047 int bitpos
= byte
* BITS_PER_UNIT
;
7048 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7050 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7052 if (total_bytes
> UNITS_PER_WORD
)
7054 word
= byte
/ UNITS_PER_WORD
;
7055 if (WORDS_BIG_ENDIAN
)
7056 word
= (words
- 1) - word
;
7057 offset
= word
* UNITS_PER_WORD
;
7058 if (BYTES_BIG_ENDIAN
)
7059 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7061 offset
+= byte
% UNITS_PER_WORD
;
7064 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7065 if (offset
>= off
&& offset
- off
< len
)
7066 ptr
[offset
- off
] = value
;
7068 return MIN (len
, total_bytes
- off
);
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7078 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7080 tree type
= TREE_TYPE (expr
);
7081 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
7082 int total_bytes
= GET_MODE_SIZE (mode
);
7083 FIXED_VALUE_TYPE value
;
7084 tree i_value
, i_type
;
7086 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7089 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7091 if (NULL_TREE
== i_type
|| TYPE_PRECISION (i_type
) != total_bytes
)
7094 value
= TREE_FIXED_CST (expr
);
7095 i_value
= double_int_to_tree (i_type
, value
.data
);
7097 return native_encode_int (i_value
, ptr
, len
, off
);
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7107 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7109 tree type
= TREE_TYPE (expr
);
7110 int total_bytes
= GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type
));
7111 int byte
, offset
, word
, words
, bitpos
;
7112 unsigned char value
;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7119 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7126 return MIN (len
, total_bytes
- off
);
7128 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7130 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7132 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7133 bitpos
+= BITS_PER_UNIT
)
7135 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7136 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7138 if (UNITS_PER_WORD
< 4)
7140 word
= byte
/ UNITS_PER_WORD
;
7141 if (WORDS_BIG_ENDIAN
)
7142 word
= (words
- 1) - word
;
7143 offset
= word
* UNITS_PER_WORD
;
7144 if (BYTES_BIG_ENDIAN
)
7145 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7147 offset
+= byte
% UNITS_PER_WORD
;
7152 if (BYTES_BIG_ENDIAN
)
7154 /* Reverse bytes within each long, or within the entire float
7155 if it's smaller than a long (for HFmode). */
7156 offset
= MIN (3, total_bytes
- 1) - offset
;
7157 gcc_assert (offset
>= 0);
7160 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7162 && offset
- off
< len
)
7163 ptr
[offset
- off
] = value
;
7165 return MIN (len
, total_bytes
- off
);
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7174 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7179 part
= TREE_REALPART (expr
);
7180 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7181 if (off
== -1 && rsize
== 0)
7183 part
= TREE_IMAGPART (expr
);
7185 off
= MAX (0, off
- GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part
))));
7186 isize
= native_encode_expr (part
, ptr
? ptr
+ rsize
: NULL
,
7188 if (off
== -1 && isize
!= rsize
)
7190 return rsize
+ isize
;
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7200 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7207 count
= VECTOR_CST_NELTS (expr
);
7208 itype
= TREE_TYPE (TREE_TYPE (expr
));
7209 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (itype
));
7210 for (i
= 0; i
< count
; i
++)
7217 elem
= VECTOR_CST_ELT (expr
, i
);
7218 int res
= native_encode_expr (elem
, ptr
? ptr
+ offset
: NULL
,
7220 if ((off
== -1 && res
!= size
) || res
== 0)
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
7238 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7240 tree type
= TREE_TYPE (expr
);
7242 /* Wide-char strings are encoded in target byte-order so native
7243 encoding them is trivial. */
7244 if (BITS_PER_UNIT
!= CHAR_BIT
7245 || TREE_CODE (type
) != ARRAY_TYPE
7246 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7247 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7250 HOST_WIDE_INT total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
7251 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7257 else if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7260 if (off
< TREE_STRING_LENGTH (expr
))
7262 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7263 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7265 memset (ptr
+ written
, 0,
7266 MIN (total_bytes
- written
, len
- written
));
7269 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7270 return MIN (total_bytes
- off
, len
);
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
   anything, just do a dry run.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */
7282 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7284 /* We don't support starting at negative offset and -1 is special. */
7288 switch (TREE_CODE (expr
))
7291 return native_encode_int (expr
, ptr
, len
, off
);
7294 return native_encode_real (expr
, ptr
, len
, off
);
7297 return native_encode_fixed (expr
, ptr
, len
, off
);
7300 return native_encode_complex (expr
, ptr
, len
, off
);
7303 return native_encode_vector (expr
, ptr
, len
, off
);
7306 return native_encode_string (expr
, ptr
, len
, off
);
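/* A minimal, self-contained illustration (not from the GCC sources) of what
   the encoders above produce for an integer constant on a little-endian,
   8-bit-byte target: the value is laid out byte by byte, least significant
   byte first.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t value = 0x11223344u;
  unsigned char buf[4];
  int i;

  /* Little-endian layout: byte I holds bits [8*I, 8*I + 7].  */
  for (i = 0; i < 4; i++)
    buf[i] = (value >> (8 * i)) & 0xff;

  assert (buf[0] == 0x44 && buf[1] == 0x33
	  && buf[2] == 0x22 && buf[3] == 0x11);
  return 0;
}
#endif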
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
7319 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7321 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
7323 if (total_bytes
> len
7324 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7327 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7329 return wide_int_to_tree (type
, result
);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
7338 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7340 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
7341 int total_bytes
= GET_MODE_SIZE (mode
);
7343 FIXED_VALUE_TYPE fixed_value
;
7345 if (total_bytes
> len
7346 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7349 result
= double_int::from_buffer (ptr
, total_bytes
);
7350 fixed_value
= fixed_from_double_int (result
, mode
);
7352 return build_fixed (type
, fixed_value
);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
7361 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7363 scalar_float_mode mode
= SCALAR_FLOAT_TYPE_MODE (type
);
7364 int total_bytes
= GET_MODE_SIZE (mode
);
7365 unsigned char value
;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7372 if (total_bytes
> len
|| total_bytes
> 24)
7374 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7376 memset (tmp
, 0, sizeof (tmp
));
7377 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7378 bitpos
+= BITS_PER_UNIT
)
7380 /* Both OFFSET and BYTE index within a long;
7381 bitpos indexes the whole float. */
7382 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7383 if (UNITS_PER_WORD
< 4)
7385 int word
= byte
/ UNITS_PER_WORD
;
7386 if (WORDS_BIG_ENDIAN
)
7387 word
= (words
- 1) - word
;
7388 offset
= word
* UNITS_PER_WORD
;
7389 if (BYTES_BIG_ENDIAN
)
7390 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7392 offset
+= byte
% UNITS_PER_WORD
;
7397 if (BYTES_BIG_ENDIAN
)
7399 /* Reverse bytes within each long, or within the entire float
7400 if it's smaller than a long (for HFmode). */
7401 offset
= MIN (3, total_bytes
- 1) - offset
;
7402 gcc_assert (offset
>= 0);
7405 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7407 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7410 real_from_target (&r
, tmp
, mode
);
7411 return build_real (type
, r
);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
7420 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7422 tree etype
, rpart
, ipart
;
7425 etype
= TREE_TYPE (type
);
7426 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
7429 rpart
= native_interpret_expr (etype
, ptr
, size
);
7432 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7435 return build_complex (type
, rpart
, ipart
);
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */
7444 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7449 etype
= TREE_TYPE (type
);
7450 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
7451 count
= TYPE_VECTOR_SUBPARTS (type
);
7452 if (size
* count
> len
)
7455 tree_vector_builder
elements (type
, count
, 1);
7456 for (i
= 0; i
< count
; ++i
)
7458 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7461 elements
.quick_push (elem
);
7463 return elements
.build ();
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */
7474 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7476 switch (TREE_CODE (type
))
7482 case REFERENCE_TYPE
:
7483 return native_interpret_int (type
, ptr
, len
);
7486 return native_interpret_real (type
, ptr
, len
);
7488 case FIXED_POINT_TYPE
:
7489 return native_interpret_fixed (type
, ptr
, len
);
7492 return native_interpret_complex (type
, ptr
, len
);
7495 return native_interpret_vector (type
, ptr
, len
);
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */
7506 can_native_interpret_type_p (tree type
)
7508 switch (TREE_CODE (type
))
7514 case REFERENCE_TYPE
:
7515 case FIXED_POINT_TYPE
:
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
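/* A minimal, self-contained illustration (not from the GCC sources) of the
   effect of folding a VIEW_CONVERT_EXPR: the bytes of one constant are
   reinterpreted as another type, exactly as memcpy-based type punning does
   at run time.  Assumes an IEEE single-precision float; kept out of the
   build with #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t bits;

  /* Reinterpreting 1.0f as a 32-bit integer yields its IEEE bit pattern.  */
  memcpy (&bits, &f, sizeof bits);
  assert (bits == 0x3f800000u);
  return 0;
}
#endif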
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */
7552 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7554 /* The size of the object is not relevant when talking about its address. */
7555 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7556 t
= TREE_OPERAND (t
, 0);
7558 if (TREE_CODE (t
) == INDIRECT_REF
)
7560 t
= TREE_OPERAND (t
, 0);
7562 if (TREE_TYPE (t
) != ptrtype
)
7563 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7565 else if (TREE_CODE (t
) == MEM_REF
7566 && integer_zerop (TREE_OPERAND (t
, 1)))
7567 return TREE_OPERAND (t
, 0);
7568 else if (TREE_CODE (t
) == MEM_REF
7569 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7570 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7571 TREE_OPERAND (t
, 0),
7572 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7573 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7575 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7577 if (TREE_TYPE (t
) != ptrtype
)
7578 t
= fold_convert_loc (loc
, ptrtype
, t
);
7581 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */
7601 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7605 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7607 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7608 && TREE_CODE_LENGTH (code
) == 1);
7613 if (CONVERT_EXPR_CODE_P (code
)
7614 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7616 /* Don't use STRIP_NOPS, because signedness of argument type
7618 STRIP_SIGN_NOPS (arg0
);
  /* Strip any conversions that don't change the mode.  This
     is safe for every expression, except for a comparison
     expression because its signedness is derived from its
     operand.

     Note that this is done as an internal manipulation within
     the constant folder, in order to find the simplest
     representation of the arguments so that their form can be
     studied.  In any case, the appropriate type conversions
     should be put back in the tree that will get out of the
     constant folder.  */
7636 if (CONSTANT_CLASS_P (arg0
))
7638 tree tem
= const_unop (code
, type
, arg0
);
7641 if (TREE_TYPE (tem
) != type
)
7642 tem
= fold_convert_loc (loc
, type
, tem
);
7648 tem
= generic_simplify (loc
, code
, type
, op0
);
7652 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7654 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7655 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7656 fold_build1_loc (loc
, code
, type
,
7657 fold_convert_loc (loc
, TREE_TYPE (op0
),
7658 TREE_OPERAND (arg0
, 1))));
7659 else if (TREE_CODE (arg0
) == COND_EXPR
)
7661 tree arg01
= TREE_OPERAND (arg0
, 1);
7662 tree arg02
= TREE_OPERAND (arg0
, 2);
7663 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7664 arg01
= fold_build1_loc (loc
, code
, type
,
7665 fold_convert_loc (loc
,
7666 TREE_TYPE (op0
), arg01
));
7667 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7668 arg02
= fold_build1_loc (loc
, code
, type
,
7669 fold_convert_loc (loc
,
7670 TREE_TYPE (op0
), arg02
));
7671 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */
7683 if ((CONVERT_EXPR_CODE_P (code
)
7684 || code
== NON_LVALUE_EXPR
)
7685 && TREE_CODE (tem
) == COND_EXPR
7686 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7687 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7688 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7689 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7690 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7691 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7692 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7694 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7695 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7696 || flag_syntax_only
))
7697 tem
= build1_loc (loc
, code
, type
,
7699 TREE_TYPE (TREE_OPERAND
7700 (TREE_OPERAND (tem
, 1), 0)),
7701 TREE_OPERAND (tem
, 0),
7702 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7703 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7711 case NON_LVALUE_EXPR
:
7712 if (!maybe_lvalue_p (op0
))
7713 return fold_convert_loc (loc
, type
, op0
);
7718 case FIX_TRUNC_EXPR
:
7719 if (COMPARISON_CLASS_P (op0
))
      /* If we have (type) (a CMP b) and type is an integral type, return
	 new expression involving the new type.  Canonicalize
	 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	 non-integral type.
	 Do not fold the result as that would not simplify further, also
	 folding again results in recursions.  */
7727 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7728 return build2_loc (loc
, TREE_CODE (op0
), type
,
7729 TREE_OPERAND (op0
, 0),
7730 TREE_OPERAND (op0
, 1));
7731 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7732 && TREE_CODE (type
) != VECTOR_TYPE
)
7733 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7734 constant_boolean_node (true, type
),
7735 constant_boolean_node (false, type
));
7738 /* Handle (T *)&A.B.C for A being of type T and B and C
7739 living at offset zero. This occurs frequently in
7740 C++ upcasting and then accessing the base. */
7741 if (TREE_CODE (op0
) == ADDR_EXPR
7742 && POINTER_TYPE_P (type
)
7743 && handled_component_p (TREE_OPERAND (op0
, 0)))
7745 HOST_WIDE_INT bitsize
, bitpos
;
7748 int unsignedp
, reversep
, volatilep
;
7750 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
7751 &offset
, &mode
, &unsignedp
, &reversep
,
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
7756 if (! offset
&& bitpos
== 0
7757 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7758 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7759 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7760 return fold_convert_loc (loc
, type
,
7761 build_fold_addr_expr_loc (loc
, base
));
7764 if (TREE_CODE (op0
) == MODIFY_EXPR
7765 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7766 /* Detect assigning a bitfield. */
7767 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7769 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7771 /* Don't leave an assignment inside a conversion
7772 unless assigning a bitfield. */
7773 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7774 /* First do the assignment, then return converted constant. */
7775 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7776 TREE_NO_WARNING (tem
) = 1;
7777 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
7787 if (TREE_CODE (type
) == INTEGER_TYPE
7788 && TREE_CODE (op0
) == BIT_AND_EXPR
7789 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7791 tree and_expr
= op0
;
7792 tree and0
= TREE_OPERAND (and_expr
, 0);
7793 tree and1
= TREE_OPERAND (and_expr
, 1);
7796 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7797 || (TYPE_PRECISION (type
)
7798 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7800 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7801 <= HOST_BITS_PER_WIDE_INT
7802 && tree_fits_uhwi_p (and1
))
7804 unsigned HOST_WIDE_INT cst
;
7806 cst
= tree_to_uhwi (and1
);
7807 cst
&= HOST_WIDE_INT_M1U
7808 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7809 change
= (cst
== 0);
7811 && !flag_syntax_only
7812 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
7815 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7816 and0
= fold_convert_loc (loc
, uns
, and0
);
7817 and1
= fold_convert_loc (loc
, uns
, and1
);
7822 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7823 TREE_OVERFLOW (and1
));
7824 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7825 fold_convert_loc (loc
, type
, and0
), tem
);
7829 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7830 cast (T1)X will fold away. We assume that this happens when X itself
7832 if (POINTER_TYPE_P (type
)
7833 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7834 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
7836 tree arg00
= TREE_OPERAND (arg0
, 0);
7837 tree arg01
= TREE_OPERAND (arg0
, 1);
7839 return fold_build_pointer_plus_loc
7840 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7846 if (INTEGRAL_TYPE_P (type
)
7847 && TREE_CODE (op0
) == BIT_NOT_EXPR
7848 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7849 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7850 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7852 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7853 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7854 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7855 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7856 fold_convert_loc (loc
, type
, tem
));
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
7861 if (INTEGRAL_TYPE_P (type
)
7862 && TREE_CODE (op0
) == MULT_EXPR
7863 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7864 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7866 /* Be careful not to introduce new overflows. */
7868 if (TYPE_OVERFLOW_WRAPS (type
))
7871 mult_type
= unsigned_type_for (type
);
7873 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7875 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7876 fold_convert_loc (loc
, mult_type
,
7877 TREE_OPERAND (op0
, 0)),
7878 fold_convert_loc (loc
, mult_type
,
7879 TREE_OPERAND (op0
, 1)));
7880 return fold_convert_loc (loc
, type
, tem
);
7886 case VIEW_CONVERT_EXPR
:
7887 if (TREE_CODE (op0
) == MEM_REF
)
7889 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
7890 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
7891 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
7892 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7893 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
7900 tem
= fold_negate_expr (loc
, arg0
);
7902 return fold_convert_loc (loc
, type
, tem
);
7906 /* Convert fabs((double)float) into (double)fabsf(float). */
7907 if (TREE_CODE (arg0
) == NOP_EXPR
7908 && TREE_CODE (type
) == REAL_TYPE
)
7910 tree targ0
= strip_float_extensions (arg0
);
7912 return fold_convert_loc (loc
, type
,
7913 fold_build1_loc (loc
, ABS_EXPR
,
7920 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7921 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7922 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7923 fold_convert_loc (loc
, type
,
7924 TREE_OPERAND (arg0
, 0)))))
7925 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7926 fold_convert_loc (loc
, type
,
7927 TREE_OPERAND (arg0
, 1)));
7928 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7929 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7930 fold_convert_loc (loc
, type
,
7931 TREE_OPERAND (arg0
, 1)))))
7932 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7933 fold_convert_loc (loc
, type
,
7934 TREE_OPERAND (arg0
, 0)), tem
);
7938 case TRUTH_NOT_EXPR
:
7939 /* Note that the operand of this must be an int
7940 and its values must be 0 or 1.
7941 ("true" is a fixed value perhaps depending on the language,
7942 but we don't handle values other than 1 correctly yet.) */
7943 tem
= fold_truth_not_expr (loc
, arg0
);
7946 return fold_convert_loc (loc
, type
, tem
);
7949 /* Fold *&X to X if X is an lvalue. */
7950 if (TREE_CODE (op0
) == ADDR_EXPR
)
7952 tree op00
= TREE_OPERAND (op0
, 0);
7954 || TREE_CODE (op00
) == PARM_DECL
7955 || TREE_CODE (op00
) == RESULT_DECL
)
7956 && !TREE_READONLY (op00
))
7963 } /* switch (code) */
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */
7972 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
7973 tree type
, tree op0
)
7975 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
7977 && TREE_CODE (res
) == INTEGER_CST
7978 && TREE_CODE (op0
) == INTEGER_CST
7979 && CONVERT_EXPR_CODE_P (code
))
7980 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
7991 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
7992 tree arg0
, tree arg1
, tree op0
, tree op1
)
7996 /* We only do these simplifications if we are optimizing. */
  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
8006 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8007 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8008 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8009 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8010 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8011 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8013 tree a00
= TREE_OPERAND (arg0
, 0);
8014 tree a01
= TREE_OPERAND (arg0
, 1);
8015 tree a10
= TREE_OPERAND (arg1
, 0);
8016 tree a11
= TREE_OPERAND (arg1
, 1);
8017 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8018 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8019 && (code
== TRUTH_AND_EXPR
8020 || code
== TRUTH_OR_EXPR
));
8022 if (operand_equal_p (a00
, a10
, 0))
8023 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8024 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8025 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8026 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8027 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8028 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8029 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8030 fold_build2_loc (loc
, code
, type
, a00
, a11
));
      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */
8035 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8036 && operand_equal_p (a01
, a11
, 0))
8037 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8038 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8042 /* See if we can build a range comparison. */
8043 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8046 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8047 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8049 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8051 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8054 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8055 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8057 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8059 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
8065 if (TREE_CODE (arg0
) == code
8066 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8067 TREE_OPERAND (arg0
, 1), arg1
)))
8068 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8070 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8073 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8074 && !flag_sanitize_coverage
8075 && (code
== TRUTH_AND_EXPR
8076 || code
== TRUTH_ANDIF_EXPR
8077 || code
== TRUTH_OR_EXPR
8078 || code
== TRUTH_ORIF_EXPR
))
8080 enum tree_code ncode
, icode
;
8082 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8083 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8084 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8086 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8087 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8088 We don't want to pack more than two leafs to a non-IF AND/OR
8090 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8091 equal to IF-CODE, then we don't want to add right-hand operand.
8092 If the inner right-hand side of left-hand operand has
8093 side-effects, or isn't simple, then we can't add to it,
8094 as otherwise we might destroy if-sequence. */
8095 if (TREE_CODE (arg0
) == icode
8096 && simple_operand_p_2 (arg1
)
8097 /* Needed for sequence points to handle trappings, and
8099 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8101 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8103 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8108 else if (TREE_CODE (arg1
) == icode
8109 && simple_operand_p_2 (arg0
)
8110 /* Needed for sequence points to handle trappings, and
8112 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8114 tem
= fold_build2_loc (loc
, ncode
, type
,
8115 arg0
, TREE_OPERAND (arg1
, 0));
8116 return fold_build2_loc (loc
, icode
, type
, tem
,
8117 TREE_OPERAND (arg1
, 1));
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
8123 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8124 && simple_operand_p_2 (arg1
))
8125 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */
static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */
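/* For instance, for &p->x where the member X lives at byte offset 4,
   BASE is P, OFFSET is NULL_TREE and BITPOS is 32; the routine checks
   whether OFFSET plus BITPOS / BITS_PER_UNIT can exceed the size of
   the object P points to.  */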
static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = wi::to_wide (offset);

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.
   For function local objects always return a positive integer.  */

static int
maybe_nonzero_address (tree decl)
{
  if (DECL_P (decl) && decl_in_symtab_p (decl))
    if (struct symtab_node *symbol = symtab_node::get_create (decl))
      return symbol->nonzero_address ();

  /* Function local objects are never NULL.  */
  if (DECL_P (decl)
      && (DECL_CONTEXT (decl)
	  && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
	  && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
    return 1;

  return -1;
}
8318 /* Subroutine of fold_binary. This routine performs all of the
8319 transformations that are common to the equality/inequality
8320 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8321 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8322 fold_binary should call fold_binary. Fold a comparison with
8323 tree code CODE and type TYPE with operands OP0 and OP1. Return
8324 the folded comparison or NULL_TREE. */
8327 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8330 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8331 tree arg0
, arg1
, tem
;
8336 STRIP_SIGN_NOPS (arg0
);
8337 STRIP_SIGN_NOPS (arg1
);
8339 /* For comparisons of pointers we can decompose it to a compile time
8340 comparison of the base objects and the offsets into the object.
8341 This requires at least one operand being an ADDR_EXPR or a
8342 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8343 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8344 && (TREE_CODE (arg0
) == ADDR_EXPR
8345 || TREE_CODE (arg1
) == ADDR_EXPR
8346 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8347 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8349 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8350 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8352 int volatilep
, reversep
, unsignedp
;
8353 bool indirect_base0
= false, indirect_base1
= false;
8355 /* Get base and offset for the access. Strip ADDR_EXPR for
8356 get_inner_reference, but put it back by stripping INDIRECT_REF
8357 off the base object if possible. indirect_baseN will be true
8358 if baseN is not an address but refers to the object itself. */
8360 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8363 = get_inner_reference (TREE_OPERAND (arg0
, 0),
8364 &bitsize
, &bitpos0
, &offset0
, &mode
,
8365 &unsignedp
, &reversep
, &volatilep
);
8366 if (TREE_CODE (base0
) == INDIRECT_REF
)
8367 base0
= TREE_OPERAND (base0
, 0);
8369 indirect_base0
= true;
8371 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8373 base0
= TREE_OPERAND (arg0
, 0);
8374 STRIP_SIGN_NOPS (base0
);
8375 if (TREE_CODE (base0
) == ADDR_EXPR
)
8378 = get_inner_reference (TREE_OPERAND (base0
, 0),
8379 &bitsize
, &bitpos0
, &offset0
, &mode
,
8380 &unsignedp
, &reversep
, &volatilep
);
8381 if (TREE_CODE (base0
) == INDIRECT_REF
)
8382 base0
= TREE_OPERAND (base0
, 0);
8384 indirect_base0
= true;
8386 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
8387 offset0
= TREE_OPERAND (arg0
, 1);
8389 offset0
= size_binop (PLUS_EXPR
, offset0
,
8390 TREE_OPERAND (arg0
, 1));
8391 if (TREE_CODE (offset0
) == INTEGER_CST
)
8393 offset_int tem
= wi::sext (wi::to_offset (offset0
),
8394 TYPE_PRECISION (sizetype
));
8395 tem
<<= LOG2_BITS_PER_UNIT
;
8397 if (wi::fits_shwi_p (tem
))
8399 bitpos0
= tem
.to_shwi ();
8400 offset0
= NULL_TREE
;
8406 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8409 = get_inner_reference (TREE_OPERAND (arg1
, 0),
8410 &bitsize
, &bitpos1
, &offset1
, &mode
,
8411 &unsignedp
, &reversep
, &volatilep
);
8412 if (TREE_CODE (base1
) == INDIRECT_REF
)
8413 base1
= TREE_OPERAND (base1
, 0);
8415 indirect_base1
= true;
8417 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8419 base1
= TREE_OPERAND (arg1
, 0);
8420 STRIP_SIGN_NOPS (base1
);
8421 if (TREE_CODE (base1
) == ADDR_EXPR
)
8424 = get_inner_reference (TREE_OPERAND (base1
, 0),
8425 &bitsize
, &bitpos1
, &offset1
, &mode
,
8426 &unsignedp
, &reversep
, &volatilep
);
8427 if (TREE_CODE (base1
) == INDIRECT_REF
)
8428 base1
= TREE_OPERAND (base1
, 0);
8430 indirect_base1
= true;
8432 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
8433 offset1
= TREE_OPERAND (arg1
, 1);
8435 offset1
= size_binop (PLUS_EXPR
, offset1
,
8436 TREE_OPERAND (arg1
, 1));
8437 if (TREE_CODE (offset1
) == INTEGER_CST
)
8439 offset_int tem
= wi::sext (wi::to_offset (offset1
),
8440 TYPE_PRECISION (sizetype
));
8441 tem
<<= LOG2_BITS_PER_UNIT
;
8443 if (wi::fits_shwi_p (tem
))
8445 bitpos1
= tem
.to_shwi ();
8446 offset1
= NULL_TREE
;
8451 /* If we have equivalent bases we might be able to simplify. */
8452 if (indirect_base0
== indirect_base1
8453 && operand_equal_p (base0
, base1
,
8454 indirect_base0
? OEP_ADDRESS_OF
: 0))
8456 /* We can fold this expression to a constant if the non-constant
8457 offset parts are equal. */
8458 if (offset0
== offset1
8459 || (offset0
&& offset1
8460 && operand_equal_p (offset0
, offset1
, 0)))
8463 && bitpos0
!= bitpos1
8464 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8465 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8466 fold_overflow_warning (("assuming pointer wraparound does not "
8467 "occur when comparing P +- C1 with "
8469 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8474 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8476 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8478 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8480 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8482 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8484 return constant_boolean_node (bitpos0
> bitpos1
, type
);
      /* We can simplify the comparison to a comparison of the variable
	 offset parts if the constant offset parts are equal.
	 Be careful to use signed sizetype here because otherwise we
	 mess with array offsets in the wrong way.  This is possible
	 because pointer arithmetic is restricted to remain within an
	 object and overflow on pointer differences is undefined as of
	 6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8495 else if (bitpos0
== bitpos1
)
8497 /* By converting to signed sizetype we cover middle-end pointer
8498 arithmetic which operates on unsigned pointer types of size
8499 type size and ARRAY_REF offsets which are properly sign or
8500 zero extended from their type in case it is narrower than
8502 if (offset0
== NULL_TREE
)
8503 offset0
= build_int_cst (ssizetype
, 0);
8505 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8506 if (offset1
== NULL_TREE
)
8507 offset1
= build_int_cst (ssizetype
, 0);
8509 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8512 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8513 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8514 fold_overflow_warning (("assuming pointer wraparound does not "
8515 "occur when comparing P +- C1 with "
8517 WARN_STRICT_OVERFLOW_COMPARISON
);
8519 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8522 /* For equal offsets we can simplify to a comparison of the
8524 else if (bitpos0
== bitpos1
8526 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8528 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8529 && ((offset0
== offset1
)
8530 || (offset0
&& offset1
8531 && operand_equal_p (offset0
, offset1
, 0))))
8534 base0
= build_fold_addr_expr_loc (loc
, base0
);
8536 base1
= build_fold_addr_expr_loc (loc
, base1
);
8537 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8539 /* Comparison between an ordinary (non-weak) symbol and a null
8540 pointer can be eliminated since such symbols must have a non
8541 null address. In C, relational expressions between pointers
8542 to objects and null pointers are undefined. The results
8543 below follow the C++ rules with the additional property that
8544 every object pointer compares greater than a null pointer.
8546 else if (((DECL_P (base0
)
8547 && maybe_nonzero_address (base0
) > 0
8548 /* Avoid folding references to struct members at offset 0 to
8549 prevent tests like '&ptr->firstmember == 0' from getting
8550 eliminated. When ptr is null, although the -> expression
8551 is strictly speaking invalid, GCC retains it as a matter
8552 of QoI. See PR c/44555. */
8553 && (offset0
== NULL_TREE
&& bitpos0
!= 0))
8554 || CONSTANT_CLASS_P (base0
))
8556 /* The caller guarantees that when one of the arguments is
8557 constant (i.e., null in this case) it is second. */
8558 && integer_zerop (arg1
))
8565 return constant_boolean_node (false, type
);
8569 return constant_boolean_node (true, type
);
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
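  /* For example, with signed X and Y,
       X + 3 < Y + 5   becomes   X < Y + 2
     since the new constant 2 is smaller in absolute value than 5 and
     has the same sign.  */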
8580 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8581 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8582 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8583 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8584 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8585 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8586 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8587 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8589 tree const1
= TREE_OPERAND (arg0
, 1);
8590 tree const2
= TREE_OPERAND (arg1
, 1);
8591 tree variable1
= TREE_OPERAND (arg0
, 0);
8592 tree variable2
= TREE_OPERAND (arg1
, 0);
8594 const char * const warnmsg
= G_("assuming signed overflow does not "
8595 "occur when combining constants around "
      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of the same sign as before.  */
8600 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8601 ? MINUS_EXPR
: PLUS_EXPR
,
8603 if (!TREE_OVERFLOW (cst
)
8604 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8605 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8607 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8608 return fold_build2_loc (loc
, code
, type
,
8610 fold_build2_loc (loc
, TREE_CODE (arg1
),
8615 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8616 ? MINUS_EXPR
: PLUS_EXPR
,
8618 if (!TREE_OVERFLOW (cst
)
8619 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8620 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8622 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8623 return fold_build2_loc (loc
, code
, type
,
8624 fold_build2_loc (loc
, TREE_CODE (arg0
),
8631 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */
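  /* As an illustration, for (a > b) == 0 the three foldings evaluate to
     0, 1 and 1 for a > b, a == b and a < b respectively, which selects
     the LE_EXPR case below, so the whole expression folds to a <= b.  */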
8648 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8650 tree cval1
= 0, cval2
= 0;
8653 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8654 /* Don't handle degenerate cases here; they should already
8655 have been handled anyway. */
8656 && cval1
!= 0 && cval2
!= 0
8657 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8658 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8659 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8660 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8661 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8662 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8663 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8665 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8666 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8668 /* We can't just pass T to eval_subst in case cval1 or cval2
8669 was the same as ARG1. */
8672 = fold_build2_loc (loc
, code
, type
,
8673 eval_subst (loc
, arg0
, cval1
, maxval
,
8677 = fold_build2_loc (loc
, code
, type
,
8678 eval_subst (loc
, arg0
, cval1
, maxval
,
8682 = fold_build2_loc (loc
, code
, type
,
8683 eval_subst (loc
, arg0
, cval1
, minval
,
8687 /* All three of these results should be 0 or 1. Confirm they are.
8688 Then use those values to select the proper code to use. */
8690 if (TREE_CODE (high_result
) == INTEGER_CST
8691 && TREE_CODE (equal_result
) == INTEGER_CST
8692 && TREE_CODE (low_result
) == INTEGER_CST
)
8694 /* Make a 3-bit mask with the high-order bit being the
8695 value for `>', the next for '=', and the low for '<'. */
8696 switch ((integer_onep (high_result
) * 4)
8697 + (integer_onep (equal_result
) * 2)
8698 + integer_onep (low_result
))
8702 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8723 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8728 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8729 protected_set_expr_location (tem
, loc
);
8732 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */
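/* Concretely, for z = 3 + 4i this computes
   z * conj(z) = (3 + 4i) * (3 - 4i) = 3*3 + 4*4 = 25,
   i.e. __complex__ (25, 0).  */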
static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
   true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
  unsigned int i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */
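/* For example, with four-element vectors
   ARG0 = {a0, a1, a2, a3}, ARG1 = {b0, b1, b2, b3} and SEL = {0, 4, 1, 5},
   element I of the result is taken from the concatenation ARG0|ARG1 at
   index SEL[I], giving {a0, b0, a1, b1}.  */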
static tree
fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
{
  unsigned int i;
  bool need_ctor = false;

  unsigned int nelts = sel.length ();
  gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  tree *in_elts = XALLOCAVEC (tree, nelts * 2);
  if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
      || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
    return NULL_TREE;

  tree_vector_builder out_elts (type, nelts, 1);
  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
	need_ctor = true;
      out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
      return build_constructor (type, v);
    }

  return out_elts.build ();
}
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */
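/* For example, for int a[10] (assuming a 4-byte int) the byte difference
   of &a[i] and &a[j] folds to (i - j) * 4, i.e. the index difference
   scaled by the element size returned by array_ref_element_size.  */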
static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1,
				   bool use_pointer_diff)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
						use_pointer_diff)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
		= use_pointer_diff
		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
				     TREE_OPERAND (base0, 0),
				     TREE_OPERAND (base1, 0))
		  : fold_binary_loc (loc, MINUS_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (base0, 0)),
				     fold_convert (type,
						   TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */
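/* For example, the exact inverse of 0.25 is 4.0, so it is returned;
   3.0 has no exactly representable inverse (1/3 is not a binary
   fraction), so NULL is returned.  */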
static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type;
  machine_mode mode;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      {
	unit_type = TREE_TYPE (type);
	mode = TYPE_MODE (unit_type);

	tree_vector_builder elts;
	if (!elts.new_unary_operation (type, cst, false))
	  return NULL_TREE;
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	    if (!exact_real_inverse (mode, &r))
	      return NULL_TREE;
	    elts.quick_push (build_real (unit_type, r));
	  }

	return elts.build ();
      }

    default:
      return NULL_TREE;
    }
}
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
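/* For example, with Y = 24 (binary 11000, so tz = 3) and X = 23
   (binary 10111), the low three bits of X are cleared and the result
   is 16 (binary 10000).  */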
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
8965 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
8967 tree type
= TREE_TYPE (t
);
8968 enum tree_code code
;
8970 /* Doing something useful for floating point would need more work. */
8971 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
8974 code
= TREE_CODE (t
);
8975 switch (TREE_CODE_CLASS (code
))
8978 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8981 case tcc_comparison
:
8982 return tree_binary_nonzero_warnv_p (code
, type
,
8983 TREE_OPERAND (t
, 0),
8984 TREE_OPERAND (t
, 1),
8987 case tcc_declaration
:
8989 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
8997 case TRUTH_NOT_EXPR
:
8998 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9001 case TRUTH_AND_EXPR
:
9003 case TRUTH_XOR_EXPR
:
9004 return tree_binary_nonzero_warnv_p (code
, type
,
9005 TREE_OPERAND (t
, 0),
9006 TREE_OPERAND (t
, 1),
9014 case WITH_SIZE_EXPR
:
9016 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9021 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9025 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9030 tree fndecl
= get_callee_fndecl (t
);
9031 if (!fndecl
) return false;
9032 if (flag_delete_null_pointer_checks
&& !flag_check_new
9033 && DECL_IS_OPERATOR_NEW (fndecl
)
9034 && !TREE_NOTHROW (fndecl
))
9036 if (flag_delete_null_pointer_checks
9037 && lookup_attribute ("returns_nonnull",
9038 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9040 return alloca_call_p (t
);
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}

/* Return true if T is known not to be equal to an integer W.  */
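/* For example, if value-range information recorded the range [1, 10]
   for an SSA name T, then expr_not_equal_to (T, 0) returns true because
   0 lies below the recorded minimum.  */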
bool
expr_not_equal_to (tree t, const wide_int &w)
{
  wide_int min, max, nz;
  value_range_type rtype;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t) != w;

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return false;
      rtype = get_range_info (t, &min, &max);
      if (rtype == VR_RANGE)
	{
	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
	    return true;
	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
	    return true;
	}
      else if (rtype == VR_ANTI_RANGE
	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
	return true;
      /* If T has some known zero bits and W has any of those bits set,
	 then T is known not to be equal to W.  */
      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
			      TYPE_PRECISION (TREE_TYPE (t))), 0))
	return true;
      return false;

    default:
      return false;
    }
}
9106 /* Fold a binary expression of code CODE and type TYPE with operands
9107 OP0 and OP1. LOC is the location of the resulting expression.
9108 Return the folded expression if folding is successful. Otherwise,
9109 return NULL_TREE. */
9112 fold_binary_loc (location_t loc
, enum tree_code code
, tree type
,
9115 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9116 tree arg0
, arg1
, tem
;
9117 tree t1
= NULL_TREE
;
9118 bool strict_overflow_p
;
9121 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9122 && TREE_CODE_LENGTH (code
) == 2
9124 && op1
!= NULL_TREE
);
9129 /* Strip any conversions that don't change the mode. This is
9130 safe for every expression, except for a comparison expression
9131 because its signedness is derived from its operands. So, in
9132 the latter case, only strip conversions that don't change the
9133 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9136 Note that this is done as an internal manipulation within the
9137 constant folder, in order to find the simplest representation
9138 of the arguments so that their form can be studied. In any
9139 cases, the appropriate type conversions should be put back in
9140 the tree that will get out of the constant folder. */
9142 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9144 STRIP_SIGN_NOPS (arg0
);
9145 STRIP_SIGN_NOPS (arg1
);
9153 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9154 constant but we can't do arithmetic on them. */
9155 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9157 tem
= const_binop (code
, type
, arg0
, arg1
);
9158 if (tem
!= NULL_TREE
)
9160 if (TREE_TYPE (tem
) != type
)
9161 tem
= fold_convert_loc (loc
, type
, tem
);
9166 /* If this is a commutative operation, and ARG0 is a constant, move it
9167 to ARG1 to reduce the number of tests below. */
9168 if (commutative_tree_code (code
)
9169 && tree_swap_operands_p (arg0
, arg1
))
9170 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9172 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9173 to ARG1 to reduce the number of tests below. */
9174 if (kind
== tcc_comparison
9175 && tree_swap_operands_p (arg0
, arg1
))
9176 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9178 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9182 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9184 First check for cases where an arithmetic operation is applied to a
9185 compound, conditional, or comparison operation. Push the arithmetic
9186 operation inside the compound or conditional to see if any folding
9187 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
9191 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9192 one of the operands is a comparison and the other is a comparison, a
9193 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9194 code below would make the expression more complex. Change it to a
9195 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9196 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9198 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9199 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9200 && TREE_CODE (type
) != VECTOR_TYPE
9201 && ((truth_value_p (TREE_CODE (arg0
))
9202 && (truth_value_p (TREE_CODE (arg1
))
9203 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9204 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9205 || (truth_value_p (TREE_CODE (arg1
))
9206 && (truth_value_p (TREE_CODE (arg0
))
9207 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9208 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9210 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9211 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9214 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9215 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9217 if (code
== EQ_EXPR
)
9218 tem
= invert_truthvalue_loc (loc
, tem
);
9220 return fold_convert_loc (loc
, type
, tem
);
9223 if (TREE_CODE_CLASS (code
) == tcc_binary
9224 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9226 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9228 tem
= fold_build2_loc (loc
, code
, type
,
9229 fold_convert_loc (loc
, TREE_TYPE (op0
),
9230 TREE_OPERAND (arg0
, 1)), op1
);
9231 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9234 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
9236 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9237 fold_convert_loc (loc
, TREE_TYPE (op1
),
9238 TREE_OPERAND (arg1
, 1)));
9239 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9243 if (TREE_CODE (arg0
) == COND_EXPR
9244 || TREE_CODE (arg0
) == VEC_COND_EXPR
9245 || COMPARISON_CLASS_P (arg0
))
9247 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9249 /*cond_first_p=*/1);
9250 if (tem
!= NULL_TREE
)
9254 if (TREE_CODE (arg1
) == COND_EXPR
9255 || TREE_CODE (arg1
) == VEC_COND_EXPR
9256 || COMPARISON_CLASS_P (arg1
))
9258 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9260 /*cond_first_p=*/0);
9261 if (tem
!= NULL_TREE
)
9269 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9270 if (TREE_CODE (arg0
) == ADDR_EXPR
9271 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9273 tree iref
= TREE_OPERAND (arg0
, 0);
9274 return fold_build2 (MEM_REF
, type
,
9275 TREE_OPERAND (iref
, 0),
9276 int_const_binop (PLUS_EXPR
, arg1
,
9277 TREE_OPERAND (iref
, 1)));
9280 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9281 if (TREE_CODE (arg0
) == ADDR_EXPR
9282 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9285 HOST_WIDE_INT coffset
;
9286 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9290 return fold_build2 (MEM_REF
, type
,
9291 build_fold_addr_expr (base
),
9292 int_const_binop (PLUS_EXPR
, arg1
,
9293 size_int (coffset
)));
9298 case POINTER_PLUS_EXPR
:
9299 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9300 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9301 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9302 return fold_convert_loc (loc
, type
,
9303 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9304 fold_convert_loc (loc
, sizetype
,
9306 fold_convert_loc (loc
, sizetype
,
9312 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9314 /* X + (X / CST) * -CST is X % CST. */
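	  /* For example, X + (X / 8) * -8 is folded to X % 8, since with
	     truncating division (X / 8) * 8 is X rounded toward zero to a
	     multiple of 8.  */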
9315 if (TREE_CODE (arg1
) == MULT_EXPR
9316 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9317 && operand_equal_p (arg0
,
9318 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9320 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9321 tree cst1
= TREE_OPERAND (arg1
, 1);
9322 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9324 if (sum
&& integer_zerop (sum
))
9325 return fold_convert_loc (loc
, type
,
9326 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9327 TREE_TYPE (arg0
), arg0
,
9332 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9333 one. Make sure the type is not saturating and has the signedness of
9334 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9335 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9336 if ((TREE_CODE (arg0
) == MULT_EXPR
9337 || TREE_CODE (arg1
) == MULT_EXPR
)
9338 && !TYPE_SATURATING (type
)
9339 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9340 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9341 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9343 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9348 if (! FLOAT_TYPE_P (type
))
9350 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9351 (plus (plus (mult) (mult)) (foo)) so that we can
9352 take advantage of the factoring cases below. */
9353 if (ANY_INTEGRAL_TYPE_P (type
)
9354 && TYPE_OVERFLOW_WRAPS (type
)
9355 && (((TREE_CODE (arg0
) == PLUS_EXPR
9356 || TREE_CODE (arg0
) == MINUS_EXPR
)
9357 && TREE_CODE (arg1
) == MULT_EXPR
)
9358 || ((TREE_CODE (arg1
) == PLUS_EXPR
9359 || TREE_CODE (arg1
) == MINUS_EXPR
)
9360 && TREE_CODE (arg0
) == MULT_EXPR
)))
9362 tree parg0
, parg1
, parg
, marg
;
9363 enum tree_code pcode
;
9365 if (TREE_CODE (arg1
) == MULT_EXPR
)
9366 parg
= arg0
, marg
= arg1
;
9368 parg
= arg1
, marg
= arg0
;
9369 pcode
= TREE_CODE (parg
);
9370 parg0
= TREE_OPERAND (parg
, 0);
9371 parg1
= TREE_OPERAND (parg
, 1);
9375 if (TREE_CODE (parg0
) == MULT_EXPR
9376 && TREE_CODE (parg1
) != MULT_EXPR
)
9377 return fold_build2_loc (loc
, pcode
, type
,
9378 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9379 fold_convert_loc (loc
, type
,
9381 fold_convert_loc (loc
, type
,
9383 fold_convert_loc (loc
, type
, parg1
));
9384 if (TREE_CODE (parg0
) != MULT_EXPR
9385 && TREE_CODE (parg1
) == MULT_EXPR
)
9387 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9388 fold_convert_loc (loc
, type
, parg0
),
9389 fold_build2_loc (loc
, pcode
, type
,
9390 fold_convert_loc (loc
, type
, marg
),
9391 fold_convert_loc (loc
, type
,
9397 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9398 to __complex__ ( x, y ). This is not the same for SNaNs or
9399 if signed zeros are involved. */
9400 if (!HONOR_SNANS (element_mode (arg0
))
9401 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9402 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9404 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9405 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9406 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9407 bool arg0rz
= false, arg0iz
= false;
9408 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9409 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9411 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9412 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9413 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9415 tree rp
= arg1r
? arg1r
9416 : build1 (REALPART_EXPR
, rtype
, arg1
);
9417 tree ip
= arg0i
? arg0i
9418 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9419 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9421 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9423 tree rp
= arg0r
? arg0r
9424 : build1 (REALPART_EXPR
, rtype
, arg0
);
9425 tree ip
= arg1i
? arg1i
9426 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9427 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9432 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9433 We associate floats only if the user has specified
9434 -fassociative-math. */
9435 if (flag_associative_math
9436 && TREE_CODE (arg1
) == PLUS_EXPR
9437 && TREE_CODE (arg0
) != MULT_EXPR
)
9439 tree tree10
= TREE_OPERAND (arg1
, 0);
9440 tree tree11
= TREE_OPERAND (arg1
, 1);
9441 if (TREE_CODE (tree11
) == MULT_EXPR
9442 && TREE_CODE (tree10
) == MULT_EXPR
)
9445 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9446 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9449 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9450 We associate floats only if the user has specified
9451 -fassociative-math. */
9452 if (flag_associative_math
9453 && TREE_CODE (arg0
) == PLUS_EXPR
9454 && TREE_CODE (arg1
) != MULT_EXPR
)
9456 tree tree00
= TREE_OPERAND (arg0
, 0);
9457 tree tree01
= TREE_OPERAND (arg0
, 1);
9458 if (TREE_CODE (tree01
) == MULT_EXPR
9459 && TREE_CODE (tree00
) == MULT_EXPR
)
9462 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9463 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.
	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
	 though in this case CODE must be | and not + or ^, otherwise
	 it doesn't return A when B is 0.  */
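      /* For example, for a 32-bit unsigned A,
	 (A << 8) | (A >> 24) is recognized as a rotate left by 8, and
	 (A << B) | (A >> (-B & 31)) as a rotate left by B (the & 31
	 form is required so that the expression still yields A when
	 B is 0).  */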
9477 enum tree_code code0
, code1
;
9479 code0
= TREE_CODE (arg0
);
9480 code1
= TREE_CODE (arg1
);
9481 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9482 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9483 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9484 TREE_OPERAND (arg1
, 0), 0)
9485 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9486 TYPE_UNSIGNED (rtype
))
9487 /* Only create rotates in complete modes. Other cases are not
9488 expanded properly. */
9489 && (element_precision (rtype
)
9490 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
9492 tree tree01
, tree11
;
9493 tree orig_tree01
, orig_tree11
;
9494 enum tree_code code01
, code11
;
9496 tree01
= orig_tree01
= TREE_OPERAND (arg0
, 1);
9497 tree11
= orig_tree11
= TREE_OPERAND (arg1
, 1);
9498 STRIP_NOPS (tree01
);
9499 STRIP_NOPS (tree11
);
9500 code01
= TREE_CODE (tree01
);
9501 code11
= TREE_CODE (tree11
);
9502 if (code11
!= MINUS_EXPR
9503 && (code01
== MINUS_EXPR
|| code01
== BIT_AND_EXPR
))
9505 std::swap (code0
, code1
);
9506 std::swap (code01
, code11
);
9507 std::swap (tree01
, tree11
);
9508 std::swap (orig_tree01
, orig_tree11
);
9510 if (code01
== INTEGER_CST
9511 && code11
== INTEGER_CST
9512 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9513 == element_precision (rtype
)))
9515 tem
= build2_loc (loc
, LROTATE_EXPR
,
9516 rtype
, TREE_OPERAND (arg0
, 0),
9517 code0
== LSHIFT_EXPR
9518 ? orig_tree01
: orig_tree11
);
9519 return fold_convert_loc (loc
, type
, tem
);
9521 else if (code11
== MINUS_EXPR
)
9523 tree tree110
, tree111
;
9524 tree110
= TREE_OPERAND (tree11
, 0);
9525 tree111
= TREE_OPERAND (tree11
, 1);
9526 STRIP_NOPS (tree110
);
9527 STRIP_NOPS (tree111
);
9528 if (TREE_CODE (tree110
) == INTEGER_CST
9529 && 0 == compare_tree_int (tree110
,
9530 element_precision (rtype
))
9531 && operand_equal_p (tree01
, tree111
, 0))
9533 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
9534 ? LROTATE_EXPR
: RROTATE_EXPR
),
9535 rtype
, TREE_OPERAND (arg0
, 0),
9537 return fold_convert_loc (loc
, type
, tem
);
9540 else if (code
== BIT_IOR_EXPR
9541 && code11
== BIT_AND_EXPR
9542 && pow2p_hwi (element_precision (rtype
)))
9544 tree tree110
, tree111
;
9545 tree110
= TREE_OPERAND (tree11
, 0);
9546 tree111
= TREE_OPERAND (tree11
, 1);
9547 STRIP_NOPS (tree110
);
9548 STRIP_NOPS (tree111
);
9549 if (TREE_CODE (tree110
) == NEGATE_EXPR
9550 && TREE_CODE (tree111
) == INTEGER_CST
9551 && 0 == compare_tree_int (tree111
,
9552 element_precision (rtype
) - 1)
9553 && operand_equal_p (tree01
, TREE_OPERAND (tree110
, 0), 0))
9555 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
9556 ? LROTATE_EXPR
: RROTATE_EXPR
),
9557 rtype
, TREE_OPERAND (arg0
, 0),
9559 return fold_convert_loc (loc
, type
, tem
);
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */
9572 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9573 && !TYPE_SATURATING (type
))
9575 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
9576 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
9580 /* Split both trees into variables, constants, and literals. Then
9581 associate each group together, the constants with literals,
9582 then the result with variables. This increases the chances of
9583 literals being recombined later and of generating relocatable
9584 expressions for the sum of a constant and literal. */
9585 var0
= split_tree (arg0
, type
, code
,
9586 &minus_var0
, &con0
, &minus_con0
,
9587 &lit0
, &minus_lit0
, 0);
9588 var1
= split_tree (arg1
, type
, code
,
9589 &minus_var1
, &con1
, &minus_con1
,
9590 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
9592 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9593 if (code
== MINUS_EXPR
)
9596 /* With undefined overflow prefer doing association in a type
9597 which wraps on overflow, if that is one of the operand types. */
9598 if (POINTER_TYPE_P (type
)
9599 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9601 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9602 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9603 atype
= TREE_TYPE (arg0
);
9604 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9605 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9606 atype
= TREE_TYPE (arg1
);
9607 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9610 /* With undefined overflow we can only associate constants with one
9611 variable, and constants whose association doesn't overflow. */
9612 if (POINTER_TYPE_P (atype
)
9613 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9615 if ((var0
&& var1
) || (minus_var0
&& minus_var1
))
9617 /* ??? If split_tree would handle NEGATE_EXPR we could
9618 simply reject these cases and the allowed cases would
9619 be the var0/minus_var1 ones. */
9620 tree tmp0
= var0
? var0
: minus_var0
;
9621 tree tmp1
= var1
? var1
: minus_var1
;
9622 bool one_neg
= false;
9624 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9626 tmp0
= TREE_OPERAND (tmp0
, 0);
9629 if (CONVERT_EXPR_P (tmp0
)
9630 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9631 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9632 <= TYPE_PRECISION (atype
)))
9633 tmp0
= TREE_OPERAND (tmp0
, 0);
9634 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9636 tmp1
= TREE_OPERAND (tmp1
, 0);
9639 if (CONVERT_EXPR_P (tmp1
)
9640 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9641 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9642 <= TYPE_PRECISION (atype
)))
9643 tmp1
= TREE_OPERAND (tmp1
, 0);
9644 /* The only case we can still associate with two variables
9645 is if they cancel out. */
9647 || !operand_equal_p (tmp0
, tmp1
, 0))
9650 else if ((var0
&& minus_var1
9651 && ! operand_equal_p (var0
, minus_var1
, 0))
9652 || (minus_var0
&& var1
9653 && ! operand_equal_p (minus_var0
, var1
, 0)))
9657 /* Only do something if we found more than two objects. Otherwise,
9658 nothing has changed and we risk infinite recursion. */
9660 && (2 < ((var0
!= 0) + (var1
!= 0)
9661 + (minus_var0
!= 0) + (minus_var1
!= 0)
9662 + (con0
!= 0) + (con1
!= 0)
9663 + (minus_con0
!= 0) + (minus_con1
!= 0)
9664 + (lit0
!= 0) + (lit1
!= 0)
9665 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9667 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9668 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
9670 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9671 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
9673 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9674 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9677 if (minus_var0
&& var0
)
9679 var0
= associate_trees (loc
, var0
, minus_var0
,
9683 if (minus_con0
&& con0
)
9685 con0
= associate_trees (loc
, con0
, minus_con0
,
	  /* Preserve the MINUS_EXPR if the negative part of the literal is
	     greater than the positive part.  Otherwise, the multiplicative
	     folding code (i.e. extract_muldiv) may be fooled in case
	     unsigned constants are subtracted, like in the following
	     example: ((X*2 + 4) - 8U)/2.  */
9695 if (minus_lit0
&& lit0
)
9697 if (TREE_CODE (lit0
) == INTEGER_CST
9698 && TREE_CODE (minus_lit0
) == INTEGER_CST
9699 && tree_int_cst_lt (lit0
, minus_lit0
)
9700 /* But avoid ending up with only negated parts. */
9703 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9709 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9715 /* Don't introduce overflows through reassociation. */
9716 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
9717 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
9720 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9721 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9723 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
9727 /* Eliminate minus_con0. */
9731 con0
= associate_trees (loc
, con0
, minus_con0
,
9734 var0
= associate_trees (loc
, var0
, minus_con0
,
9741 /* Eliminate minus_var0. */
9745 con0
= associate_trees (loc
, con0
, minus_var0
,
9753 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9760 case POINTER_DIFF_EXPR
:
9762 /* Fold &a[i] - &a[j] to i-j. */
9763 if (TREE_CODE (arg0
) == ADDR_EXPR
9764 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9765 && TREE_CODE (arg1
) == ADDR_EXPR
9766 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9768 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
9769 TREE_OPERAND (arg0
, 0),
9770 TREE_OPERAND (arg1
, 0),
9772 == POINTER_DIFF_EXPR
);
9777 /* Further transformations are not for pointers. */
9778 if (code
== POINTER_DIFF_EXPR
)
9781 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9782 if (TREE_CODE (arg0
) == NEGATE_EXPR
9783 && negate_expr_p (op1
)
9784 /* If arg0 is e.g. unsigned int and type is int, then this could
9785 introduce UB, because if A is INT_MIN at runtime, the original
9786 expression can be well defined while the latter is not.
9788 && !(ANY_INTEGRAL_TYPE_P (type
)
9789 && TYPE_OVERFLOW_UNDEFINED (type
)
9790 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9791 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
9792 return fold_build2_loc (loc
, MINUS_EXPR
, type
, negate_expr (op1
),
9793 fold_convert_loc (loc
, type
,
9794 TREE_OPERAND (arg0
, 0)));
9796 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9797 __complex__ ( x, -y ). This is not the same for SNaNs or if
9798 signed zeros are involved. */
9799 if (!HONOR_SNANS (element_mode (arg0
))
9800 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9801 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9803 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9804 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9805 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9806 bool arg0rz
= false, arg0iz
= false;
9807 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9808 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9810 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9811 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9812 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9814 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9816 : build1 (REALPART_EXPR
, rtype
, arg1
));
9817 tree ip
= arg0i
? arg0i
9818 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9819 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9821 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9823 tree rp
= arg0r
? arg0r
9824 : build1 (REALPART_EXPR
, rtype
, arg0
);
9825 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9827 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9828 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9833 /* A - B -> A + (-B) if B is easily negatable. */
9834 if (negate_expr_p (op1
)
9835 && ! TYPE_OVERFLOW_SANITIZED (type
)
9836 && ((FLOAT_TYPE_P (type
)
9837 /* Avoid this transformation if B is a positive REAL_CST. */
9838 && (TREE_CODE (op1
) != REAL_CST
9839 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
9840 || INTEGRAL_TYPE_P (type
)))
9841 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9842 fold_convert_loc (loc
, type
, arg0
),
9845 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9846 one. Make sure the type is not saturating and has the signedness of
9847 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9848 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9849 if ((TREE_CODE (arg0
) == MULT_EXPR
9850 || TREE_CODE (arg1
) == MULT_EXPR
)
9851 && !TYPE_SATURATING (type
)
9852 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9853 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9854 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9856 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9864 if (! FLOAT_TYPE_P (type
))
9866 /* Transform x * -C into -x * C if x is easily negatable. */
9867 if (TREE_CODE (op1
) == INTEGER_CST
9868 && tree_int_cst_sgn (op1
) == -1
9869 && negate_expr_p (op0
)
9870 && negate_expr_p (op1
)
9871 && (tem
= negate_expr (op1
)) != op1
9872 && ! TREE_OVERFLOW (tem
))
9873 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9874 fold_convert_loc (loc
, type
,
9875 negate_expr (op0
)), tem
);
9877 strict_overflow_p
= false;
9878 if (TREE_CODE (arg1
) == INTEGER_CST
9879 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
9880 &strict_overflow_p
)))
9882 if (strict_overflow_p
)
9883 fold_overflow_warning (("assuming signed overflow does not "
9884 "occur when simplifying "
9886 WARN_STRICT_OVERFLOW_MISC
);
9887 return fold_convert_loc (loc
, type
, tem
);
9890 /* Optimize z * conj(z) for integer complex numbers. */
9891 if (TREE_CODE (arg0
) == CONJ_EXPR
9892 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9893 return fold_mult_zconjz (loc
, type
, arg1
);
9894 if (TREE_CODE (arg1
) == CONJ_EXPR
9895 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9896 return fold_mult_zconjz (loc
, type
, arg0
);
9900 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9901 This is not the same for NaNs or if signed zeros are
9903 if (!HONOR_NANS (arg0
)
9904 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9905 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9906 && TREE_CODE (arg1
) == COMPLEX_CST
9907 && real_zerop (TREE_REALPART (arg1
)))
9909 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9910 if (real_onep (TREE_IMAGPART (arg1
)))
9912 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9913 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
9915 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
9916 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9918 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9919 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
9920 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
9924 /* Optimize z * conj(z) for floating point complex numbers.
9925 Guarded by flag_unsafe_math_optimizations as non-finite
9926 imaginary components don't produce scalar results. */
9927 if (flag_unsafe_math_optimizations
9928 && TREE_CODE (arg0
) == CONJ_EXPR
9929 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9930 return fold_mult_zconjz (loc
, type
, arg1
);
9931 if (flag_unsafe_math_optimizations
9932 && TREE_CODE (arg1
) == CONJ_EXPR
9933 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9934 return fold_mult_zconjz (loc
, type
, arg0
);
9939 /* Canonicalize (X & C1) | C2. */
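      /* For example, with unsigned char operands,
	 (X & 0xf0) | 0xff   becomes   0xff         (C1 & C2 == C1), and
	 (X & 0x0f) | 0xf0   becomes   X | 0xf0     (C1 | C2 == ~0).  */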
9940 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9941 && TREE_CODE (arg1
) == INTEGER_CST
9942 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9944 int width
= TYPE_PRECISION (type
), w
;
9945 wide_int c1
= wi::to_wide (TREE_OPERAND (arg0
, 1));
9946 wide_int c2
= wi::to_wide (arg1
);
9948 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9949 if ((c1
& c2
) == c1
)
9950 return omit_one_operand_loc (loc
, type
, arg1
,
9951 TREE_OPERAND (arg0
, 0));
9953 wide_int msk
= wi::mask (width
, false,
9954 TYPE_PRECISION (TREE_TYPE (arg1
)));
9956 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9957 if (wi::bit_and_not (msk
, c1
| c2
) == 0)
9959 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9960 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
9963 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9964 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9965 mode which allows further optimizations. */
9968 wide_int c3
= wi::bit_and_not (c1
, c2
);
9969 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
9971 wide_int mask
= wi::mask (w
, false,
9972 TYPE_PRECISION (type
));
9973 if (((c1
| c2
) & mask
) == mask
9974 && wi::bit_and_not (c1
, mask
) == 0)
9983 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9984 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
9985 wide_int_to_tree (type
, c3
));
9986 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
9990 /* See if this can be simplified into a rotate first. If that
9991 is unsuccessful continue in the association code. */
9995 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9996 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9997 && INTEGRAL_TYPE_P (type
)
9998 && integer_onep (TREE_OPERAND (arg0
, 1))
9999 && integer_onep (arg1
))
10000 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
10001 build_zero_cst (TREE_TYPE (arg0
)));
10003 /* See if this can be simplified into a rotate first. If that
10004 is unsuccessful continue in the association code. */
10008 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10009 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10010 && INTEGRAL_TYPE_P (type
)
10011 && integer_onep (TREE_OPERAND (arg0
, 1))
10012 && integer_onep (arg1
))
10015 tem
= TREE_OPERAND (arg0
, 0);
10016 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10017 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10019 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10020 build_zero_cst (TREE_TYPE (tem
)));
10022 /* Fold ~X & 1 as (X & 1) == 0. */
10023 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10024 && INTEGRAL_TYPE_P (type
)
10025 && integer_onep (arg1
))
10028 tem
= TREE_OPERAND (arg0
, 0);
10029 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10030 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10032 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10033 build_zero_cst (TREE_TYPE (tem
)));
10035 /* Fold !X & 1 as X == 0. */
10036 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10037 && integer_onep (arg1
))
10039 tem
= TREE_OPERAND (arg0
, 0);
10040 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
10041 build_zero_cst (TREE_TYPE (tem
)));
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
	  wide_int masked
	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && pow2p_hwi (pop)
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
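
      /* Illustrative cases for the transform above: (X * 4) & 3 is always
	 zero because the multiplication guarantees the two low bits are
	 clear, while (X * 4) & 0x17 can drop the known-zero bits and
	 become (X * 4) & 0x14.  */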
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & wi::to_wide (pmop[which])) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
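
      /* Example of the PMOP rewrite above: with M == 0xff, the expression
	 ((A & 0x1ff) + B) & 0xff can forget the inner mask, because only
	 the low eight bits of the sum survive, giving (A + B) & 0xff;
	 likewise ((A | 0x100) + B) & 0xff drops the inner OR entirely.  */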
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));
    case TRUNC_DIV_EXPR:
      /* Fall through.  */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (wi::to_wide (sval)));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
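
      /* For instance, with unsigned A the division A / (8 << N) is
	 rewritten as A >> (N + 3), since 8 is 1 << 3 and the shift
	 count simply accumulates the log2 of the constant.  */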
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (op0) == NEGATE_EXPR
	  && negate_expr_p (op1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  negate_expr (op1));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (op0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  negate_expr (op0),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
				fold_convert (type, arg0),
				fold_convert (type, arg1));
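
      /* As an example, an operand like (x * 4) is a known multiple of 2,
	 so a CEIL_ or FLOOR_ division of it by 2 can use the cheaper
	 EXACT_DIV_EXPR form without changing the result.  */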
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
				  fold_build2_loc (loc, code, type,
						   arg00, arg1),
				  fold_build2_loc (loc, code, type,
						   arg01, arg1));
	}

      /* Two consecutive rotates adding up to the some integer
	 multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::to_wide (arg1)
			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
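
      /* Example for the rotate rule above: on a 32-bit type, rotating
	 right by 13 and then by 19 adds up to a full 32-bit rotation,
	 so the pair collapses to the original operand.  */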
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
      /* FALLTHRU */
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
      /* FALLTHRU */
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;
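
      /* To illustrate the (X && !Y) || (!X && Y) case above: for
	 (a && !b) || (!a && b), the negations of the first conjunction's
	 operands are !a and b, which match the second conjunction exactly,
	 so the whole expression simplifies to the truth-XOR a ^ b.  */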
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
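
      /* E.g. p ^ p folds straight to 0 and !p ^ p to 1; in both cases
	 omit_one_operand still evaluates p once if it has side effects.  */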
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
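
      /* So a test such as "flag != 1" on a BOOLEAN_TYPE flag is emitted as
	 !flag, and "flag == 0" gets the same treatment; both reduce to a
	 single truth negation rather than a comparison.  */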
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
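
      /* For instance, with int x the test (x % 4) != 0 is rewritten to use
	 an unsigned modulus, ((unsigned) x % 4u) != 0, which later phases
	 can reduce to a simple mask of the two low bits.  */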
10616 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10617 C1 is a valid shift constant, and C2 is a power of two, i.e.
10619 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10620 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
10621 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
10623 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10624 && integer_zerop (arg1
))
10626 tree itype
= TREE_TYPE (arg0
);
10627 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
10628 prec
= TYPE_PRECISION (itype
);
10630 /* Check for a valid shift count. */
10631 if (wi::ltu_p (wi::to_wide (arg001
), prec
))
10633 tree arg01
= TREE_OPERAND (arg0
, 1);
10634 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10635 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
10636 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10637 can be rewritten as (X & (C2 << C1)) != 0. */
10638 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
10640 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
10641 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
10642 return fold_build2_loc (loc
, code
, type
, tem
,
10643 fold_convert_loc (loc
, itype
, arg1
));
10645 /* Otherwise, for signed (arithmetic) shifts,
10646 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10647 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10648 else if (!TYPE_UNSIGNED (itype
))
10649 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
10650 arg000
, build_int_cst (itype
, 0));
10651 /* Otherwise, of unsigned (logical) shifts,
10652 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10653 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10655 return omit_one_operand_loc (loc
, type
,
10656 code
== EQ_EXPR
? integer_one_node
10657 : integer_zero_node
,
10662 /* If this is a comparison of a field, we may be able to simplify it. */
10663 if ((TREE_CODE (arg0
) == COMPONENT_REF
10664 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
10665 /* Handle the constant case even without -O
10666 to make sure the warnings are given. */
10667 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
10669 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
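
      /* In effect strlen (s) == 0 becomes *s == 0 and strlen (s) != 0
	 becomes *s != 0, avoiding the library call when only emptiness
	 of the string is being tested.  */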
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
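
      /* Example: for a 32-bit int x, (x >> 31) != 0 is just the sign test
	 x < 0, and (x >> 31) == 0 is x >= 0; the unsigned variant first
	 casts back to the signed type so the same comparison applies.  */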
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    tem),
				  arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    {
	      tem = fold_convert_loc (loc, itype, arg10);
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
	      return fold_build2_loc (loc, code, type, tem,
				      build_zero_cst (itype));
	    }
	  if (operand_equal_p (arg01, arg10, 0))
	    {
	      tem = fold_convert_loc (loc, itype, arg11);
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
	      return fold_build2_loc (loc, code, type, tem,
				      build_zero_cst (itype));
	    }
	  if (operand_equal_p (arg00, arg11, 0))
	    {
	      tem = fold_convert_loc (loc, itype, arg10);
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
	      return fold_build2_loc (loc, code, type, tem,
				      build_zero_cst (itype));
	    }
	  if (operand_equal_p (arg00, arg10, 0))
	    {
	      tem = fold_convert_loc (loc, itype, arg11);
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
	      return fold_build2_loc (loc, code, type, tem,
				      build_zero_cst (itype));
	    }
	}
10816 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10817 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
10819 tree arg00
= TREE_OPERAND (arg0
, 0);
10820 tree arg01
= TREE_OPERAND (arg0
, 1);
10821 tree arg10
= TREE_OPERAND (arg1
, 0);
10822 tree arg11
= TREE_OPERAND (arg1
, 1);
10823 tree itype
= TREE_TYPE (arg0
);
10825 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10826 operand_equal_p guarantees no side-effects so we don't need
10827 to use omit_one_operand on Z. */
10828 if (operand_equal_p (arg01
, arg11
, 0))
10829 return fold_build2_loc (loc
, code
, type
, arg00
,
10830 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10832 if (operand_equal_p (arg01
, arg10
, 0))
10833 return fold_build2_loc (loc
, code
, type
, arg00
,
10834 fold_convert_loc (loc
, TREE_TYPE (arg00
),
10836 if (operand_equal_p (arg00
, arg11
, 0))
10837 return fold_build2_loc (loc
, code
, type
, arg01
,
10838 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10840 if (operand_equal_p (arg00
, arg10
, 0))
10841 return fold_build2_loc (loc
, code
, type
, arg01
,
10842 fold_convert_loc (loc
, TREE_TYPE (arg01
),
10845 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10846 if (TREE_CODE (arg01
) == INTEGER_CST
10847 && TREE_CODE (arg11
) == INTEGER_CST
)
10849 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
10850 fold_convert_loc (loc
, itype
, arg11
));
10851 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
10852 return fold_build2_loc (loc
, code
, type
, tem
,
10853 fold_convert_loc (loc
, itype
, arg10
));
10857 /* Attempt to simplify equality/inequality comparisons of complex
10858 values. Only lower the comparison if the result is known or
10859 can be simplified to a single scalar comparison. */
10860 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
10861 || TREE_CODE (arg0
) == COMPLEX_CST
)
10862 && (TREE_CODE (arg1
) == COMPLEX_EXPR
10863 || TREE_CODE (arg1
) == COMPLEX_CST
))
10865 tree real0
, imag0
, real1
, imag1
;
10868 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
10870 real0
= TREE_OPERAND (arg0
, 0);
10871 imag0
= TREE_OPERAND (arg0
, 1);
10875 real0
= TREE_REALPART (arg0
);
10876 imag0
= TREE_IMAGPART (arg0
);
10879 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
10881 real1
= TREE_OPERAND (arg1
, 0);
10882 imag1
= TREE_OPERAND (arg1
, 1);
10886 real1
= TREE_REALPART (arg1
);
10887 imag1
= TREE_IMAGPART (arg1
);
10890 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
10891 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
10893 if (integer_zerop (rcond
))
10895 if (code
== EQ_EXPR
)
10896 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
10898 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
10902 if (code
== NE_EXPR
)
10903 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
10905 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
10909 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
10910 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
10912 if (integer_zerop (icond
))
10914 if (code
== EQ_EXPR
)
10915 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
10917 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
10921 if (code
== NE_EXPR
)
10922 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
10924 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && !HONOR_SNANS (arg0))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (arg1)
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);
	}
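
      /* As a concrete case of the folds just above, with c = 1.0 and no
	 signaling NaNs the comparison (x - 1.0) > x folds to false
	 outright, and (x + 1.0) >= x folds to true once quiet NaNs are
	 also excluded by !HONOR_NANS.  */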
10976 /* If we are comparing an ABS_EXPR with a constant, we can
10977 convert all the cases into explicit comparisons, but they may
10978 well not be faster than doing the ABS and one comparison.
10979 But ABS (X) <= C is a range comparison, which becomes a subtraction
10980 and a comparison, and is probably faster. */
10981 if (code
== LE_EXPR
10982 && TREE_CODE (arg1
) == INTEGER_CST
10983 && TREE_CODE (arg0
) == ABS_EXPR
10984 && ! TREE_SIDE_EFFECTS (arg0
)
10985 && (0 != (tem
= negate_expr (arg1
)))
10986 && TREE_CODE (tem
) == INTEGER_CST
10987 && !TREE_OVERFLOW (tem
))
10988 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
10989 build2 (GE_EXPR
, type
,
10990 TREE_OPERAND (arg0
, 0), tem
),
10991 build2 (LE_EXPR
, type
,
10992 TREE_OPERAND (arg0
, 0), arg1
));
10994 /* Convert ABS_EXPR<x> >= 0 to true. */
10995 strict_overflow_p
= false;
10996 if (code
== GE_EXPR
10997 && (integer_zerop (arg1
)
10998 || (! HONOR_NANS (arg0
)
10999 && real_zerop (arg1
)))
11000 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11002 if (strict_overflow_p
)
11003 fold_overflow_warning (("assuming signed overflow does not occur "
11004 "when simplifying comparison of "
11005 "absolute value and zero"),
11006 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11007 return omit_one_operand_loc (loc
, type
,
11008 constant_boolean_node (true, type
),
11012 /* Convert ABS_EXPR<x> < 0 to false. */
11013 strict_overflow_p
= false;
11014 if (code
== LT_EXPR
11015 && (integer_zerop (arg1
) || real_zerop (arg1
))
11016 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11018 if (strict_overflow_p
)
11019 fold_overflow_warning (("assuming signed overflow does not occur "
11020 "when simplifying comparison of "
11021 "absolute value and zero"),
11022 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11023 return omit_one_operand_loc (loc
, type
,
11024 constant_boolean_node (false, type
),
11028 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11029 and similarly for >= into !=. */
11030 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11031 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11032 && TREE_CODE (arg1
) == LSHIFT_EXPR
11033 && integer_onep (TREE_OPERAND (arg1
, 0)))
11034 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11035 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11036 TREE_OPERAND (arg1
, 1)),
11037 build_zero_cst (TREE_TYPE (arg0
)));
11039 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11040 otherwise Y might be >= # of bits in X's type and thus e.g.
11041 (unsigned char) (1 << Y) for Y 15 might be 0.
11042 If the cast is widening, then 1 << Y should have unsigned type,
11043 otherwise if Y is number of bits in the signed shift type minus 1,
11044 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11045 31 might be 0xffffffff80000000. */
11046 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11047 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11048 && CONVERT_EXPR_P (arg1
)
11049 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
11050 && (element_precision (TREE_TYPE (arg1
))
11051 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
11052 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
11053 || (element_precision (TREE_TYPE (arg1
))
11054 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
11055 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
11057 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11058 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
11059 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11060 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
11061 build_zero_cst (TREE_TYPE (arg0
)));
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* FALLTHRU */
    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
11197 case COMPONENT_REF
:
11198 if (TREE_CODE (arg0
) == CONSTRUCTOR
11199 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11201 unsigned HOST_WIDE_INT idx
;
11203 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11210 case VEC_COND_EXPR
:
11211 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11212 so all simple results must be passed through pedantic_non_lvalue. */
11213 if (TREE_CODE (arg0
) == INTEGER_CST
)
11215 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11216 tem
= integer_zerop (arg0
) ? op2
: op1
;
11217 /* Only optimize constant conditions when the selected branch
11218 has the same type as the COND_EXPR. This avoids optimizing
11219 away "c ? x : throw", where the throw has a void type.
11220 Avoid throwing away that operand which contains label. */
11221 if ((!TREE_SIDE_EFFECTS (unused_op
)
11222 || !contains_label_p (unused_op
))
11223 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11224 || VOID_TYPE_P (type
)))
11225 return pedantic_non_lvalue_loc (loc
, tem
);
11228 else if (TREE_CODE (arg0
) == VECTOR_CST
)
11230 if ((TREE_CODE (arg1
) == VECTOR_CST
11231 || TREE_CODE (arg1
) == CONSTRUCTOR
)
11232 && (TREE_CODE (arg2
) == VECTOR_CST
11233 || TREE_CODE (arg2
) == CONSTRUCTOR
))
11235 unsigned int nelts
= VECTOR_CST_NELTS (arg0
), i
;
11236 gcc_assert (nelts
== TYPE_VECTOR_SUBPARTS (type
));
11237 auto_vec_perm_indices
sel (nelts
);
11238 for (i
= 0; i
< nelts
; i
++)
11240 tree val
= VECTOR_CST_ELT (arg0
, i
);
11241 if (integer_all_onesp (val
))
11242 sel
.quick_push (i
);
11243 else if (integer_zerop (val
))
11244 sel
.quick_push (nelts
+ i
);
11245 else /* Currently unreachable. */
11248 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
11249 if (t
!= NULL_TREE
)
11254 /* If we have A op B ? A : C, we may be able to convert this to a
11255 simpler expression, depending on the operation and the values
11256 of B and C. Signed zeros prevent all of these transformations,
11257 for reasons given above each one.
11259 Also try swapping the arguments and inverting the conditional. */
11260 if (COMPARISON_CLASS_P (arg0
)
11261 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op1
)
11262 && !HONOR_SIGNED_ZEROS (element_mode (op1
)))
11264 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
11269 if (COMPARISON_CLASS_P (arg0
)
11270 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op2
)
11271 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
11273 location_t loc0
= expr_location_or (arg0
, loc
);
11274 tem
= fold_invert_truthvalue (loc0
, arg0
);
11275 if (tem
&& COMPARISON_CLASS_P (tem
))
11277 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
11283 /* If the second operand is simpler than the third, swap them
11284 since that produces better jump optimization results. */
11285 if (truth_value_p (TREE_CODE (arg0
))
11286 && tree_swap_operands_p (op1
, op2
))
11288 location_t loc0
= expr_location_or (arg0
, loc
);
11289 /* See if this can be inverted. If it can't, possibly because
11290 it was a floating-point inequality comparison, don't do
11292 tem
= fold_invert_truthvalue (loc0
, arg0
);
11294 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && code == COND_EXPR
	  && integer_onep (op2)
	  && !VECTOR_TYPE_P (type)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));
11321 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11322 if (TREE_CODE (arg0
) == LT_EXPR
11323 && integer_zerop (TREE_OPERAND (arg0
, 1))
11324 && integer_zerop (op2
)
11325 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11327 /* sign_bit_p looks through both zero and sign extensions,
11328 but for this optimization only sign extensions are
11330 tree tem2
= TREE_OPERAND (arg0
, 0);
11331 while (tem
!= tem2
)
11333 if (TREE_CODE (tem2
) != NOP_EXPR
11334 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
11339 tem2
= TREE_OPERAND (tem2
, 0);
11341 /* sign_bit_p only checks ARG1 bits within A's precision.
11342 If <sign bit of A> has wider type than A, bits outside
11343 of A's precision in <sign bit of A> need to be checked.
11344 If they are all 0, this optimization needs to be done
11345 in unsigned A's type, if they are all 1 in signed A's type,
11346 otherwise this can't be done. */
11348 && TYPE_PRECISION (TREE_TYPE (tem
))
11349 < TYPE_PRECISION (TREE_TYPE (arg1
))
11350 && TYPE_PRECISION (TREE_TYPE (tem
))
11351 < TYPE_PRECISION (type
))
11353 int inner_width
, outer_width
;
11356 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
11357 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
11358 if (outer_width
> TYPE_PRECISION (type
))
11359 outer_width
= TYPE_PRECISION (type
);
11361 wide_int mask
= wi::shifted_mask
11362 (inner_width
, outer_width
- inner_width
, false,
11363 TYPE_PRECISION (TREE_TYPE (arg1
)));
11365 wide_int common
= mask
& wi::to_wide (arg1
);
11366 if (common
== mask
)
11368 tem_type
= signed_type_for (TREE_TYPE (tem
));
11369 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11371 else if (common
== 0)
11373 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
11374 tem
= fold_convert_loc (loc
, tem_type
, tem
);
11382 fold_convert_loc (loc
, type
,
11383 fold_build2_loc (loc
, BIT_AND_EXPR
,
11384 TREE_TYPE (tem
), tem
,
11385 fold_convert_loc (loc
,
11390 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11391 already handled above. */
11392 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11393 && integer_onep (TREE_OPERAND (arg0
, 1))
11394 && integer_zerop (op2
)
11395 && integer_pow2p (arg1
))
11397 tree tem
= TREE_OPERAND (arg0
, 0);
11399 if (TREE_CODE (tem
) == RSHIFT_EXPR
11400 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
11401 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
11402 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
11403 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11404 fold_convert_loc (loc
, type
,
11405 TREE_OPERAND (tem
, 0)),
11409 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11410 is probably obsolete because the first operand should be a
11411 truth value (that's why we have the two cases above), but let's
11412 leave it in until we can confirm this for all front-ends. */
11413 if (integer_zerop (op2
)
11414 && TREE_CODE (arg0
) == NE_EXPR
11415 && integer_zerop (TREE_OPERAND (arg0
, 1))
11416 && integer_pow2p (arg1
)
11417 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11418 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11419 arg1
, OEP_ONLY_CONST
))
11420 return pedantic_non_lvalue_loc (loc
,
11421 fold_convert_loc (loc
, type
,
11422 TREE_OPERAND (arg0
, 0)));
11424 /* Disable the transformations below for vectors, since
11425 fold_binary_op_with_conditional_arg may undo them immediately,
11426 yielding an infinite loop. */
11427 if (code
== VEC_COND_EXPR
)
11430 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11431 if (integer_zerop (op2
)
11432 && truth_value_p (TREE_CODE (arg0
))
11433 && truth_value_p (TREE_CODE (arg1
))
11434 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11435 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
11436 : TRUTH_ANDIF_EXPR
,
11437 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
11439 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11440 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
11441 && truth_value_p (TREE_CODE (arg0
))
11442 && truth_value_p (TREE_CODE (arg1
))
11443 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11445 location_t loc0
= expr_location_or (arg0
, loc
);
11446 /* Only perform transformation if ARG0 is easily inverted. */
11447 tem
= fold_invert_truthvalue (loc0
, arg0
);
11449 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11452 type
, fold_convert_loc (loc
, type
, tem
),
11456 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11457 if (integer_zerop (arg1
)
11458 && truth_value_p (TREE_CODE (arg0
))
11459 && truth_value_p (TREE_CODE (op2
))
11460 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11462 location_t loc0
= expr_location_or (arg0
, loc
);
11463 /* Only perform transformation if ARG0 is easily inverted. */
11464 tem
= fold_invert_truthvalue (loc0
, arg0
);
11466 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11467 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
11468 type
, fold_convert_loc (loc
, type
, tem
),
11472 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11473 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
11474 && truth_value_p (TREE_CODE (arg0
))
11475 && truth_value_p (TREE_CODE (op2
))
11476 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
11477 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
11478 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
11479 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
11484 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11485 of fold_ternary on them. */
11486 gcc_unreachable ();
11488 case BIT_FIELD_REF
:
11489 if (TREE_CODE (arg0
) == VECTOR_CST
11490 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
11491 || (TREE_CODE (type
) == VECTOR_TYPE
11492 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
11494 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
11495 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
11496 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
11497 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
11500 && (idx
% width
) == 0
11501 && (n
% width
) == 0
11502 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
11507 if (TREE_CODE (arg0
) == VECTOR_CST
)
11510 return VECTOR_CST_ELT (arg0
, idx
);
11512 tree_vector_builder
vals (type
, n
, 1);
11513 for (unsigned i
= 0; i
< n
; ++i
)
11514 vals
.quick_push (VECTOR_CST_ELT (arg0
, idx
+ i
));
11515 return vals
.build ();
11520 /* On constants we can use native encode/interpret to constant
11521 fold (nearly) all BIT_FIELD_REFs. */
11522 if (CONSTANT_CLASS_P (arg0
)
11523 && can_native_interpret_type_p (type
)
11524 && BITS_PER_UNIT
== 8)
11526 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11527 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
11528 /* Limit us to a reasonable amount of work. To relax the
11529 other limitations we need bit-shifting of the buffer
11530 and rounding up the size. */
11531 if (bitpos
% BITS_PER_UNIT
== 0
11532 && bitsize
% BITS_PER_UNIT
== 0
11533 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
11535 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
11536 unsigned HOST_WIDE_INT len
11537 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
11538 bitpos
/ BITS_PER_UNIT
);
11540 && len
* BITS_PER_UNIT
>= bitsize
)
11542 tree v
= native_interpret_expr (type
, b
,
11543 bitsize
/ BITS_PER_UNIT
);
11553 /* For integers we can decompose the FMA if possible. */
11554 if (TREE_CODE (arg0
) == INTEGER_CST
11555 && TREE_CODE (arg1
) == INTEGER_CST
)
11556 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11557 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
11558 if (integer_zerop (arg2
))
11559 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11561 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
11563 case VEC_PERM_EXPR
:
11564 if (TREE_CODE (arg2
) == VECTOR_CST
)
11566 unsigned int nelts
= VECTOR_CST_NELTS (arg2
), i
, mask
, mask2
;
11567 bool need_mask_canon
= false;
11568 bool need_mask_canon2
= false;
11569 bool all_in_vec0
= true;
11570 bool all_in_vec1
= true;
11571 bool maybe_identity
= true;
11572 bool single_arg
= (op0
== op1
);
11573 bool changed
= false;
11575 mask2
= 2 * nelts
- 1;
11576 mask
= single_arg
? (nelts
- 1) : mask2
;
11577 gcc_assert (nelts
== TYPE_VECTOR_SUBPARTS (type
));
11578 auto_vec_perm_indices
sel (nelts
);
11579 auto_vec_perm_indices
sel2 (nelts
);
11580 for (i
= 0; i
< nelts
; i
++)
11582 tree val
= VECTOR_CST_ELT (arg2
, i
);
11583 if (TREE_CODE (val
) != INTEGER_CST
)
11586 /* Make sure that the perm value is in an acceptable
11588 wi::tree_to_wide_ref t
= wi::to_wide (val
);
11589 need_mask_canon
|= wi::gtu_p (t
, mask
);
11590 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
11591 unsigned int elt
= t
.to_uhwi () & mask
;
11592 unsigned int elt2
= t
.to_uhwi () & mask2
;
11595 all_in_vec1
= false;
11597 all_in_vec0
= false;
11599 if ((elt
& (nelts
- 1)) != i
)
11600 maybe_identity
= false;
11602 sel
.quick_push (elt
);
11603 sel2
.quick_push (elt2
);
11606 if (maybe_identity
)
11616 else if (all_in_vec1
)
11619 for (i
= 0; i
< nelts
; i
++)
11621 need_mask_canon
= true;
11624 if ((TREE_CODE (op0
) == VECTOR_CST
11625 || TREE_CODE (op0
) == CONSTRUCTOR
)
11626 && (TREE_CODE (op1
) == VECTOR_CST
11627 || TREE_CODE (op1
) == CONSTRUCTOR
))
11629 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
11630 if (t
!= NULL_TREE
)
11634 if (op0
== op1
&& !single_arg
)
11637 /* Some targets are deficient and fail to expand a single
11638 argument permutation while still allowing an equivalent
11639 2-argument version. */
11640 if (need_mask_canon
&& arg2
== op2
11641 && !can_vec_perm_p (TYPE_MODE (type
), false, &sel
)
11642 && can_vec_perm_p (TYPE_MODE (type
), false, &sel2
))
11644 need_mask_canon
= need_mask_canon2
;
11648 if (need_mask_canon
&& arg2
== op2
)
11650 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
11651 tree_vector_builder
tsel (TREE_TYPE (arg2
), nelts
, 1);
11652 for (i
= 0; i
< nelts
; i
++)
11653 tsel
.quick_push (build_int_cst (eltype
, sel
[i
]));
11654 op2
= tsel
.build ();
11659 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
11663 case BIT_INSERT_EXPR
:
11664 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11665 if (TREE_CODE (arg0
) == INTEGER_CST
11666 && TREE_CODE (arg1
) == INTEGER_CST
)
11668 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11669 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
11670 wide_int tem
= (wi::to_wide (arg0
)
11671 & wi::shifted_mask (bitpos
, bitsize
, true,
11672 TYPE_PRECISION (type
)));
11674 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
11676 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
11678 else if (TREE_CODE (arg0
) == VECTOR_CST
11679 && CONSTANT_CLASS_P (arg1
)
11680 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
11683 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
11684 unsigned HOST_WIDE_INT elsize
11685 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
11686 if (bitpos
% elsize
== 0)
11688 unsigned k
= bitpos
/ elsize
;
11689 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
11693 unsigned int nelts
= VECTOR_CST_NELTS (arg0
);
11694 tree_vector_builder
elts (type
, nelts
, 1);
11695 elts
.quick_grow (nelts
);
11696 for (unsigned int i
= 0; i
< nelts
; ++i
)
11697 elts
[i
] = (i
== k
? arg1
: VECTOR_CST_ELT (arg0
, i
));
11698 return elts
.build ();
11706 } /* switch (code) */
11709 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11710 of an array (or vector). */
11713 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
)
11715 tree index_type
= NULL_TREE
;
11716 offset_int low_bound
= 0;
11718 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
11720 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
11721 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
11723 /* Static constructors for variably sized objects makes no sense. */
11724 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
11725 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
11726 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
11731 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
11732 TYPE_SIGN (index_type
));
11734 offset_int index
= low_bound
- 1;
11736 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
11737 TYPE_SIGN (index_type
));
11739 offset_int max_index
;
11740 unsigned HOST_WIDE_INT cnt
;
11743 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
11745 /* Array constructor might explicitly set index, or specify a range,
11746 or leave index NULL meaning that it is next index after previous
11750 if (TREE_CODE (cfield
) == INTEGER_CST
)
11751 max_index
= index
= wi::to_offset (cfield
);
11754 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
11755 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
11756 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
11763 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
11764 TYPE_SIGN (index_type
));
11768 /* Do we have match? */
11769 if (wi::cmpu (access_index
, index
) >= 0
11770 && wi::cmpu (access_index
, max_index
) <= 0)
11776 /* Perform constant folding and related simplification of EXPR.
11777 The related simplifications include x*1 => x, x*0 => 0, etc.,
11778 and application of the associative law.
11779 NOP_EXPR conversions may be removed freely (as long as we
11780 are careful not to change the type of the overall expression).
11781 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11782 but we can constant-fold them if they have constant operands. */
11784 #ifdef ENABLE_FOLD_CHECKING
11785 # define fold(x) fold_1 (x)
11786 static tree
fold_1 (tree
);
11792 const tree t
= expr
;
11793 enum tree_code code
= TREE_CODE (t
);
11794 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11796 location_t loc
= EXPR_LOCATION (expr
);
11798 /* Return right away if a constant. */
11799 if (kind
== tcc_constant
)
11802 /* CALL_EXPR-like objects with variable numbers of operands are
11803 treated specially. */
11804 if (kind
== tcc_vl_exp
)
11806 if (code
== CALL_EXPR
)
11808 tem
= fold_call_expr (loc
, expr
, false);
11809 return tem
? tem
: expr
;
11814 if (IS_EXPR_CODE_CLASS (kind
))
11816 tree type
= TREE_TYPE (t
);
11817 tree op0
, op1
, op2
;
11819 switch (TREE_CODE_LENGTH (code
))
11822 op0
= TREE_OPERAND (t
, 0);
11823 tem
= fold_unary_loc (loc
, code
, type
, op0
);
11824 return tem
? tem
: expr
;
11826 op0
= TREE_OPERAND (t
, 0);
11827 op1
= TREE_OPERAND (t
, 1);
11828 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
11829 return tem
? tem
: expr
;
11831 op0
= TREE_OPERAND (t
, 0);
11832 op1
= TREE_OPERAND (t
, 1);
11833 op2
= TREE_OPERAND (t
, 2);
11834 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
11835 return tem
? tem
: expr
;
11845 tree op0
= TREE_OPERAND (t
, 0);
11846 tree op1
= TREE_OPERAND (t
, 1);
11848 if (TREE_CODE (op1
) == INTEGER_CST
11849 && TREE_CODE (op0
) == CONSTRUCTOR
11850 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
11852 tree val
= get_array_ctor_element_at_index (op0
,
11853 wi::to_offset (op1
));
11861 /* Return a VECTOR_CST if possible. */
11864 tree type
= TREE_TYPE (t
);
11865 if (TREE_CODE (type
) != VECTOR_TYPE
)
11870 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
11871 if (! CONSTANT_CLASS_P (val
))
11874 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
11878 return fold (DECL_INITIAL (t
));
11882 } /* switch (code) */
11885 #ifdef ENABLE_FOLD_CHECKING
11888 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
11889 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
11890 static void fold_check_failed (const_tree
, const_tree
);
11891 void print_fold_checksum (const_tree
);
/* When --enable-checking=fold is in effect, compute a digest of EXPR before
   and after the actual fold call, to verify that fold did not accidentally
   change the original EXPR.  */
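/* Schematically the check is

     compute digest D1 of EXPR;
     ret = fold_1 (EXPR);
     compute digest D2 of EXPR;
     if (D1 != D2)
       fold_check_failed (EXPR, ret);

   using md5_init_ctx, fold_checksum_tree and md5_finish_ctx for the digests,
   so any in-place modification of the original tree by fold_1 is caught.  */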
11901 struct md5_ctx ctx
;
11902 unsigned char checksum_before
[16], checksum_after
[16];
11903 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
11905 md5_init_ctx (&ctx
);
11906 fold_checksum_tree (expr
, &ctx
, &ht
);
11907 md5_finish_ctx (&ctx
, checksum_before
);
11910 ret
= fold_1 (expr
);
11912 md5_init_ctx (&ctx
);
11913 fold_checksum_tree (expr
, &ctx
, &ht
);
11914 md5_finish_ctx (&ctx
, checksum_after
);
11916 if (memcmp (checksum_before
, checksum_after
, 16))
11917 fold_check_failed (expr
, ret
);
11923 print_fold_checksum (const_tree expr
)
11925 struct md5_ctx ctx
;
11926 unsigned char checksum
[16], cnt
;
11927 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
11929 md5_init_ctx (&ctx
);
11930 fold_checksum_tree (expr
, &ctx
, &ht
);
11931 md5_finish_ctx (&ctx
, checksum
);
11932 for (cnt
= 0; cnt
< 16; ++cnt
)
11933 fprintf (stderr
, "%02x", checksum
[cnt
]);
11934 putc ('\n', stderr
);
11938 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
11940 internal_error ("fold check: original tree changed by fold");
11944 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
11945 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
11947 const tree_node
**slot
;
11948 enum tree_code code
;
11949 union tree_node buf
;
11955 slot
= ht
->find_slot (expr
, INSERT
);
11959 code
= TREE_CODE (expr
);
11960 if (TREE_CODE_CLASS (code
) == tcc_declaration
11961 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
11963 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11964 memcpy ((char *) &buf
, expr
, tree_size (expr
));
11965 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
11966 buf
.decl_with_vis
.symtab_node
= NULL
;
11967 expr
= (tree
) &buf
;
11969 else if (TREE_CODE_CLASS (code
) == tcc_type
11970 && (TYPE_POINTER_TO (expr
)
11971 || TYPE_REFERENCE_TO (expr
)
11972 || TYPE_CACHED_VALUES_P (expr
)
11973 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
11974 || TYPE_NEXT_VARIANT (expr
)
11975 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
11977 /* Allow these fields to be modified. */
11979 memcpy ((char *) &buf
, expr
, tree_size (expr
));
11980 expr
= tmp
= (tree
) &buf
;
11981 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
11982 TYPE_POINTER_TO (tmp
) = NULL
;
11983 TYPE_REFERENCE_TO (tmp
) = NULL
;
11984 TYPE_NEXT_VARIANT (tmp
) = NULL
;
11985 TYPE_ALIAS_SET (tmp
) = -1;
11986 if (TYPE_CACHED_VALUES_P (tmp
))
11988 TYPE_CACHED_VALUES_P (tmp
) = 0;
11989 TYPE_CACHED_VALUES (tmp
) = NULL
;
11992 md5_process_bytes (expr
, tree_size (expr
), ctx
);
11993 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
11994 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
11995 if (TREE_CODE_CLASS (code
) != tcc_type
11996 && TREE_CODE_CLASS (code
) != tcc_declaration
11997 && code
!= TREE_LIST
11998 && code
!= SSA_NAME
11999 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
12000 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
12001 switch (TREE_CODE_CLASS (code
))
12007 md5_process_bytes (TREE_STRING_POINTER (expr
),
12008 TREE_STRING_LENGTH (expr
), ctx
);
12011 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
12012 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
12015 len
= vector_cst_encoded_nelts (expr
);
12016 for (i
= 0; i
< len
; ++i
)
12017 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr
, i
), ctx
, ht
);
12023 case tcc_exceptional
:
12027 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
12028 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
12029 expr
= TREE_CHAIN (expr
);
12030 goto recursive_label
;
12033 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
12034 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
12040 case tcc_expression
:
12041 case tcc_reference
:
12042 case tcc_comparison
:
12045 case tcc_statement
:
12047 len
= TREE_OPERAND_LENGTH (expr
);
12048 for (i
= 0; i
< len
; ++i
)
12049 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
12051 case tcc_declaration
:
12052 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
12053 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
12054 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
12056 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
12057 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
12058 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
12059 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
12060 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
12063 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
12065 if (TREE_CODE (expr
) == FUNCTION_DECL
)
12067 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
12068 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
12070 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
12074 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
12075 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
12076 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
12077 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
12078 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
12079 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
12080 if (INTEGRAL_TYPE_P (expr
)
12081 || SCALAR_FLOAT_TYPE_P (expr
))
12083 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
12084 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
12086 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
12087 if (TREE_CODE (expr
) == RECORD_TYPE
12088 || TREE_CODE (expr
) == UNION_TYPE
12089 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
12090 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
12091 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputted checksum changes.  */
12103 DEBUG_FUNCTION
void
12104 debug_fold_checksum (const_tree t
)
12107 unsigned char checksum
[16];
12108 struct md5_ctx ctx
;
12109 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12111 md5_init_ctx (&ctx
);
12112 fold_checksum_tree (t
, &ctx
, &ht
);
12113 md5_finish_ctx (&ctx
, checksum
);
12116 for (i
= 0; i
< 16; i
++)
12117 fprintf (stderr
, "%d ", checksum
[i
]);
12119 fprintf (stderr
, "\n");
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */
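/* Typical use (illustrative):
     fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (x), x)
   either returns a simplified tree (a constant when X is constant) or a
   freshly built NEGATE_EXPR node; it never returns NULL_TREE.  */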
12130 fold_build1_loc (location_t loc
,
12131 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
12134 #ifdef ENABLE_FOLD_CHECKING
12135 unsigned char checksum_before
[16], checksum_after
[16];
12136 struct md5_ctx ctx
;
12137 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12139 md5_init_ctx (&ctx
);
12140 fold_checksum_tree (op0
, &ctx
, &ht
);
12141 md5_finish_ctx (&ctx
, checksum_before
);
12145 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12147 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
12149 #ifdef ENABLE_FOLD_CHECKING
12150 md5_init_ctx (&ctx
);
12151 fold_checksum_tree (op0
, &ctx
, &ht
);
12152 md5_finish_ctx (&ctx
, checksum_after
);
12154 if (memcmp (checksum_before
, checksum_after
, 16))
12155 fold_check_failed (op0
, tem
);
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */
12167 fold_build2_loc (location_t loc
,
12168 enum tree_code code
, tree type
, tree op0
, tree op1
12172 #ifdef ENABLE_FOLD_CHECKING
12173 unsigned char checksum_before_op0
[16],
12174 checksum_before_op1
[16],
12175 checksum_after_op0
[16],
12176 checksum_after_op1
[16];
12177 struct md5_ctx ctx
;
12178 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12180 md5_init_ctx (&ctx
);
12181 fold_checksum_tree (op0
, &ctx
, &ht
);
12182 md5_finish_ctx (&ctx
, checksum_before_op0
);
12185 md5_init_ctx (&ctx
);
12186 fold_checksum_tree (op1
, &ctx
, &ht
);
12187 md5_finish_ctx (&ctx
, checksum_before_op1
);
12191 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12193 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
12195 #ifdef ENABLE_FOLD_CHECKING
12196 md5_init_ctx (&ctx
);
12197 fold_checksum_tree (op0
, &ctx
, &ht
);
12198 md5_finish_ctx (&ctx
, checksum_after_op0
);
12201 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12202 fold_check_failed (op0
, tem
);
12204 md5_init_ctx (&ctx
);
12205 fold_checksum_tree (op1
, &ctx
, &ht
);
12206 md5_finish_ctx (&ctx
, checksum_after_op1
);
12208 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12209 fold_check_failed (op1
, tem
);
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */
12220 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
12221 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
12224 #ifdef ENABLE_FOLD_CHECKING
12225 unsigned char checksum_before_op0
[16],
12226 checksum_before_op1
[16],
12227 checksum_before_op2
[16],
12228 checksum_after_op0
[16],
12229 checksum_after_op1
[16],
12230 checksum_after_op2
[16];
12231 struct md5_ctx ctx
;
12232 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12234 md5_init_ctx (&ctx
);
12235 fold_checksum_tree (op0
, &ctx
, &ht
);
12236 md5_finish_ctx (&ctx
, checksum_before_op0
);
12239 md5_init_ctx (&ctx
);
12240 fold_checksum_tree (op1
, &ctx
, &ht
);
12241 md5_finish_ctx (&ctx
, checksum_before_op1
);
12244 md5_init_ctx (&ctx
);
12245 fold_checksum_tree (op2
, &ctx
, &ht
);
12246 md5_finish_ctx (&ctx
, checksum_before_op2
);
12250 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
12251 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12253 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
12255 #ifdef ENABLE_FOLD_CHECKING
12256 md5_init_ctx (&ctx
);
12257 fold_checksum_tree (op0
, &ctx
, &ht
);
12258 md5_finish_ctx (&ctx
, checksum_after_op0
);
12261 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12262 fold_check_failed (op0
, tem
);
12264 md5_init_ctx (&ctx
);
12265 fold_checksum_tree (op1
, &ctx
, &ht
);
12266 md5_finish_ctx (&ctx
, checksum_after_op1
);
12269 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12270 fold_check_failed (op1
, tem
);
12272 md5_init_ctx (&ctx
);
12273 fold_checksum_tree (op2
, &ctx
, &ht
);
12274 md5_finish_ctx (&ctx
, checksum_after_op2
);
12276 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
12277 fold_check_failed (op2
, tem
);
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */
12288 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
12289 int nargs
, tree
*argarray
)
12292 #ifdef ENABLE_FOLD_CHECKING
12293 unsigned char checksum_before_fn
[16],
12294 checksum_before_arglist
[16],
12295 checksum_after_fn
[16],
12296 checksum_after_arglist
[16];
12297 struct md5_ctx ctx
;
12298 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12301 md5_init_ctx (&ctx
);
12302 fold_checksum_tree (fn
, &ctx
, &ht
);
12303 md5_finish_ctx (&ctx
, checksum_before_fn
);
12306 md5_init_ctx (&ctx
);
12307 for (i
= 0; i
< nargs
; i
++)
12308 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12309 md5_finish_ctx (&ctx
, checksum_before_arglist
);
12313 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
12315 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12317 #ifdef ENABLE_FOLD_CHECKING
12318 md5_init_ctx (&ctx
);
12319 fold_checksum_tree (fn
, &ctx
, &ht
);
12320 md5_finish_ctx (&ctx
, checksum_after_fn
);
12323 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
12324 fold_check_failed (fn
, tem
);
12326 md5_init_ctx (&ctx
);
12327 for (i
= 0; i
< nargs
; i
++)
12328 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
12329 md5_finish_ctx (&ctx
, checksum_after_arglist
);
12331 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
12332 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */
12341 #define START_FOLD_INIT \
12342 int saved_signaling_nans = flag_signaling_nans;\
12343 int saved_trapping_math = flag_trapping_math;\
12344 int saved_rounding_math = flag_rounding_math;\
12345 int saved_trapv = flag_trapv;\
12346 int saved_folding_initializer = folding_initializer;\
12347 flag_signaling_nans = 0;\
12348 flag_trapping_math = 0;\
12349 flag_rounding_math = 0;\
12351 folding_initializer = 1;
12353 #define END_FOLD_INIT \
12354 flag_signaling_nans = saved_signaling_nans;\
12355 flag_trapping_math = saved_trapping_math;\
12356 flag_rounding_math = saved_rounding_math;\
12357 flag_trapv = saved_trapv;\
12358 folding_initializer = saved_folding_initializer;
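/* The initializer variants below simply bracket the ordinary fold_build*
   call with these macros, roughly

     START_FOLD_INIT;
     result = fold_build2_loc (loc, code, type, op0, op1);
     END_FOLD_INIT;

   so that trapping-math and related flags are suppressed while folding and
   then restored.  */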
12361 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
12362 tree type
, tree op
)
12367 result
= fold_build1_loc (loc
, code
, type
, op
);
12374 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
12375 tree type
, tree op0
, tree op1
)
12380 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
12387 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
12388 int nargs
, tree
*argarray
)
12393 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
12399 #undef START_FOLD_INIT
12400 #undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
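/* A minimal usage sketch of the routine documented above; the helper below
   is illustrative only, with a hypothetical caller-supplied EXPR.  */

static bool
example_is_multiple_of_8 (tree expr)
{
  /* True when EXPR can be proven to be a whole number of 8-byte units.  */
  return multiple_of_p (sizetype, expr, build_int_cst (sizetype, 8));
}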
12443 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
12448 if (operand_equal_p (top
, bottom
, 0))
12451 if (TREE_CODE (type
) != INTEGER_TYPE
)
12454 switch (TREE_CODE (top
))
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12459 if (!integer_pow2p (bottom
))
12464 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12465 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
      /* It is impossible to prove precisely whether op0 - op1 is a multiple
	 of bottom, so be conservative here and check whether both op0 and
	 op1 are multiples of bottom.  Note we check the second operand first
	 since it's usually simpler.  */
12472 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12473 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
      /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
	 as op0 - 3 if the expression has unsigned type.  For example,
	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12479 op1
= TREE_OPERAND (top
, 1);
12480 if (TYPE_UNSIGNED (type
)
12481 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
12482 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
12483 return (multiple_of_p (type
, op1
, bottom
)
12484 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
12487 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
12489 op1
= TREE_OPERAND (top
, 1);
12490 /* const_binop may not detect overflow correctly,
12491 so check for it explicitly here. */
12492 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
12494 && 0 != (t1
= fold_convert (type
,
12495 const_binop (LSHIFT_EXPR
,
12498 && !TREE_OVERFLOW (t1
))
12499 return multiple_of_p (type
, t1
, bottom
);
12504 /* Can't handle conversions from non-integral or wider integral type. */
12505 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
12506 || (TYPE_PRECISION (type
)
12507 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
12513 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
12516 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
12517 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
12520 if (TREE_CODE (bottom
) != INTEGER_CST
12521 || integer_zerop (bottom
)
12522 || (TYPE_UNSIGNED (type
)
12523 && (tree_int_cst_sgn (top
) < 0
12524 || tree_int_cst_sgn (bottom
) < 0)))
12526 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
12530 if (TREE_CODE (bottom
) == INTEGER_CST
12531 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
12532 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
12534 enum tree_code code
= gimple_assign_rhs_code (stmt
);
	  /* Check for special cases to see if top is defined as multiple
	     of bottom:

	       top = (X & ~(bottom - 1)) ; bottom is power of 2

	     or

	       Y = X % bottom
	       top = X - Y.  */
12545 if (code
== BIT_AND_EXPR
12546 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
12547 && TREE_CODE (op2
) == INTEGER_CST
12548 && integer_pow2p (bottom
)
12549 && wi::multiple_of_p (wi::to_widest (op2
),
12550 wi::to_widest (bottom
), UNSIGNED
))
12553 op1
= gimple_assign_rhs1 (stmt
);
12554 if (code
== MINUS_EXPR
12555 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
12556 && TREE_CODE (op2
) == SSA_NAME
12557 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
12558 && gimple_code (stmt
) == GIMPLE_ASSIGN
12559 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
12560 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
12561 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
12572 #define tree_expr_nonnegative_warnv_p(X, Y) \
12573 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12575 #define RECURSE(X) \
12576 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12578 /* Return true if CODE or TYPE is known to be non-negative. */
12581 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
12583 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
12584 && truth_value_p (code
))
12585 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12586 have a signed:1 type (where the value is -1 and 0). */
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12597 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12598 bool *strict_overflow_p
, int depth
)
12600 if (TYPE_UNSIGNED (type
))
12606 /* We can't return 1 if flag_wrapv is set because
12607 ABS_EXPR<INT_MIN> = INT_MIN. */
12608 if (!ANY_INTEGRAL_TYPE_P (type
))
12610 if (TYPE_OVERFLOW_UNDEFINED (type
))
12612 *strict_overflow_p
= true;
12617 case NON_LVALUE_EXPR
:
12619 case FIX_TRUNC_EXPR
:
12620 return RECURSE (op0
);
12624 tree inner_type
= TREE_TYPE (op0
);
12625 tree outer_type
= type
;
12627 if (TREE_CODE (outer_type
) == REAL_TYPE
)
12629 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12630 return RECURSE (op0
);
12631 if (INTEGRAL_TYPE_P (inner_type
))
12633 if (TYPE_UNSIGNED (inner_type
))
12635 return RECURSE (op0
);
12638 else if (INTEGRAL_TYPE_P (outer_type
))
12640 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12641 return RECURSE (op0
);
12642 if (INTEGRAL_TYPE_P (inner_type
))
12643 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
12644 && TYPE_UNSIGNED (inner_type
);
12650 return tree_simple_nonnegative_warnv_p (code
, type
);
  /* We don't know the sign of `t', so be conservative and return false.  */
12657 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12658 value is based on the assumption that signed overflow is undefined,
12659 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12660 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12663 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
12664 tree op1
, bool *strict_overflow_p
,
12667 if (TYPE_UNSIGNED (type
))
12672 case POINTER_PLUS_EXPR
:
12674 if (FLOAT_TYPE_P (type
))
12675 return RECURSE (op0
) && RECURSE (op1
);
      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
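      /* Worked example: with a 32-bit result and two zero-extended 16-bit
	 operands, the sum is at most 2 * (2^16 - 1) = 2^17 - 2, which needs
	 only 17 bits, so the sign bit of the result can never be set.  */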
12679 if (TREE_CODE (type
) == INTEGER_TYPE
12680 && TREE_CODE (op0
) == NOP_EXPR
12681 && TREE_CODE (op1
) == NOP_EXPR
)
12683 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
12684 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
12685 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12686 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12688 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
12689 TYPE_PRECISION (inner2
)) + 1;
12690 return prec
< TYPE_PRECISION (type
);
12696 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
      /* x * x is always non-negative for floating point x
	 or in the absence of overflow.  */
12700 if (operand_equal_p (op0
, op1
, 0)
12701 || (RECURSE (op0
) && RECURSE (op1
)))
12703 if (ANY_INTEGRAL_TYPE_P (type
)
12704 && TYPE_OVERFLOW_UNDEFINED (type
))
12705 *strict_overflow_p
= true;
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total number of bits is smaller than that
	 of the result.  */
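      /* Worked example: a zero-extended 15-bit operand times a zero-extended
	 16-bit operand is below 2^31, so it fits in 31 bits and the sign bit
	 of a 32-bit result stays clear.  */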
12712 if (TREE_CODE (type
) == INTEGER_TYPE
12713 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
12714 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
12716 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
12717 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
12719 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
12720 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
12723 bool unsigned0
= TYPE_UNSIGNED (inner0
);
12724 bool unsigned1
= TYPE_UNSIGNED (inner1
);
12726 if (TREE_CODE (op0
) == INTEGER_CST
)
12727 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
12729 if (TREE_CODE (op1
) == INTEGER_CST
)
12730 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
12732 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
12733 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
12735 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
12736 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
12737 : TYPE_PRECISION (inner0
);
12739 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
12740 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
12741 : TYPE_PRECISION (inner1
);
12743 return precision0
+ precision1
< TYPE_PRECISION (type
);
12750 return RECURSE (op0
) || RECURSE (op1
);
12756 case TRUNC_DIV_EXPR
:
12757 case CEIL_DIV_EXPR
:
12758 case FLOOR_DIV_EXPR
:
12759 case ROUND_DIV_EXPR
:
12760 return RECURSE (op0
) && RECURSE (op1
);
12762 case TRUNC_MOD_EXPR
:
12763 return RECURSE (op0
);
12765 case FLOOR_MOD_EXPR
:
12766 return RECURSE (op1
);
12768 case CEIL_MOD_EXPR
:
12769 case ROUND_MOD_EXPR
:
12771 return tree_simple_nonnegative_warnv_p (code
, type
);
  /* We don't know the sign of `t', so be conservative and return false.  */
12778 /* Return true if T is known to be non-negative. If the return
12779 value is based on the assumption that signed overflow is undefined,
12780 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12781 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12784 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
12786 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12789 switch (TREE_CODE (t
))
12792 return tree_int_cst_sgn (t
) >= 0;
12795 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
12798 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
12801 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
12804 /* Limit the depth of recursion to avoid quadratic behavior.
12805 This is expected to catch almost all occurrences in practice.
12806 If this code misses important cases that unbounded recursion
12807 would not, passes that need this information could be revised
12808 to provide it through dataflow propagation. */
12809 return (!name_registered_for_update_p (t
)
12810 && depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
)
12811 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
12812 strict_overflow_p
, depth
));
12815 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
12819 /* Return true if T is known to be non-negative. If the return
12820 value is based on the assumption that signed overflow is undefined,
12821 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12822 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12825 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
12826 bool *strict_overflow_p
, int depth
)
12847 case CFN_BUILT_IN_BSWAP32
:
12848 case CFN_BUILT_IN_BSWAP64
:
12854 /* sqrt(-0.0) is -0.0. */
12855 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
12857 return RECURSE (arg0
);
12883 CASE_CFN_NEARBYINT
:
12890 CASE_CFN_SIGNIFICAND
:
12894 /* True if the 1st argument is nonnegative. */
12895 return RECURSE (arg0
);
12899 /* True if the 1st OR 2nd arguments are nonnegative. */
12900 return RECURSE (arg0
) || RECURSE (arg1
);
12904 /* True if the 1st AND 2nd arguments are nonnegative. */
12905 return RECURSE (arg0
) && RECURSE (arg1
);
12908 CASE_CFN_COPYSIGN_FN
:
12909 /* True if the 2nd argument is nonnegative. */
12910 return RECURSE (arg1
);
12913 /* True if the 1st argument is nonnegative or the second
12914 argument is an even integer. */
12915 if (TREE_CODE (arg1
) == INTEGER_CST
12916 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
12918 return RECURSE (arg0
);
12921 /* True if the 1st argument is nonnegative or the second
12922 argument is an even integer valued real. */
12923 if (TREE_CODE (arg1
) == REAL_CST
)
12928 c
= TREE_REAL_CST (arg1
);
12929 n
= real_to_integer (&c
);
12932 REAL_VALUE_TYPE cint
;
12933 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
12934 if (real_identical (&c
, &cint
))
12938 return RECURSE (arg0
);
12943 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
12946 /* Return true if T is known to be non-negative. If the return
12947 value is based on the assumption that signed overflow is undefined,
12948 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12949 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12952 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
12954 enum tree_code code
= TREE_CODE (t
);
12955 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12962 tree temp
= TARGET_EXPR_SLOT (t
);
12963 t
= TARGET_EXPR_INITIAL (t
);
12965 /* If the initializer is non-void, then it's a normal expression
12966 that will be assigned to the slot. */
12967 if (!VOID_TYPE_P (t
))
12968 return RECURSE (t
);
12970 /* Otherwise, the initializer sets the slot in some way. One common
12971 way is an assignment statement at the end of the initializer. */
12974 if (TREE_CODE (t
) == BIND_EXPR
)
12975 t
= expr_last (BIND_EXPR_BODY (t
));
12976 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
12977 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
12978 t
= expr_last (TREE_OPERAND (t
, 0));
12979 else if (TREE_CODE (t
) == STATEMENT_LIST
)
12984 if (TREE_CODE (t
) == MODIFY_EXPR
12985 && TREE_OPERAND (t
, 0) == temp
)
12986 return RECURSE (TREE_OPERAND (t
, 1));
12993 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
12994 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
12996 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
12997 get_call_combined_fn (t
),
13000 strict_overflow_p
, depth
);
13002 case COMPOUND_EXPR
:
13004 return RECURSE (TREE_OPERAND (t
, 1));
13007 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
13010 return RECURSE (TREE_OPERAND (t
, 0));
13013 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
13018 #undef tree_expr_nonnegative_warnv_p
13020 /* Return true if T is known to be non-negative. If the return
13021 value is based on the assumption that signed overflow is undefined,
13022 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13023 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13026 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13028 enum tree_code code
;
13029 if (t
== error_mark_node
)
13032 code
= TREE_CODE (t
);
13033 switch (TREE_CODE_CLASS (code
))
13036 case tcc_comparison
:
13037 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13039 TREE_OPERAND (t
, 0),
13040 TREE_OPERAND (t
, 1),
13041 strict_overflow_p
, depth
);
13044 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13046 TREE_OPERAND (t
, 0),
13047 strict_overflow_p
, depth
);
13050 case tcc_declaration
:
13051 case tcc_reference
:
13052 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13060 case TRUTH_AND_EXPR
:
13061 case TRUTH_OR_EXPR
:
13062 case TRUTH_XOR_EXPR
:
13063 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
13065 TREE_OPERAND (t
, 0),
13066 TREE_OPERAND (t
, 1),
13067 strict_overflow_p
, depth
);
13068 case TRUTH_NOT_EXPR
:
13069 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
13071 TREE_OPERAND (t
, 0),
13072 strict_overflow_p
, depth
);
13079 case WITH_SIZE_EXPR
:
13081 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13084 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
13088 /* Return true if `t' is known to be non-negative. Handle warnings
13089 about undefined signed overflow. */
13092 tree_expr_nonnegative_p (tree t
)
13094 bool ret
, strict_overflow_p
;
13096 strict_overflow_p
= false;
13097 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
13098 if (strict_overflow_p
)
13099 fold_overflow_warning (("assuming signed overflow does not occur when "
13100 "determining that expression is always "
13102 WARN_STRICT_OVERFLOW_MISC
);
13107 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13108 For floating point we further ensure that T is not denormal.
13109 Similar logic is present in nonzero_address in rtlanal.h.
13111 If the return value is based on the assumption that signed overflow
13112 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13113 change *STRICT_OVERFLOW_P. */
13116 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
13117 bool *strict_overflow_p
)
13122 return tree_expr_nonzero_warnv_p (op0
,
13123 strict_overflow_p
);
13127 tree inner_type
= TREE_TYPE (op0
);
13128 tree outer_type
= type
;
13130 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
13131 && tree_expr_nonzero_warnv_p (op0
,
13132 strict_overflow_p
));
13136 case NON_LVALUE_EXPR
:
13137 return tree_expr_nonzero_warnv_p (op0
,
13138 strict_overflow_p
);
13147 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13148 For floating point we further ensure that T is not denormal.
13149 Similar logic is present in nonzero_address in rtlanal.h.
13151 If the return value is based on the assumption that signed overflow
13152 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13153 change *STRICT_OVERFLOW_P. */
13156 tree_binary_nonzero_warnv_p (enum tree_code code
,
13159 tree op1
, bool *strict_overflow_p
)
13161 bool sub_strict_overflow_p
;
13164 case POINTER_PLUS_EXPR
:
13166 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
13168 /* With the presence of negative values it is hard
13169 to say something. */
13170 sub_strict_overflow_p
= false;
13171 if (!tree_expr_nonnegative_warnv_p (op0
,
13172 &sub_strict_overflow_p
)
13173 || !tree_expr_nonnegative_warnv_p (op1
,
13174 &sub_strict_overflow_p
))
      /* One of the operands must be positive and the other non-negative.  */
13177 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13178 overflows, on a twos-complement machine the sum of two
13179 nonnegative numbers can never be zero. */
13180 return (tree_expr_nonzero_warnv_p (op0
,
13182 || tree_expr_nonzero_warnv_p (op1
,
13183 strict_overflow_p
));
13188 if (TYPE_OVERFLOW_UNDEFINED (type
))
13190 if (tree_expr_nonzero_warnv_p (op0
,
13192 && tree_expr_nonzero_warnv_p (op1
,
13193 strict_overflow_p
))
13195 *strict_overflow_p
= true;
13202 sub_strict_overflow_p
= false;
13203 if (tree_expr_nonzero_warnv_p (op0
,
13204 &sub_strict_overflow_p
)
13205 && tree_expr_nonzero_warnv_p (op1
,
13206 &sub_strict_overflow_p
))
13208 if (sub_strict_overflow_p
)
13209 *strict_overflow_p
= true;
13214 sub_strict_overflow_p
= false;
13215 if (tree_expr_nonzero_warnv_p (op0
,
13216 &sub_strict_overflow_p
))
13218 if (sub_strict_overflow_p
)
13219 *strict_overflow_p
= true;
13221 /* When both operands are nonzero, then MAX must be too. */
13222 if (tree_expr_nonzero_warnv_p (op1
,
13223 strict_overflow_p
))
13226 /* MAX where operand 0 is positive is positive. */
13227 return tree_expr_nonnegative_warnv_p (op0
,
13228 strict_overflow_p
);
13230 /* MAX where operand 1 is positive is positive. */
13231 else if (tree_expr_nonzero_warnv_p (op1
,
13232 &sub_strict_overflow_p
)
13233 && tree_expr_nonnegative_warnv_p (op1
,
13234 &sub_strict_overflow_p
))
13236 if (sub_strict_overflow_p
)
13237 *strict_overflow_p
= true;
13243 return (tree_expr_nonzero_warnv_p (op1
,
13245 || tree_expr_nonzero_warnv_p (op0
,
13246 strict_overflow_p
));
13255 /* Return true when T is an address and is known to be nonzero.
13256 For floating point we further ensure that T is not denormal.
13257 Similar logic is present in nonzero_address in rtlanal.h.
13259 If the return value is based on the assumption that signed overflow
13260 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13261 change *STRICT_OVERFLOW_P. */
13264 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
13266 bool sub_strict_overflow_p
;
13267 switch (TREE_CODE (t
))
13270 return !integer_zerop (t
);
13274 tree base
= TREE_OPERAND (t
, 0);
13276 if (!DECL_P (base
))
13277 base
= get_base_address (base
);
13279 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
13280 base
= TARGET_EXPR_SLOT (base
);
13285 /* For objects in symbol table check if we know they are non-zero.
13286 Don't do anything for variables and functions before symtab is built;
13287 it is quite possible that they will be declared weak later. */
13288 int nonzero_addr
= maybe_nonzero_address (base
);
13289 if (nonzero_addr
>= 0)
13290 return nonzero_addr
;
13292 /* Constants are never weak. */
13293 if (CONSTANT_CLASS_P (base
))
13300 sub_strict_overflow_p
= false;
13301 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
13302 &sub_strict_overflow_p
)
13303 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
13304 &sub_strict_overflow_p
))
13306 if (sub_strict_overflow_p
)
13307 *strict_overflow_p
= true;
13313 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
13315 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
13323 #define integer_valued_real_p(X) \
13324 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13326 #define RECURSE(X) \
13327 ((integer_valued_real_p) (X, depth + 1))
13329 /* Return true if the floating point result of (CODE OP0) has an
13330 integer value. We also allow +Inf, -Inf and NaN to be considered
13331 integer values. Return false for signaling NaN.
13333 DEPTH is the current nesting depth of the query. */
13336 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
13344 return RECURSE (op0
);
13348 tree type
= TREE_TYPE (op0
);
13349 if (TREE_CODE (type
) == INTEGER_TYPE
)
13351 if (TREE_CODE (type
) == REAL_TYPE
)
13352 return RECURSE (op0
);
13362 /* Return true if the floating point result of (CODE OP0 OP1) has an
13363 integer value. We also allow +Inf, -Inf and NaN to be considered
13364 integer values. Return false for signaling NaN.
13366 DEPTH is the current nesting depth of the query. */
13369 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
13378 return RECURSE (op0
) && RECURSE (op1
);
13386 /* Return true if the floating point result of calling FNDECL with arguments
13387 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13388 considered integer values. Return false for signaling NaN. If FNDECL
13389 takes fewer than 2 arguments, the remaining ARGn are null.
13391 DEPTH is the current nesting depth of the query. */
13394 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
13400 CASE_CFN_NEARBYINT
:
13410 return RECURSE (arg0
) && RECURSE (arg1
);
13418 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13419 has an integer value. We also allow +Inf, -Inf and NaN to be
13420 considered integer values. Return false for signaling NaN.
13422 DEPTH is the current nesting depth of the query. */
13425 integer_valued_real_single_p (tree t
, int depth
)
13427 switch (TREE_CODE (t
))
13430 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
13433 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
13436 /* Limit the depth of recursion to avoid quadratic behavior.
13437 This is expected to catch almost all occurrences in practice.
13438 If this code misses important cases that unbounded recursion
13439 would not, passes that need this information could be revised
13440 to provide it through dataflow propagation. */
13441 return (!name_registered_for_update_p (t
)
13442 && depth
< PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH
)
13443 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
13452 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13453 has an integer value. We also allow +Inf, -Inf and NaN to be
13454 considered integer values. Return false for signaling NaN.
13456 DEPTH is the current nesting depth of the query. */
13459 integer_valued_real_invalid_p (tree t
, int depth
)
13461 switch (TREE_CODE (t
))
13463 case COMPOUND_EXPR
:
13466 return RECURSE (TREE_OPERAND (t
, 1));
13469 return RECURSE (TREE_OPERAND (t
, 0));
13478 #undef integer_valued_real_p
13480 /* Return true if the floating point expression T has an integer value.
13481 We also allow +Inf, -Inf and NaN to be considered integer values.
13482 Return false for signaling NaN.
13484 DEPTH is the current nesting depth of the query. */
13487 integer_valued_real_p (tree t
, int depth
)
13489 if (t
== error_mark_node
)
13492 tree_code code
= TREE_CODE (t
);
13493 switch (TREE_CODE_CLASS (code
))
13496 case tcc_comparison
:
13497 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
13498 TREE_OPERAND (t
, 1), depth
);
13501 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
13504 case tcc_declaration
:
13505 case tcc_reference
:
13506 return integer_valued_real_single_p (t
, depth
);
13516 return integer_valued_real_single_p (t
, depth
);
13520 tree arg0
= (call_expr_nargs (t
) > 0
13521 ? CALL_EXPR_ARG (t
, 0)
13523 tree arg1
= (call_expr_nargs (t
) > 1
13524 ? CALL_EXPR_ARG (t
, 1)
13526 return integer_valued_real_call_p (get_call_combined_fn (t
),
13527 arg0
, arg1
, depth
);
13531 return integer_valued_real_invalid_p (t
, depth
);
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying
   TYPE, OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */
13544 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
13546 tree tem
= fold_binary (code
, type
, op0
, op1
);
13547 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */
13559 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
13561 tree tem
= fold_unary (code
, type
, op0
);
13562 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */
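/* Illustrative example: for an expression such as "abc"[1] this returns the
   INTEGER_CST 'b'; for a non-constant index, or a base that is not a
   STRING_CST, it returns NULL.  */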
13570 fold_read_from_constant_string (tree exp
)
13572 if ((TREE_CODE (exp
) == INDIRECT_REF
13573 || TREE_CODE (exp
) == ARRAY_REF
)
13574 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
13576 tree exp1
= TREE_OPERAND (exp
, 0);
13579 location_t loc
= EXPR_LOCATION (exp
);
13581 if (TREE_CODE (exp
) == INDIRECT_REF
)
13582 string
= string_constant (exp1
, &index
);
13585 tree low_bound
= array_ref_low_bound (exp
);
13586 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
      /* Optimize the special-case of a zero lower bound.

	 We convert the low_bound to sizetype to avoid some problems
	 with constant folding.  (E.g. suppose the lower bound is 1,
	 and its mode is QI.  Without the conversion, (ARRAY
	 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13595 if (! integer_zerop (low_bound
))
13596 index
= size_diffop_loc (loc
, index
,
13597 fold_convert_loc (loc
, sizetype
, low_bound
));
13602 scalar_int_mode char_mode
;
13604 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
13605 && TREE_CODE (string
) == STRING_CST
13606 && TREE_CODE (index
) == INTEGER_CST
13607 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
13608 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))),
13610 && GET_MODE_SIZE (char_mode
) == 1)
13611 return build_int_cst_type (TREE_TYPE (exp
),
13612 (TREE_STRING_POINTER (string
)
13613 [TREE_INT_CST_LOW (index
)]));
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */
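/* Worked example: for a 32-bit signed INTEGER_CST the interesting case is
   ARG0 == INT_MIN; its negation does not fit in the type, so force_fit_type
   is told about the overflow and TREE_OVERFLOW is set on the result.  */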
13624 fold_negate_const (tree arg0
, tree type
)
13626 tree t
= NULL_TREE
;
13628 switch (TREE_CODE (arg0
))
13633 wide_int val
= wi::neg (wi::to_wide (arg0
), &overflow
);
13634 t
= force_fit_type (type
, val
, 1,
13635 (overflow
&& ! TYPE_UNSIGNED (type
))
13636 || TREE_OVERFLOW (arg0
));
13641 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
13646 FIXED_VALUE_TYPE f
;
13647 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
13648 &(TREE_FIXED_CST (arg0
)), NULL
,
13649 TYPE_SATURATING (type
));
13650 t
= build_fixed (type
, f
);
13651 /* Propagate overflow flags. */
13652 if (overflow_p
| TREE_OVERFLOW (arg0
))
13653 TREE_OVERFLOW (t
) = 1;
13658 gcc_unreachable ();
13664 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13665 an integer constant or real constant.
13667 TYPE is the type of the result. */
13670 fold_abs_const (tree arg0
, tree type
)
13672 tree t
= NULL_TREE
;
13674 switch (TREE_CODE (arg0
))
13678 /* If the value is unsigned or non-negative, then the absolute value
13679 is the same as the ordinary value. */
13680 if (!wi::neg_p (wi::to_wide (arg0
), TYPE_SIGN (type
)))
      /* If the value is negative, then the absolute value is
	 its negation.  */
13688 wide_int val
= wi::neg (wi::to_wide (arg0
), &overflow
);
13689 t
= force_fit_type (type
, val
, -1,
13690 overflow
| TREE_OVERFLOW (arg0
));
13696 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
13697 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
13703 gcc_unreachable ();
13709 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13710 constant. TYPE is the type of the result. */
13713 fold_not_const (const_tree arg0
, tree type
)
13715 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
13717 return force_fit_type (type
, ~wi::to_wide (arg0
), 0, TREE_OVERFLOW (arg0
));
13720 /* Given CODE, a relational operator, the target type, TYPE and two
13721 constant operands OP0 and OP1, return the result of the
13722 relational operation. If the result is not a compile time
13723 constant, then return NULL_TREE. */
13726 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
13728 int result
, invert
;
13730 /* From here on, the only cases we handle are when the result is
13731 known to be a constant. */
13733 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
13735 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
13736 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
13738 /* Handle the cases where either operand is a NaN. */
13739 if (real_isnan (c0
) || real_isnan (c1
))
13749 case UNORDERED_EXPR
:
13763 if (flag_trapping_math
)
13769 gcc_unreachable ();
13772 return constant_boolean_node (result
, type
);
13775 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
13778 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
13780 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
13781 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
13782 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
13785 /* Handle equality/inequality of complex constants. */
13786 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
13788 tree rcond
= fold_relational_const (code
, type
,
13789 TREE_REALPART (op0
),
13790 TREE_REALPART (op1
));
13791 tree icond
= fold_relational_const (code
, type
,
13792 TREE_IMAGPART (op0
),
13793 TREE_IMAGPART (op1
));
13794 if (code
== EQ_EXPR
)
13795 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
13796 else if (code
== NE_EXPR
)
13797 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
13802 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
13804 if (!VECTOR_TYPE_P (type
))
13806 /* Have vector comparison with scalar boolean result. */
13807 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
13808 && VECTOR_CST_NELTS (op0
) == VECTOR_CST_NELTS (op1
));
13809 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (op0
); i
++)
13811 tree elem0
= VECTOR_CST_ELT (op0
, i
);
13812 tree elem1
= VECTOR_CST_ELT (op1
, i
);
13813 tree tmp
= fold_relational_const (code
, type
, elem0
, elem1
);
13814 if (tmp
== NULL_TREE
)
13816 if (integer_zerop (tmp
))
13817 return constant_boolean_node (false, type
);
13819 return constant_boolean_node (true, type
);
13821 tree_vector_builder elts
;
13822 if (!elts
.new_binary_operation (type
, op0
, op1
, false))
13824 unsigned int count
= elts
.encoded_nelts ();
13825 for (unsigned i
= 0; i
< count
; i
++)
13827 tree elem_type
= TREE_TYPE (type
);
13828 tree elem0
= VECTOR_CST_ELT (op0
, i
);
13829 tree elem1
= VECTOR_CST_ELT (op1
, i
);
13831 tree tem
= fold_relational_const (code
, elem_type
,
13834 if (tem
== NULL_TREE
)
13837 elts
.quick_push (build_int_cst (elem_type
,
13838 integer_zerop (tem
) ? 0 : -1));
13841 return elts
.build ();
  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
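  /* Worked example: 3 > 2 is evaluated by swapping the operands and
     computing 2 < 3 (true); 3 >= 2 is evaluated by computing 3 < 2 (false)
     and inverting the result.  */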
13853 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13855 std::swap (op0
, op1
);
13856 code
= swap_tree_comparison (code
);
  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
13863 if (code
== NE_EXPR
|| code
== GE_EXPR
)
13866 code
= invert_tree_comparison (code
, false);
13869 /* Compute a result for LT or EQ if args permit;
13870 Otherwise return T. */
13871 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
13873 if (code
== EQ_EXPR
)
13874 result
= tree_int_cst_equal (op0
, op1
);
13876 result
= tree_int_cst_lt (op0
, op1
);
13883 return constant_boolean_node (result
, type
);
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */
13891 fold_build_cleanup_point_expr (tree type
, tree expr
)
13893 /* If the expression does not have side effects then we don't have to wrap
13894 it with a cleanup point expression. */
13895 if (!TREE_SIDE_EFFECTS (expr
))
  /* If the expression is a return, check whether the expression inside the
     return, or the right hand side of the modify expression inside the
     return, has no side effects.  If either has none, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check
     the left hand side of the modify because it should always be a return
     decl.  */
13903 if (TREE_CODE (expr
) == RETURN_EXPR
)
13905 tree op
= TREE_OPERAND (expr
, 0);
13906 if (!op
|| !TREE_SIDE_EFFECTS (op
))
13908 op
= TREE_OPERAND (op
, 1);
13909 if (!TREE_SIDE_EFFECTS (op
))
13913 return build1_loc (EXPR_LOCATION (expr
), CLEANUP_POINT_EXPR
, type
, expr
);
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */
13921 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
13927 subtype
= TREE_TYPE (sub
);
13928 if (!POINTER_TYPE_P (subtype
)
13929 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
13932 if (TREE_CODE (sub
) == ADDR_EXPR
)
13934 tree op
= TREE_OPERAND (sub
, 0);
13935 tree optype
= TREE_TYPE (op
);
13936 /* *&CONST_DECL -> to the value of the const decl. */
13937 if (TREE_CODE (op
) == CONST_DECL
)
13938 return DECL_INITIAL (op
);
13939 /* *&p => p; make sure to handle *&"str"[cst] here. */
13940 if (type
== optype
)
13942 tree fop
= fold_read_from_constant_string (op
);
13948 /* *(foo *)&fooarray => fooarray[0] */
13949 else if (TREE_CODE (optype
) == ARRAY_TYPE
13950 && type
== TREE_TYPE (optype
)
13951 && (!in_gimple_form
13952 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
13954 tree type_domain
= TYPE_DOMAIN (optype
);
13955 tree min_val
= size_zero_node
;
13956 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13957 min_val
= TYPE_MIN_VALUE (type_domain
);
13959 && TREE_CODE (min_val
) != INTEGER_CST
)
13961 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
13962 NULL_TREE
, NULL_TREE
);
13964 /* *(foo *)&complexfoo => __real__ complexfoo */
13965 else if (TREE_CODE (optype
) == COMPLEX_TYPE
13966 && type
== TREE_TYPE (optype
))
13967 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
13968 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13969 else if (TREE_CODE (optype
) == VECTOR_TYPE
13970 && type
== TREE_TYPE (optype
))
13972 tree part_width
= TYPE_SIZE (type
);
13973 tree index
= bitsize_int (0);
13974 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
13978 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
13979 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
13981 tree op00
= TREE_OPERAND (sub
, 0);
13982 tree op01
= TREE_OPERAND (sub
, 1);
13985 if (TREE_CODE (op00
) == ADDR_EXPR
)
13988 op00
= TREE_OPERAND (op00
, 0);
13989 op00type
= TREE_TYPE (op00
);
13991 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13992 if (TREE_CODE (op00type
) == VECTOR_TYPE
13993 && type
== TREE_TYPE (op00type
))
13995 tree part_width
= TYPE_SIZE (type
);
13996 unsigned HOST_WIDE_INT max_offset
13997 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
13998 * TYPE_VECTOR_SUBPARTS (op00type
));
13999 if (tree_int_cst_sign_bit (op01
) == 0
14000 && compare_tree_int (op01
, max_offset
) == -1)
14002 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (op01
);
14003 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
14004 tree index
= bitsize_int (indexi
);
14005 return fold_build3_loc (loc
,
14006 BIT_FIELD_REF
, type
, op00
,
14007 part_width
, index
);
14010 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14011 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
14012 && type
== TREE_TYPE (op00type
))
14014 tree size
= TYPE_SIZE_UNIT (type
);
14015 if (tree_int_cst_equal (size
, op01
))
14016 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
14018 /* ((foo *)&fooarray)[1] => fooarray[1] */
14019 else if (TREE_CODE (op00type
) == ARRAY_TYPE
14020 && type
== TREE_TYPE (op00type
))
14022 tree type_domain
= TYPE_DOMAIN (op00type
);
14023 tree min
= size_zero_node
;
14024 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14025 min
= TYPE_MIN_VALUE (type_domain
);
14026 offset_int off
= wi::to_offset (op01
);
14027 offset_int el_sz
= wi::to_offset (TYPE_SIZE_UNIT (type
));
14028 offset_int remainder
;
14029 off
= wi::divmod_trunc (off
, el_sz
, SIGNED
, &remainder
);
14030 if (remainder
== 0 && TREE_CODE (min
) == INTEGER_CST
)
14032 off
= off
+ wi::to_offset (min
);
14033 op01
= wide_int_to_tree (sizetype
, off
);
14034 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
14035 NULL_TREE
, NULL_TREE
);
14041 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14042 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
14043 && type
== TREE_TYPE (TREE_TYPE (subtype
))
14044 && (!in_gimple_form
14045 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14048 tree min_val
= size_zero_node
;
14049 sub
= build_fold_indirect_ref_loc (loc
, sub
);
14050 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
14051 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14052 min_val
= TYPE_MIN_VALUE (type_domain
);
14054 && TREE_CODE (min_val
) != INTEGER_CST
)
14056 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
/* Builds an expression for an indirection through T, simplifying some
   cases.  */
14067 build_fold_indirect_ref_loc (location_t loc
, tree t
)
14069 tree type
= TREE_TYPE (TREE_TYPE (t
));
14070 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
14075 return build1_loc (loc
, INDIRECT_REF
, type
, t
);

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
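
/* For example (editorial note): if the ignored expression is
   (x = f (), x + 1), the COMPOUND_EXPR case above drops the
   side-effect-free second operand and returns just  x = f ();  an
   entirely side-effect-free input is replaced by integer_zero_node.  */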

/* Return the value of VALUE, rounded up to a multiple of DIVISOR. */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = wi::to_wide (value);
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val += divisor - 1;
          val &= (int) -divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), - (int) divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
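
/* Usage sketch (hypothetical caller, not part of this file):

     tree aligned = round_up_loc (input_location, size, 8);

   For a constant SIZE of 13 this folds directly to 16; for a
   non-constant SIZE the power-of-two path builds (size + 7) & -8.  */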

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
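
/* For example (editorial note): rounding 13 down to a multiple of 8
   yields 8; the power-of-two path is simply  value & -8, the general
   path is  (value / 8) * 8  via FLOOR_DIV_EXPR and MULT_EXPR.  */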

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (TREE_CODE (*poffset) == INTEGER_CST)
        {
          offset_int tem = wi::sext (wi::to_offset (*poffset),
                                     TYPE_PRECISION (TREE_TYPE (*poffset)));
          tem <<= LOG2_BITS_PER_UNIT;
          if (wi::fits_shwi_p (tem))
            {
              *pbitpos = tem.to_shwi ();
              *poffset = NULL_TREE;
            }
        }
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
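
/* Example (editorial sketch, assuming 8-bit units and a field placed at
   byte 4): for  EXP == &s.b  this returns  &s  with *PBITPOS == 32 and
   *POFFSET == NULL_TREE; for  EXP == p + n  with non-constant N it
   returns  p  with *PBITPOS == 0 and *POFFSET == n.  */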

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
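
/* For instance (editorial sketch, 4-byte int assumed): with  int a[10],
   ptr_difference_const on  &a[4]  and  &a[1]  sees the common core  &a
   and returns true with *DIFF == 12, while  &a[i]  versus  &a[1]  fails
   because only one side has a constant offset.  */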

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
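
/* Usage sketch (hypothetical caller): to offset a pointer-valued tree P
   by a known byte count one can write

     tree q = fold_build_pointer_plus_hwi_loc (loc, p, 4);

   while the non-_hwi variant above takes an arbitrary tree offset and
   converts it to sizetype via convert_to_ptrofftype_loc first.  */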

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  We only support
   string constants properly terminated with '\0' character.
   If STRLEN is a valid pointer, length (including terminating character)
   of returned string is stored to the argument.  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
{
  tree offset_node;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
        return NULL;
      else
        offset = tree_to_uhwi (offset_node);
    }

  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Support only properly null-terminated strings.  */
  if (string_length == 0
      || string[string_length - 1] != '\0'
      || offset >= string_length)
    return NULL;

  if (strlen)
    *strlen = string_length - offset;
  return string + offset;
}
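
/* For example (editorial note): for the argument  "hello" + 2,
   string_constant recovers the STRING_CST and offset 2, so c_getstr
   returns a pointer to "llo" and stores 4 (the length including the
   terminating '\0') in *STRLEN.  */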

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
                             tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
                                 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
                               one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
                               one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
                                   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
                               zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
                                   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
                               zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
                                   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  unsigned int nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */