1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24    @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
57 #include "diagnostic-core.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
64 #include "tree-iterator.h"
67 #include "langhooks.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
79 #include "tree-ssanames.h"
81 #include "stringpool.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
89 int folding_initializer
= 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code
{
113 static bool negate_expr_p (tree
);
114 static tree
negate_expr (tree
);
115 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
116 static enum comparison_code
comparison_to_compcode (enum tree_code
);
117 static enum tree_code
compcode_to_comparison (enum comparison_code
);
118 static bool twoval_comparison_p (tree
, tree
*, tree
*);
119 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
120 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
122 static bool simple_operand_p (const_tree
);
123 static bool simple_operand_p_2 (tree
);
124 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
125 static tree
range_predecessor (tree
);
126 static tree
range_successor (tree
);
127 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
128 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
129 static tree
unextend (tree
, int, int, tree
);
130 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
131 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
132 static tree
fold_binary_op_with_conditional_arg (location_t
,
133 enum tree_code
, tree
,
136 static tree
fold_negate_const (tree
, tree
);
137 static tree
fold_not_const (const_tree
, tree
);
138 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
139 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
140 static tree
fold_view_convert_expr (tree
, tree
);
141 static tree
fold_negate_expr (location_t
, tree
);
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145 Otherwise, return LOC. */
148 expr_location_or (tree t
, location_t loc
)
150 location_t tloc
= EXPR_LOCATION (t
);
151 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
154 /* Similar to protected_set_expr_location, but never modify x in place,
155 if location can and needs to be set, unshare it. */
158 protected_set_expr_location_unshare (tree x
, location_t loc
)
160 if (CAN_HAVE_LOCATION_P (x
)
161 && EXPR_LOCATION (x
) != loc
162 && !(TREE_CODE (x
) == SAVE_EXPR
163 || TREE_CODE (x
) == TARGET_EXPR
164 || TREE_CODE (x
) == BIND_EXPR
))
167 SET_EXPR_LOCATION (x
, loc
);
172 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
173 division and returns the quotient. Otherwise returns
177 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
181 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
183 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
188 /* This is nonzero if we should defer warnings about undefined
189 overflow. This facility exists because these warnings are a
190 special case. The code to estimate loop iterations does not want
191 to issue any warnings, since it works with expressions which do not
192 occur in user code. Various bits of cleanup code call fold(), but
193 only use the result if it has certain characteristics (e.g., is a
194 constant); that code only wants to issue a warning if the result is
197 static int fold_deferring_overflow_warnings
;
199 /* If a warning about undefined overflow is deferred, this is the
200 warning. Note that this may cause us to turn two warnings into
201 one, but that is fine since it is sufficient to only give one
202 warning per expression. */
204 static const char* fold_deferred_overflow_warning
;
206 /* If a warning about undefined overflow is deferred, this is the
207 level at which the warning should be emitted. */
209 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
211 /* Start deferring overflow warnings. We could use a stack here to
212 permit nested calls, but at present it is not necessary. */
215 fold_defer_overflow_warnings (void)
217 ++fold_deferring_overflow_warnings
;
220 /* Stop deferring overflow warnings. If there is a pending warning,
221 and ISSUE is true, then issue the warning if appropriate. STMT is
222 the statement with which the warning should be associated (used for
223 location information); STMT may be NULL. CODE is the level of the
224 warning--a warn_strict_overflow_code value. This function will use
225 the smaller of CODE and the deferred code when deciding whether to
226 issue the warning. CODE may be zero to mean to always use the
230 fold_undefer_overflow_warnings (bool issue
, const gimple
*stmt
, int code
)
235 gcc_assert (fold_deferring_overflow_warnings
> 0);
236 --fold_deferring_overflow_warnings
;
237 if (fold_deferring_overflow_warnings
> 0)
239 if (fold_deferred_overflow_warning
!= NULL
241 && code
< (int) fold_deferred_overflow_code
)
242 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
246 warnmsg
= fold_deferred_overflow_warning
;
247 fold_deferred_overflow_warning
= NULL
;
249 if (!issue
|| warnmsg
== NULL
)
252 if (gimple_no_warning_p (stmt
))
255 /* Use the smallest code level when deciding to issue the
257 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
258 code
= fold_deferred_overflow_code
;
260 if (!issue_strict_overflow_warning (code
))
264 locus
= input_location
;
266 locus
= gimple_location (stmt
);
267 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
270 /* Stop deferring overflow warnings, ignoring any deferred
274 fold_undefer_and_ignore_overflow_warnings (void)
276 fold_undefer_overflow_warnings (false, NULL
, 0);
279 /* Whether we are deferring overflow warnings. */
282 fold_deferring_overflow_warnings_p (void)
284 return fold_deferring_overflow_warnings
> 0;
287 /* This is called when we fold something based on the fact that signed
288 overflow is undefined. */
291 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
293 if (fold_deferring_overflow_warnings
> 0)
295 if (fold_deferred_overflow_warning
== NULL
296 || wc
< fold_deferred_overflow_code
)
298 fold_deferred_overflow_warning
= gmsgid
;
299 fold_deferred_overflow_code
= wc
;
302 else if (issue_strict_overflow_warning (wc
))
303 warning (OPT_Wstrict_overflow
, gmsgid
);
306 /* Return true if the built-in mathematical function specified by CODE
307 is odd, i.e. -f(x) == f(-x). */
310 negate_mathfn_p (combined_fn fn
)
333 CASE_CFN_ROUNDEVEN_FN
:
345 return !flag_rounding_math
;
353 /* Check whether we may negate an integer constant T without causing
357 may_negate_without_overflow_p (const_tree t
)
361 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
363 type
= TREE_TYPE (t
);
364 if (TYPE_UNSIGNED (type
))
367 return !wi::only_sign_bit_p (wi::to_wide (t
));
370 /* Determine whether an expression T can be cheaply negated using
371 the function negate_expr without introducing undefined overflow. */
374 negate_expr_p (tree t
)
381 type
= TREE_TYPE (t
);
384 switch (TREE_CODE (t
))
387 if (INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
))
390 /* Check that -CST will not overflow type. */
391 return may_negate_without_overflow_p (t
);
393 return (INTEGRAL_TYPE_P (type
)
394 && TYPE_OVERFLOW_WRAPS (type
));
400 return !TYPE_OVERFLOW_SANITIZED (type
);
403 /* We want to canonicalize to positive real constants. Pretend
404 that only negative ones can be easily negated. */
405 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
408 return negate_expr_p (TREE_REALPART (t
))
409 && negate_expr_p (TREE_IMAGPART (t
));
413 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
416 /* Steps don't prevent negation. */
417 unsigned int count
= vector_cst_encoded_nelts (t
);
418 for (unsigned int i
= 0; i
< count
; ++i
)
419 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t
, i
)))
426 return negate_expr_p (TREE_OPERAND (t
, 0))
427 && negate_expr_p (TREE_OPERAND (t
, 1));
430 return negate_expr_p (TREE_OPERAND (t
, 0));
433 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
434 || HONOR_SIGNED_ZEROS (element_mode (type
))
435 || (ANY_INTEGRAL_TYPE_P (type
)
436 && ! TYPE_OVERFLOW_WRAPS (type
)))
438 /* -(A + B) -> (-B) - A. */
439 if (negate_expr_p (TREE_OPERAND (t
, 1)))
441 /* -(A + B) -> (-A) - B. */
442 return negate_expr_p (TREE_OPERAND (t
, 0));
445 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
446 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
447 && !HONOR_SIGNED_ZEROS (element_mode (type
))
448 && (! ANY_INTEGRAL_TYPE_P (type
)
449 || TYPE_OVERFLOW_WRAPS (type
));
452 if (TYPE_UNSIGNED (type
))
454 /* INT_MIN/n * n doesn't overflow while negating one operand it does
455 if n is a (negative) power of two. */
456 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
457 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
458 && ! ((TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
460 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 0))))) != 1)
461 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
463 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 1))))) != 1)))
469 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
470 return negate_expr_p (TREE_OPERAND (t
, 1))
471 || negate_expr_p (TREE_OPERAND (t
, 0));
477 if (TYPE_UNSIGNED (type
))
479 /* In general we can't negate A in A / B, because if A is INT_MIN and
480 B is not 1 we change the sign of the result. */
481 if (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
482 && negate_expr_p (TREE_OPERAND (t
, 0)))
484 /* In general we can't negate B in A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. */
487 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t
))
488 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
489 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
490 && ! integer_onep (TREE_OPERAND (t
, 1))))
491 return negate_expr_p (TREE_OPERAND (t
, 1));
495 /* Negate -((double)float) as (double)(-float). */
496 if (TREE_CODE (type
) == REAL_TYPE
)
498 tree tem
= strip_float_extensions (t
);
500 return negate_expr_p (tem
);
505 /* Negate -f(x) as f(-x). */
506 if (negate_mathfn_p (get_call_combined_fn (t
)))
507 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
511 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
512 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
514 tree op1
= TREE_OPERAND (t
, 1);
515 if (wi::to_wide (op1
) == TYPE_PRECISION (type
) - 1)
526 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
527 simplification is possible.
528 If negate_expr_p would return true for T, NULL_TREE will never be
532 fold_negate_expr_1 (location_t loc
, tree t
)
534 tree type
= TREE_TYPE (t
);
537 switch (TREE_CODE (t
))
539 /* Convert - (~A) to A + 1. */
541 if (INTEGRAL_TYPE_P (type
))
542 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
543 build_one_cst (type
));
547 tem
= fold_negate_const (t
, type
);
548 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
549 || (ANY_INTEGRAL_TYPE_P (type
)
550 && !TYPE_OVERFLOW_TRAPS (type
)
551 && TYPE_OVERFLOW_WRAPS (type
))
552 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
559 tem
= fold_negate_const (t
, type
);
564 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
565 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
567 return build_complex (type
, rpart
, ipart
);
573 tree_vector_builder elts
;
574 elts
.new_unary_operation (type
, t
, true);
575 unsigned int count
= elts
.encoded_nelts ();
576 for (unsigned int i
= 0; i
< count
; ++i
)
578 tree elt
= fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
579 if (elt
== NULL_TREE
)
581 elts
.quick_push (elt
);
584 return elts
.build ();
588 if (negate_expr_p (t
))
589 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
590 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
591 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
595 if (negate_expr_p (t
))
596 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
597 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
601 if (!TYPE_OVERFLOW_SANITIZED (type
))
602 return TREE_OPERAND (t
, 0);
606 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
607 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
609 /* -(A + B) -> (-B) - A. */
610 if (negate_expr_p (TREE_OPERAND (t
, 1)))
612 tem
= negate_expr (TREE_OPERAND (t
, 1));
613 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
614 tem
, TREE_OPERAND (t
, 0));
617 /* -(A + B) -> (-A) - B. */
618 if (negate_expr_p (TREE_OPERAND (t
, 0)))
620 tem
= negate_expr (TREE_OPERAND (t
, 0));
621 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
622 tem
, TREE_OPERAND (t
, 1));
628 /* - (A - B) -> B - A */
629 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
630 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
631 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
632 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
636 if (TYPE_UNSIGNED (type
))
642 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
644 tem
= TREE_OPERAND (t
, 1);
645 if (negate_expr_p (tem
))
646 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
647 TREE_OPERAND (t
, 0), negate_expr (tem
));
648 tem
= TREE_OPERAND (t
, 0);
649 if (negate_expr_p (tem
))
650 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
651 negate_expr (tem
), TREE_OPERAND (t
, 1));
658 if (TYPE_UNSIGNED (type
))
660 /* In general we can't negate A in A / B, because if A is INT_MIN and
661 B is not 1 we change the sign of the result. */
662 if (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
663 && negate_expr_p (TREE_OPERAND (t
, 0)))
664 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
665 negate_expr (TREE_OPERAND (t
, 0)),
666 TREE_OPERAND (t
, 1));
667 /* In general we can't negate B in A / B, because if A is INT_MIN and
668 B is 1, we may turn this into INT_MIN / -1 which is undefined
669 and actually traps on some architectures. */
670 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t
))
671 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
672 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
673 && ! integer_onep (TREE_OPERAND (t
, 1))))
674 && negate_expr_p (TREE_OPERAND (t
, 1)))
675 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
677 negate_expr (TREE_OPERAND (t
, 1)));
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type
) == REAL_TYPE
)
684 tem
= strip_float_extensions (t
);
685 if (tem
!= t
&& negate_expr_p (tem
))
686 return fold_convert_loc (loc
, type
, negate_expr (tem
));
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (get_call_combined_fn (t
))
693 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
697 fndecl
= get_callee_fndecl (t
);
698 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
699 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
705 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
707 tree op1
= TREE_OPERAND (t
, 1);
708 if (wi::to_wide (op1
) == TYPE_PRECISION (type
) - 1)
710 tree ntype
= TYPE_UNSIGNED (type
)
711 ? signed_type_for (type
)
712 : unsigned_type_for (type
);
713 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
714 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
715 return fold_convert_loc (loc
, type
, temp
);
727 /* A wrapper for fold_negate_expr_1. */
730 fold_negate_expr (location_t loc
, tree t
)
732 tree type
= TREE_TYPE (t
);
734 tree tem
= fold_negate_expr_1 (loc
, t
);
735 if (tem
== NULL_TREE
)
737 return fold_convert_loc (loc
, type
, tem
);
740 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
741 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
753 loc
= EXPR_LOCATION (t
);
754 type
= TREE_TYPE (t
);
757 tem
= fold_negate_expr (loc
, t
);
759 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
760 return fold_convert_loc (loc
, type
, tem
);
763 /* Split a tree IN into a constant, literal and variable parts that could be
764 combined with CODE to make IN. "constant" means an expression with
765 TREE_CONSTANT but that isn't an actual constant. CODE must be a
766 commutative arithmetic operation. Store the constant part into *CONP,
767 the literal in *LITP and return the variable part. If a part isn't
768 present, set it to null. If the tree does not decompose in this way,
769 return the entire tree as the variable part and the other parts as null.
771 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
772 case, we negate an operand that was subtracted. Except if it is a
773 literal for which we use *MINUS_LITP instead.
775 If NEGATE_P is true, we are negating all of IN, again except a literal
776 for which we use *MINUS_LITP instead. If a variable part is of pointer
777 type, it is negated after converting to TYPE. This prevents us from
778 generating illegal MINUS pointer expression. LOC is the location of
779 the converted variable part.
781 If IN is itself a literal or constant, return it as appropriate.
783 Note that we do not guarantee that any of the three values will be the
784 same type as IN, but they will have the same signedness and mode. */
787 split_tree (tree in
, tree type
, enum tree_code code
,
788 tree
*minus_varp
, tree
*conp
, tree
*minus_conp
,
789 tree
*litp
, tree
*minus_litp
, int negate_p
)
798 /* Strip any conversions that don't change the machine mode or signedness. */
799 STRIP_SIGN_NOPS (in
);
801 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
802 || TREE_CODE (in
) == FIXED_CST
)
804 else if (TREE_CODE (in
) == code
805 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
806 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
807 /* We can associate addition and subtraction together (even
808 though the C standard doesn't say so) for integers because
809 the value is not affected. For reals, the value might be
810 affected, so we can't. */
811 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == POINTER_PLUS_EXPR
)
812 || (code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
813 || (code
== MINUS_EXPR
814 && (TREE_CODE (in
) == PLUS_EXPR
815 || TREE_CODE (in
) == POINTER_PLUS_EXPR
)))))
817 tree op0
= TREE_OPERAND (in
, 0);
818 tree op1
= TREE_OPERAND (in
, 1);
819 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
820 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
822 /* First see if either of the operands is a literal, then a constant. */
823 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
824 || TREE_CODE (op0
) == FIXED_CST
)
825 *litp
= op0
, op0
= 0;
826 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
827 || TREE_CODE (op1
) == FIXED_CST
)
828 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
830 if (op0
!= 0 && TREE_CONSTANT (op0
))
831 *conp
= op0
, op0
= 0;
832 else if (op1
!= 0 && TREE_CONSTANT (op1
))
833 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
835 /* If we haven't dealt with either operand, this is not a case we can
836 decompose. Otherwise, VAR is either of the ones remaining, if any. */
837 if (op0
!= 0 && op1
!= 0)
842 var
= op1
, neg_var_p
= neg1_p
;
844 /* Now do any needed negations. */
846 *minus_litp
= *litp
, *litp
= 0;
847 if (neg_conp_p
&& *conp
)
848 *minus_conp
= *conp
, *conp
= 0;
849 if (neg_var_p
&& var
)
850 *minus_varp
= var
, var
= 0;
852 else if (TREE_CONSTANT (in
))
854 else if (TREE_CODE (in
) == BIT_NOT_EXPR
855 && code
== PLUS_EXPR
)
857 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
858 when IN is constant. */
859 *litp
= build_minus_one_cst (type
);
860 *minus_varp
= TREE_OPERAND (in
, 0);
868 *minus_litp
= *litp
, *litp
= 0;
869 else if (*minus_litp
)
870 *litp
= *minus_litp
, *minus_litp
= 0;
872 *minus_conp
= *conp
, *conp
= 0;
873 else if (*minus_conp
)
874 *conp
= *minus_conp
, *minus_conp
= 0;
876 *minus_varp
= var
, var
= 0;
877 else if (*minus_varp
)
878 var
= *minus_varp
, *minus_varp
= 0;
882 && TREE_OVERFLOW_P (*litp
))
883 *litp
= drop_tree_overflow (*litp
);
885 && TREE_OVERFLOW_P (*minus_litp
))
886 *minus_litp
= drop_tree_overflow (*minus_litp
);
891 /* Re-associate trees split by the above function. T1 and T2 are
892 either expressions to associate or null. Return the new
893 expression, if any. LOC is the location of the new expression. If
894 we build an operation, do it in TYPE and with CODE. */
897 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
901 gcc_assert (t2
== 0 || code
!= MINUS_EXPR
);
907 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
908 try to fold this since we will have infinite recursion. But do
909 deal with any NEGATE_EXPRs. */
910 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
911 || TREE_CODE (t1
) == PLUS_EXPR
|| TREE_CODE (t2
) == PLUS_EXPR
912 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
914 if (code
== PLUS_EXPR
)
916 if (TREE_CODE (t1
) == NEGATE_EXPR
)
917 return build2_loc (loc
, MINUS_EXPR
, type
,
918 fold_convert_loc (loc
, type
, t2
),
919 fold_convert_loc (loc
, type
,
920 TREE_OPERAND (t1
, 0)));
921 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
922 return build2_loc (loc
, MINUS_EXPR
, type
,
923 fold_convert_loc (loc
, type
, t1
),
924 fold_convert_loc (loc
, type
,
925 TREE_OPERAND (t2
, 0)));
926 else if (integer_zerop (t2
))
927 return fold_convert_loc (loc
, type
, t1
);
929 else if (code
== MINUS_EXPR
)
931 if (integer_zerop (t2
))
932 return fold_convert_loc (loc
, type
, t1
);
935 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
936 fold_convert_loc (loc
, type
, t2
));
939 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
940 fold_convert_loc (loc
, type
, t2
));
943 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
944 for use in int_const_binop, size_binop and size_diffop. */
947 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
949 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
951 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
966 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
967 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
968 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
971 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
972 a new constant in RES. Return FALSE if we don't know how to
973 evaluate CODE at compile-time. */
976 wide_int_binop (wide_int
&res
,
977 enum tree_code code
, const wide_int
&arg1
, const wide_int
&arg2
,
978 signop sign
, wi::overflow_type
*overflow
)
981 *overflow
= wi::OVF_NONE
;
985 res
= wi::bit_or (arg1
, arg2
);
989 res
= wi::bit_xor (arg1
, arg2
);
993 res
= wi::bit_and (arg1
, arg2
);
997 if (wi::neg_p (arg2
))
999 res
= wi::lshift (arg1
, arg2
);
1003 if (wi::neg_p (arg2
))
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res
= wi::rshift (arg1
, arg2
, sign
);
1013 if (wi::neg_p (arg2
))
1016 if (code
== RROTATE_EXPR
)
1017 code
= LROTATE_EXPR
;
1019 code
= RROTATE_EXPR
;
1024 if (code
== RROTATE_EXPR
)
1025 res
= wi::rrotate (arg1
, tmp
);
1027 res
= wi::lrotate (arg1
, tmp
);
1031 res
= wi::add (arg1
, arg2
, sign
, overflow
);
1035 res
= wi::sub (arg1
, arg2
, sign
, overflow
);
1039 res
= wi::mul (arg1
, arg2
, sign
, overflow
);
1042 case MULT_HIGHPART_EXPR
:
1043 res
= wi::mul_high (arg1
, arg2
, sign
);
1046 case TRUNC_DIV_EXPR
:
1047 case EXACT_DIV_EXPR
:
1050 res
= wi::div_trunc (arg1
, arg2
, sign
, overflow
);
1053 case FLOOR_DIV_EXPR
:
1056 res
= wi::div_floor (arg1
, arg2
, sign
, overflow
);
1062 res
= wi::div_ceil (arg1
, arg2
, sign
, overflow
);
1065 case ROUND_DIV_EXPR
:
1068 res
= wi::div_round (arg1
, arg2
, sign
, overflow
);
1071 case TRUNC_MOD_EXPR
:
1074 res
= wi::mod_trunc (arg1
, arg2
, sign
, overflow
);
1077 case FLOOR_MOD_EXPR
:
1080 res
= wi::mod_floor (arg1
, arg2
, sign
, overflow
);
1086 res
= wi::mod_ceil (arg1
, arg2
, sign
, overflow
);
1089 case ROUND_MOD_EXPR
:
1092 res
= wi::mod_round (arg1
, arg2
, sign
, overflow
);
1096 res
= wi::min (arg1
, arg2
, sign
);
1100 res
= wi::max (arg1
, arg2
, sign
);
1109 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1110 produce a new constant in RES. Return FALSE if we don't know how
1111 to evaluate CODE at compile-time. */
1114 poly_int_binop (poly_wide_int
&res
, enum tree_code code
,
1115 const_tree arg1
, const_tree arg2
,
1116 signop sign
, wi::overflow_type
*overflow
)
1118 gcc_assert (NUM_POLY_INT_COEFFS
!= 1);
1119 gcc_assert (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
));
1123 res
= wi::add (wi::to_poly_wide (arg1
),
1124 wi::to_poly_wide (arg2
), sign
, overflow
);
1128 res
= wi::sub (wi::to_poly_wide (arg1
),
1129 wi::to_poly_wide (arg2
), sign
, overflow
);
1133 if (TREE_CODE (arg2
) == INTEGER_CST
)
1134 res
= wi::mul (wi::to_poly_wide (arg1
),
1135 wi::to_wide (arg2
), sign
, overflow
);
1136 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1137 res
= wi::mul (wi::to_poly_wide (arg2
),
1138 wi::to_wide (arg1
), sign
, overflow
);
1144 if (TREE_CODE (arg2
) == INTEGER_CST
)
1145 res
= wi::to_poly_wide (arg1
) << wi::to_wide (arg2
);
1151 if (TREE_CODE (arg2
) != INTEGER_CST
1152 || !can_ior_p (wi::to_poly_wide (arg1
), wi::to_wide (arg2
),
1163 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1164 produce a new constant. Return NULL_TREE if we don't know how to
1165 evaluate CODE at compile-time. */
1168 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
,
1171 poly_wide_int poly_res
;
1172 tree type
= TREE_TYPE (arg1
);
1173 signop sign
= TYPE_SIGN (type
);
1174 wi::overflow_type overflow
= wi::OVF_NONE
;
1176 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1178 wide_int warg1
= wi::to_wide (arg1
), res
;
1179 wide_int warg2
= wi::to_wide (arg2
, TYPE_PRECISION (type
));
1180 if (!wide_int_binop (res
, code
, warg1
, warg2
, sign
, &overflow
))
1184 else if (!poly_int_tree_p (arg1
)
1185 || !poly_int_tree_p (arg2
)
1186 || !poly_int_binop (poly_res
, code
, arg1
, arg2
, sign
, &overflow
))
1188 return force_fit_type (type
, poly_res
, overflowable
,
1189 (((sign
== SIGNED
|| overflowable
== -1)
1191 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
)));
1194 /* Return true if binary operation OP distributes over addition in operand
1195 OPNO, with the other operand being held constant. OPNO counts from 1. */
1198 distributes_over_addition_p (tree_code op
, int opno
)
1215 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1216 constant. We assume ARG1 and ARG2 have the same data type, or at least
1217 are the same kind of constant and the same machine mode. Return zero if
1218 combining the constants is not allowed in the current operating mode. */
1221 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1223 /* Sanity check for the recursive cases. */
1230 if (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
))
1232 if (code
== POINTER_PLUS_EXPR
)
1233 return int_const_binop (PLUS_EXPR
,
1234 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1236 return int_const_binop (code
, arg1
, arg2
);
1239 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1244 REAL_VALUE_TYPE value
;
1245 REAL_VALUE_TYPE result
;
1249 /* The following codes are handled by real_arithmetic. */
1264 d1
= TREE_REAL_CST (arg1
);
1265 d2
= TREE_REAL_CST (arg2
);
1267 type
= TREE_TYPE (arg1
);
1268 mode
= TYPE_MODE (type
);
1270 /* Don't perform operation if we honor signaling NaNs and
1271 either operand is a signaling NaN. */
1272 if (HONOR_SNANS (mode
)
1273 && (REAL_VALUE_ISSIGNALING_NAN (d1
)
1274 || REAL_VALUE_ISSIGNALING_NAN (d2
)))
1277 /* Don't perform operation if it would raise a division
1278 by zero exception. */
1279 if (code
== RDIV_EXPR
1280 && real_equal (&d2
, &dconst0
)
1281 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1284 /* If either operand is a NaN, just return it. Otherwise, set up
1285 for floating-point trap; we return an overflow. */
1286 if (REAL_VALUE_ISNAN (d1
))
1288 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1291 t
= build_real (type
, d1
);
1294 else if (REAL_VALUE_ISNAN (d2
))
1296 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1299 t
= build_real (type
, d2
);
1303 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1304 real_convert (&result
, mode
, &value
);
1306 /* Don't constant fold this floating point operation if
1307 the result has overflowed and flag_trapping_math. */
1308 if (flag_trapping_math
1309 && MODE_HAS_INFINITIES (mode
)
1310 && REAL_VALUE_ISINF (result
)
1311 && !REAL_VALUE_ISINF (d1
)
1312 && !REAL_VALUE_ISINF (d2
))
1315 /* Don't constant fold this floating point operation if the
1316 result may dependent upon the run-time rounding mode and
1317 flag_rounding_math is set, or if GCC's software emulation
1318 is unable to accurately represent the result. */
1319 if ((flag_rounding_math
1320 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1321 && (inexact
|| !real_identical (&result
, &value
)))
1324 t
= build_real (type
, result
);
1326 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1330 if (TREE_CODE (arg1
) == FIXED_CST
)
1332 FIXED_VALUE_TYPE f1
;
1333 FIXED_VALUE_TYPE f2
;
1334 FIXED_VALUE_TYPE result
;
1339 /* The following codes are handled by fixed_arithmetic. */
1345 case TRUNC_DIV_EXPR
:
1346 if (TREE_CODE (arg2
) != FIXED_CST
)
1348 f2
= TREE_FIXED_CST (arg2
);
1354 if (TREE_CODE (arg2
) != INTEGER_CST
)
1356 wi::tree_to_wide_ref w2
= wi::to_wide (arg2
);
1357 f2
.data
.high
= w2
.elt (1);
1358 f2
.data
.low
= w2
.ulow ();
1367 f1
= TREE_FIXED_CST (arg1
);
1368 type
= TREE_TYPE (arg1
);
1369 sat_p
= TYPE_SATURATING (type
);
1370 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1371 t
= build_fixed (type
, result
);
1372 /* Propagate overflow flags. */
1373 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1374 TREE_OVERFLOW (t
) = 1;
1378 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1380 tree type
= TREE_TYPE (arg1
);
1381 tree r1
= TREE_REALPART (arg1
);
1382 tree i1
= TREE_IMAGPART (arg1
);
1383 tree r2
= TREE_REALPART (arg2
);
1384 tree i2
= TREE_IMAGPART (arg2
);
1391 real
= const_binop (code
, r1
, r2
);
1392 imag
= const_binop (code
, i1
, i2
);
1396 if (COMPLEX_FLOAT_TYPE_P (type
))
1397 return do_mpc_arg2 (arg1
, arg2
, type
,
1398 /* do_nonfinite= */ folding_initializer
,
1401 real
= const_binop (MINUS_EXPR
,
1402 const_binop (MULT_EXPR
, r1
, r2
),
1403 const_binop (MULT_EXPR
, i1
, i2
));
1404 imag
= const_binop (PLUS_EXPR
,
1405 const_binop (MULT_EXPR
, r1
, i2
),
1406 const_binop (MULT_EXPR
, i1
, r2
));
1410 if (COMPLEX_FLOAT_TYPE_P (type
))
1411 return do_mpc_arg2 (arg1
, arg2
, type
,
1412 /* do_nonfinite= */ folding_initializer
,
1415 case TRUNC_DIV_EXPR
:
1417 case FLOOR_DIV_EXPR
:
1418 case ROUND_DIV_EXPR
:
1419 if (flag_complex_method
== 0)
1421 /* Keep this algorithm in sync with
1422 tree-complex.c:expand_complex_div_straight().
1424 Expand complex division to scalars, straightforward algorithm.
1425 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1429 = const_binop (PLUS_EXPR
,
1430 const_binop (MULT_EXPR
, r2
, r2
),
1431 const_binop (MULT_EXPR
, i2
, i2
));
1433 = const_binop (PLUS_EXPR
,
1434 const_binop (MULT_EXPR
, r1
, r2
),
1435 const_binop (MULT_EXPR
, i1
, i2
));
1437 = const_binop (MINUS_EXPR
,
1438 const_binop (MULT_EXPR
, i1
, r2
),
1439 const_binop (MULT_EXPR
, r1
, i2
));
1441 real
= const_binop (code
, t1
, magsquared
);
1442 imag
= const_binop (code
, t2
, magsquared
);
1446 /* Keep this algorithm in sync with
1447 tree-complex.c:expand_complex_div_wide().
1449 Expand complex division to scalars, modified algorithm to minimize
1450 overflow with wide input ranges. */
1451 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1452 fold_abs_const (r2
, TREE_TYPE (type
)),
1453 fold_abs_const (i2
, TREE_TYPE (type
)));
1455 if (integer_nonzerop (compare
))
1457 /* In the TRUE branch, we compute
1459 div = (br * ratio) + bi;
1460 tr = (ar * ratio) + ai;
1461 ti = (ai * ratio) - ar;
1464 tree ratio
= const_binop (code
, r2
, i2
);
1465 tree div
= const_binop (PLUS_EXPR
, i2
,
1466 const_binop (MULT_EXPR
, r2
, ratio
));
1467 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1468 real
= const_binop (PLUS_EXPR
, real
, i1
);
1469 real
= const_binop (code
, real
, div
);
1471 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1472 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1473 imag
= const_binop (code
, imag
, div
);
1477 /* In the FALSE branch, we compute
1479 divisor = (d * ratio) + c;
1480 tr = (b * ratio) + a;
1481 ti = b - (a * ratio);
1484 tree ratio
= const_binop (code
, i2
, r2
);
1485 tree div
= const_binop (PLUS_EXPR
, r2
,
1486 const_binop (MULT_EXPR
, i2
, ratio
));
1488 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1489 real
= const_binop (PLUS_EXPR
, real
, r1
);
1490 real
= const_binop (code
, real
, div
);
1492 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1493 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1494 imag
= const_binop (code
, imag
, div
);
1504 return build_complex (type
, real
, imag
);
1507 if (TREE_CODE (arg1
) == VECTOR_CST
1508 && TREE_CODE (arg2
) == VECTOR_CST
1509 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)),
1510 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
))))
1512 tree type
= TREE_TYPE (arg1
);
1514 if (VECTOR_CST_STEPPED_P (arg1
)
1515 && VECTOR_CST_STEPPED_P (arg2
))
1516 /* We can operate directly on the encoding if:
1518 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1520 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1522 Addition and subtraction are the supported operators
1523 for which this is true. */
1524 step_ok_p
= (code
== PLUS_EXPR
|| code
== MINUS_EXPR
);
1525 else if (VECTOR_CST_STEPPED_P (arg1
))
1526 /* We can operate directly on stepped encodings if:
1530 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1532 which is true if (x -> x op c) distributes over addition. */
1533 step_ok_p
= distributes_over_addition_p (code
, 1);
1535 /* Similarly in reverse. */
1536 step_ok_p
= distributes_over_addition_p (code
, 2);
1537 tree_vector_builder elts
;
1538 if (!elts
.new_binary_operation (type
, arg1
, arg2
, step_ok_p
))
1540 unsigned int count
= elts
.encoded_nelts ();
1541 for (unsigned int i
= 0; i
< count
; ++i
)
1543 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1544 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1546 tree elt
= const_binop (code
, elem1
, elem2
);
1548 /* It is possible that const_binop cannot handle the given
1549 code and return NULL_TREE */
1550 if (elt
== NULL_TREE
)
1552 elts
.quick_push (elt
);
1555 return elts
.build ();
1558 /* Shifts allow a scalar offset for a vector. */
1559 if (TREE_CODE (arg1
) == VECTOR_CST
1560 && TREE_CODE (arg2
) == INTEGER_CST
)
1562 tree type
= TREE_TYPE (arg1
);
1563 bool step_ok_p
= distributes_over_addition_p (code
, 1);
1564 tree_vector_builder elts
;
1565 if (!elts
.new_unary_operation (type
, arg1
, step_ok_p
))
1567 unsigned int count
= elts
.encoded_nelts ();
1568 for (unsigned int i
= 0; i
< count
; ++i
)
1570 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1572 tree elt
= const_binop (code
, elem1
, arg2
);
1574 /* It is possible that const_binop cannot handle the given
1575 code and return NULL_TREE. */
1576 if (elt
== NULL_TREE
)
1578 elts
.quick_push (elt
);
1581 return elts
.build ();
1586 /* Overload that adds a TYPE parameter to be able to dispatch
1587 to fold_relational_const. */
1590 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1592 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1593 return fold_relational_const (code
, type
, arg1
, arg2
);
1595 /* ??? Until we make the const_binop worker take the type of the
1596 result as argument put those cases that need it here. */
1599 case VEC_SERIES_EXPR
:
1600 if (CONSTANT_CLASS_P (arg1
)
1601 && CONSTANT_CLASS_P (arg2
))
1602 return build_vec_series (type
, arg1
, arg2
);
1606 if ((TREE_CODE (arg1
) == REAL_CST
1607 && TREE_CODE (arg2
) == REAL_CST
)
1608 || (TREE_CODE (arg1
) == INTEGER_CST
1609 && TREE_CODE (arg2
) == INTEGER_CST
))
1610 return build_complex (type
, arg1
, arg2
);
1613 case POINTER_DIFF_EXPR
:
1614 if (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
))
1616 poly_offset_int res
= (wi::to_poly_offset (arg1
)
1617 - wi::to_poly_offset (arg2
));
1618 return force_fit_type (type
, res
, 1,
1619 TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1623 case VEC_PACK_TRUNC_EXPR
:
1624 case VEC_PACK_FIX_TRUNC_EXPR
:
1625 case VEC_PACK_FLOAT_EXPR
:
1627 unsigned int HOST_WIDE_INT out_nelts
, in_nelts
, i
;
1629 if (TREE_CODE (arg1
) != VECTOR_CST
1630 || TREE_CODE (arg2
) != VECTOR_CST
)
1633 if (!VECTOR_CST_NELTS (arg1
).is_constant (&in_nelts
))
1636 out_nelts
= in_nelts
* 2;
1637 gcc_assert (known_eq (in_nelts
, VECTOR_CST_NELTS (arg2
))
1638 && known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1640 tree_vector_builder
elts (type
, out_nelts
, 1);
1641 for (i
= 0; i
< out_nelts
; i
++)
1643 tree elt
= (i
< in_nelts
1644 ? VECTOR_CST_ELT (arg1
, i
)
1645 : VECTOR_CST_ELT (arg2
, i
- in_nelts
));
1646 elt
= fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1648 : code
== VEC_PACK_FLOAT_EXPR
1649 ? FLOAT_EXPR
: FIX_TRUNC_EXPR
,
1650 TREE_TYPE (type
), elt
);
1651 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1653 elts
.quick_push (elt
);
1656 return elts
.build ();
1659 case VEC_WIDEN_MULT_LO_EXPR
:
1660 case VEC_WIDEN_MULT_HI_EXPR
:
1661 case VEC_WIDEN_MULT_EVEN_EXPR
:
1662 case VEC_WIDEN_MULT_ODD_EXPR
:
1664 unsigned HOST_WIDE_INT out_nelts
, in_nelts
, out
, ofs
, scale
;
1666 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1669 if (!VECTOR_CST_NELTS (arg1
).is_constant (&in_nelts
))
1671 out_nelts
= in_nelts
/ 2;
1672 gcc_assert (known_eq (in_nelts
, VECTOR_CST_NELTS (arg2
))
1673 && known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1675 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1676 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? out_nelts
: 0;
1677 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1678 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : out_nelts
;
1679 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1681 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1684 tree_vector_builder
elts (type
, out_nelts
, 1);
1685 for (out
= 0; out
< out_nelts
; out
++)
1687 unsigned int in
= (out
<< scale
) + ofs
;
1688 tree t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1689 VECTOR_CST_ELT (arg1
, in
));
1690 tree t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1691 VECTOR_CST_ELT (arg2
, in
));
1693 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1695 tree elt
= const_binop (MULT_EXPR
, t1
, t2
);
1696 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1698 elts
.quick_push (elt
);
1701 return elts
.build ();
1707 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1710 /* Make sure type and arg0 have the same saturating flag. */
1711 gcc_checking_assert (TYPE_SATURATING (type
)
1712 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1714 return const_binop (code
, arg1
, arg2
);
1717 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1718 Return zero if computing the constants is not possible. */
1721 const_unop (enum tree_code code
, tree type
, tree arg0
)
1723 /* Don't perform the operation, other than NEGATE and ABS, if
1724 flag_signaling_nans is on and the operand is a signaling NaN. */
1725 if (TREE_CODE (arg0
) == REAL_CST
1726 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
1727 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0
))
1728 && code
!= NEGATE_EXPR
1730 && code
!= ABSU_EXPR
)
1737 case FIX_TRUNC_EXPR
:
1738 case FIXED_CONVERT_EXPR
:
1739 return fold_convert_const (code
, type
, arg0
);
1741 case ADDR_SPACE_CONVERT_EXPR
:
1742 /* If the source address is 0, and the source address space
1743 cannot have a valid object at 0, fold to dest type null. */
1744 if (integer_zerop (arg0
)
1745 && !(targetm
.addr_space
.zero_address_valid
1746 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
))))))
1747 return fold_convert_const (code
, type
, arg0
);
1750 case VIEW_CONVERT_EXPR
:
1751 return fold_view_convert_expr (type
, arg0
);
1755 /* Can't call fold_negate_const directly here as that doesn't
1756 handle all cases and we might not be able to negate some
1758 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1759 if (tem
&& CONSTANT_CLASS_P (tem
))
1766 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1767 return fold_abs_const (arg0
, type
);
1771 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1773 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1775 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1780 if (TREE_CODE (arg0
) == INTEGER_CST
)
1781 return fold_not_const (arg0
, type
);
1782 else if (POLY_INT_CST_P (arg0
))
1783 return wide_int_to_tree (type
, -poly_int_cst_value (arg0
));
1784 /* Perform BIT_NOT_EXPR on each element individually. */
1785 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1789 /* This can cope with stepped encodings because ~x == -1 - x. */
1790 tree_vector_builder elements
;
1791 elements
.new_unary_operation (type
, arg0
, true);
1792 unsigned int i
, count
= elements
.encoded_nelts ();
1793 for (i
= 0; i
< count
; ++i
)
1795 elem
= VECTOR_CST_ELT (arg0
, i
);
1796 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1797 if (elem
== NULL_TREE
)
1799 elements
.quick_push (elem
);
1802 return elements
.build ();
1806 case TRUTH_NOT_EXPR
:
1807 if (TREE_CODE (arg0
) == INTEGER_CST
)
1808 return constant_boolean_node (integer_zerop (arg0
), type
);
1812 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1813 return fold_convert (type
, TREE_REALPART (arg0
));
1817 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1818 return fold_convert (type
, TREE_IMAGPART (arg0
));
1821 case VEC_UNPACK_LO_EXPR
:
1822 case VEC_UNPACK_HI_EXPR
:
1823 case VEC_UNPACK_FLOAT_LO_EXPR
:
1824 case VEC_UNPACK_FLOAT_HI_EXPR
:
1825 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
1826 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
1828 unsigned HOST_WIDE_INT out_nelts
, in_nelts
, i
;
1829 enum tree_code subcode
;
1831 if (TREE_CODE (arg0
) != VECTOR_CST
)
1834 if (!VECTOR_CST_NELTS (arg0
).is_constant (&in_nelts
))
1836 out_nelts
= in_nelts
/ 2;
1837 gcc_assert (known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1839 unsigned int offset
= 0;
1840 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1841 || code
== VEC_UNPACK_FLOAT_LO_EXPR
1842 || code
== VEC_UNPACK_FIX_TRUNC_LO_EXPR
))
1845 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1847 else if (code
== VEC_UNPACK_FLOAT_LO_EXPR
1848 || code
== VEC_UNPACK_FLOAT_HI_EXPR
)
1849 subcode
= FLOAT_EXPR
;
1851 subcode
= FIX_TRUNC_EXPR
;
1853 tree_vector_builder
elts (type
, out_nelts
, 1);
1854 for (i
= 0; i
< out_nelts
; i
++)
1856 tree elt
= fold_convert_const (subcode
, TREE_TYPE (type
),
1857 VECTOR_CST_ELT (arg0
, i
+ offset
));
1858 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1860 elts
.quick_push (elt
);
1863 return elts
.build ();
1866 case VEC_DUPLICATE_EXPR
:
1867 if (CONSTANT_CLASS_P (arg0
))
1868 return build_vector_from_val (type
, arg0
);
1878 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1879 indicates which particular sizetype to create. */
1882 size_int_kind (poly_int64 number
, enum size_type_kind kind
)
1884 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1887 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1888 is a tree code. The type of the result is taken from the operands.
1889 Both must be equivalent integer types, ala int_binop_types_match_p.
1890 If the operands are constant, so is the result. */
1893 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1895 tree type
= TREE_TYPE (arg0
);
1897 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1898 return error_mark_node
;
1900 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1903 /* Handle the special case of two poly_int constants faster. */
1904 if (poly_int_tree_p (arg0
) && poly_int_tree_p (arg1
))
1906 /* And some specific cases even faster than that. */
1907 if (code
== PLUS_EXPR
)
1909 if (integer_zerop (arg0
)
1910 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0
)))
1912 if (integer_zerop (arg1
)
1913 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1
)))
1916 else if (code
== MINUS_EXPR
)
1918 if (integer_zerop (arg1
)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1
)))
1922 else if (code
== MULT_EXPR
)
1924 if (integer_onep (arg0
)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0
)))
1929 /* Handle general case of two integer constants. For sizetype
1930 constant calculations we always want to know about overflow,
1931 even in the unsigned case. */
1932 tree res
= int_const_binop (code
, arg0
, arg1
, -1);
1933 if (res
!= NULL_TREE
)
1937 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1940 /* Given two values, either both of sizetype or both of bitsizetype,
1941 compute the difference between the two values. Return the value
1942 in signed type corresponding to the type of the operands. */
1945 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1947 tree type
= TREE_TYPE (arg0
);
1950 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1953 /* If the type is already signed, just do the simple thing. */
1954 if (!TYPE_UNSIGNED (type
))
1955 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1957 if (type
== sizetype
)
1959 else if (type
== bitsizetype
)
1960 ctype
= sbitsizetype
;
1962 ctype
= signed_type_for (type
);
1964 /* If either operand is not a constant, do the conversions to the signed
1965 type and subtract. The hardware will do the right thing with any
1966 overflow in the subtraction. */
1967 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1968 return size_binop_loc (loc
, MINUS_EXPR
,
1969 fold_convert_loc (loc
, ctype
, arg0
),
1970 fold_convert_loc (loc
, ctype
, arg1
));
1972 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1973 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1974 overflow) and negate (which can't either). Special-case a result
1975 of zero while we're here. */
1976 if (tree_int_cst_equal (arg0
, arg1
))
1977 return build_int_cst (ctype
, 0);
1978 else if (tree_int_cst_lt (arg1
, arg0
))
1979 return fold_convert_loc (loc
, ctype
,
1980 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1982 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1983 fold_convert_loc (loc
, ctype
,
1984 size_binop_loc (loc
,
1989 /* A subroutine of fold_convert_const handling conversions of an
1990 INTEGER_CST to another integer type. */
1993 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1995 /* Given an integer constant, make new constant with new type,
1996 appropriately sign-extended or truncated. Use widest_int
1997 so that any extension is done according ARG1's type. */
1998 return force_fit_type (type
, wi::to_widest (arg1
),
1999 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
2000 TREE_OVERFLOW (arg1
));
2003 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2004 to an integer type. */
2007 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
2009 bool overflow
= false;
2012 /* The following code implements the floating point to integer
2013 conversion rules required by the Java Language Specification,
2014 that IEEE NaNs are mapped to zero and values that overflow
2015 the target precision saturate, i.e. values greater than
2016 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2017 are mapped to INT_MIN. These semantics are allowed by the
2018 C and C++ standards that simply state that the behavior of
2019 FP-to-integer conversion is unspecified upon overflow. */
2023 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
2027 case FIX_TRUNC_EXPR
:
2028 real_trunc (&r
, VOIDmode
, &x
);
2035 /* If R is NaN, return zero and show we have an overflow. */
2036 if (REAL_VALUE_ISNAN (r
))
2039 val
= wi::zero (TYPE_PRECISION (type
));
2042 /* See if R is less than the lower bound or greater than the
2047 tree lt
= TYPE_MIN_VALUE (type
);
2048 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
2049 if (real_less (&r
, &l
))
2052 val
= wi::to_wide (lt
);
2058 tree ut
= TYPE_MAX_VALUE (type
);
2061 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
2062 if (real_less (&u
, &r
))
2065 val
= wi::to_wide (ut
);
2071 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
2073 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
2077 /* A subroutine of fold_convert_const handling conversions of a
2078 FIXED_CST to an integer type. */
2081 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
2084 double_int temp
, temp_trunc
;
2087 /* Right shift FIXED_CST to temp by fbit. */
2088 temp
= TREE_FIXED_CST (arg1
).data
;
2089 mode
= TREE_FIXED_CST (arg1
).mode
;
2090 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
2092 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
2093 HOST_BITS_PER_DOUBLE_INT
,
2094 SIGNED_FIXED_POINT_MODE_P (mode
));
2096 /* Left shift temp to temp_trunc by fbit. */
2097 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
2098 HOST_BITS_PER_DOUBLE_INT
,
2099 SIGNED_FIXED_POINT_MODE_P (mode
));
2103 temp
= double_int_zero
;
2104 temp_trunc
= double_int_zero
;
2107 /* If FIXED_CST is negative, we need to round the value toward 0.
2108 By checking if the fractional bits are not zero to add 1 to temp. */
2109 if (SIGNED_FIXED_POINT_MODE_P (mode
)
2110 && temp_trunc
.is_negative ()
2111 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
2112 temp
+= double_int_one
;
2114 /* Given a fixed-point constant, make new constant with new type,
2115 appropriately sign-extended or truncated. */
2116 t
= force_fit_type (type
, temp
, -1,
2117 (temp
.is_negative ()
2118 && (TYPE_UNSIGNED (type
)
2119 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
2120 | TREE_OVERFLOW (arg1
));
2125 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2126 to another floating point type. */
2129 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
2131 REAL_VALUE_TYPE value
;
2134 /* Don't perform the operation if flag_signaling_nans is on
2135 and the operand is a signaling NaN. */
2136 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
2137 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1
)))
2140 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
2141 t
= build_real (type
, value
);
2143 /* If converting an infinity or NAN to a representation that doesn't
2144 have one, set the overflow bit so that we can produce some kind of
2145 error message at the appropriate point if necessary. It's not the
2146 most user-friendly message, but it's better than nothing. */
2147 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
2148 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
2149 TREE_OVERFLOW (t
) = 1;
2150 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
2151 && !MODE_HAS_NANS (TYPE_MODE (type
)))
2152 TREE_OVERFLOW (t
) = 1;
2153 /* Regular overflow, conversion produced an infinity in a mode that
2154 can't represent them. */
2155 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
2156 && REAL_VALUE_ISINF (value
)
2157 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2158 TREE_OVERFLOW (t
) = 1;
2160 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2164 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2165 to a floating point type. */
2168 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2170 REAL_VALUE_TYPE value
;
2173 real_convert_from_fixed (&value
, SCALAR_FLOAT_TYPE_MODE (type
),
2174 &TREE_FIXED_CST (arg1
));
2175 t
= build_real (type
, value
);
2177 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2181 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2182 to another fixed-point type. */
2185 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2187 FIXED_VALUE_TYPE value
;
2191 overflow_p
= fixed_convert (&value
, SCALAR_TYPE_MODE (type
),
2192 &TREE_FIXED_CST (arg1
), TYPE_SATURATING (type
));
2193 t
= build_fixed (type
, value
);
2195 /* Propagate overflow flags. */
2196 if (overflow_p
| TREE_OVERFLOW (arg1
))
2197 TREE_OVERFLOW (t
) = 1;
2201 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2202 to a fixed-point type. */
2205 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2207 FIXED_VALUE_TYPE value
;
2212 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2214 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2215 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2216 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? HOST_WIDE_INT_M1
: 0;
2218 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2220 overflow_p
= fixed_convert_from_int (&value
, SCALAR_TYPE_MODE (type
), di
,
2221 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2222 TYPE_SATURATING (type
));
2223 t
= build_fixed (type
, value
);
2225 /* Propagate overflow flags. */
2226 if (overflow_p
| TREE_OVERFLOW (arg1
))
2227 TREE_OVERFLOW (t
) = 1;
2231 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2232 to a fixed-point type. */
2235 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2237 FIXED_VALUE_TYPE value
;
2241 overflow_p
= fixed_convert_from_real (&value
, SCALAR_TYPE_MODE (type
),
2242 &TREE_REAL_CST (arg1
),
2243 TYPE_SATURATING (type
));
2244 t
= build_fixed (type
, value
);
2246 /* Propagate overflow flags. */
2247 if (overflow_p
| TREE_OVERFLOW (arg1
))
2248 TREE_OVERFLOW (t
) = 1;
2252 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2253 type TYPE. If no simplification can be done return NULL_TREE. */
2256 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2258 tree arg_type
= TREE_TYPE (arg1
);
2259 if (arg_type
== type
)
2262 /* We can't widen types, since the runtime value could overflow the
2263 original type before being extended to the new type. */
2264 if (POLY_INT_CST_P (arg1
)
2265 && (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
2266 && TYPE_PRECISION (type
) <= TYPE_PRECISION (arg_type
))
2267 return build_poly_int_cst (type
,
2268 poly_wide_int::from (poly_int_cst_value (arg1
),
2269 TYPE_PRECISION (type
),
2270 TYPE_SIGN (arg_type
)));
2272 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2273 || TREE_CODE (type
) == OFFSET_TYPE
)
2275 if (TREE_CODE (arg1
) == INTEGER_CST
)
2276 return fold_convert_const_int_from_int (type
, arg1
);
2277 else if (TREE_CODE (arg1
) == REAL_CST
)
2278 return fold_convert_const_int_from_real (code
, type
, arg1
);
2279 else if (TREE_CODE (arg1
) == FIXED_CST
)
2280 return fold_convert_const_int_from_fixed (type
, arg1
);
2282 else if (TREE_CODE (type
) == REAL_TYPE
)
2284 if (TREE_CODE (arg1
) == INTEGER_CST
)
2285 return build_real_from_int_cst (type
, arg1
);
2286 else if (TREE_CODE (arg1
) == REAL_CST
)
2287 return fold_convert_const_real_from_real (type
, arg1
);
2288 else if (TREE_CODE (arg1
) == FIXED_CST
)
2289 return fold_convert_const_real_from_fixed (type
, arg1
);
2291 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2293 if (TREE_CODE (arg1
) == FIXED_CST
)
2294 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2295 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2296 return fold_convert_const_fixed_from_int (type
, arg1
);
2297 else if (TREE_CODE (arg1
) == REAL_CST
)
2298 return fold_convert_const_fixed_from_real (type
, arg1
);
2300 else if (TREE_CODE (type
) == VECTOR_TYPE
)
2302 if (TREE_CODE (arg1
) == VECTOR_CST
2303 && known_eq (TYPE_VECTOR_SUBPARTS (type
), VECTOR_CST_NELTS (arg1
)))
2305 tree elttype
= TREE_TYPE (type
);
2306 tree arg1_elttype
= TREE_TYPE (TREE_TYPE (arg1
));
2307 /* We can't handle steps directly when extending, since the
2308 values need to wrap at the original precision first. */
2310 = (INTEGRAL_TYPE_P (elttype
)
2311 && INTEGRAL_TYPE_P (arg1_elttype
)
2312 && TYPE_PRECISION (elttype
) <= TYPE_PRECISION (arg1_elttype
));
2313 tree_vector_builder v
;
2314 if (!v
.new_unary_operation (type
, arg1
, step_ok_p
))
2316 unsigned int len
= v
.encoded_nelts ();
2317 for (unsigned int i
= 0; i
< len
; ++i
)
2319 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2320 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2321 if (cvt
== NULL_TREE
)
2331 /* Construct a vector of zero elements of vector type TYPE. */
2334 build_zero_vector (tree type
)
2338 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2339 return build_vector_from_val (type
, t
);
2342 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2345 fold_convertible_p (const_tree type
, const_tree arg
)
2347 tree orig
= TREE_TYPE (arg
);
2352 if (TREE_CODE (arg
) == ERROR_MARK
2353 || TREE_CODE (type
) == ERROR_MARK
2354 || TREE_CODE (orig
) == ERROR_MARK
)
2357 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2360 switch (TREE_CODE (type
))
2362 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2363 case POINTER_TYPE
: case REFERENCE_TYPE
:
2365 return (INTEGRAL_TYPE_P (orig
)
2366 || (POINTER_TYPE_P (orig
)
2367 && TYPE_PRECISION (type
) <= TYPE_PRECISION (orig
))
2368 || TREE_CODE (orig
) == OFFSET_TYPE
);
2371 case FIXED_POINT_TYPE
:
2373 return TREE_CODE (type
) == TREE_CODE (orig
);
2376 return (VECTOR_TYPE_P (orig
)
2377 && known_eq (TYPE_VECTOR_SUBPARTS (type
),
2378 TYPE_VECTOR_SUBPARTS (orig
))
2379 && fold_convertible_p (TREE_TYPE (type
), TREE_TYPE (orig
)));
2386 /* Convert expression ARG to type TYPE. Used by the middle-end for
2387 simple conversions in preference to calling the front-end's convert. */
2390 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2392 tree orig
= TREE_TYPE (arg
);
2398 if (TREE_CODE (arg
) == ERROR_MARK
2399 || TREE_CODE (type
) == ERROR_MARK
2400 || TREE_CODE (orig
) == ERROR_MARK
)
2401 return error_mark_node
;
2403 switch (TREE_CODE (type
))
2406 case REFERENCE_TYPE
:
2407 /* Handle conversions between pointers to different address spaces. */
2408 if (POINTER_TYPE_P (orig
)
2409 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2410 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2411 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2414 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2416 if (TREE_CODE (arg
) == INTEGER_CST
)
2418 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2419 if (tem
!= NULL_TREE
)
2422 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2423 || TREE_CODE (orig
) == OFFSET_TYPE
)
2424 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2425 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2426 return fold_convert_loc (loc
, type
,
2427 fold_build1_loc (loc
, REALPART_EXPR
,
2428 TREE_TYPE (orig
), arg
));
2429 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2430 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2431 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2434 if (TREE_CODE (arg
) == INTEGER_CST
)
2436 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2437 if (tem
!= NULL_TREE
)
2440 else if (TREE_CODE (arg
) == REAL_CST
)
2442 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2443 if (tem
!= NULL_TREE
)
2446 else if (TREE_CODE (arg
) == FIXED_CST
)
2448 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2449 if (tem
!= NULL_TREE
)
2453 switch (TREE_CODE (orig
))
2456 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2457 case POINTER_TYPE
: case REFERENCE_TYPE
:
2458 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2461 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2463 case FIXED_POINT_TYPE
:
2464 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2467 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2468 return fold_convert_loc (loc
, type
, tem
);
2474 case FIXED_POINT_TYPE
:
2475 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2476 || TREE_CODE (arg
) == REAL_CST
)
2478 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2479 if (tem
!= NULL_TREE
)
2480 goto fold_convert_exit
;
2483 switch (TREE_CODE (orig
))
2485 case FIXED_POINT_TYPE
:
2490 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2493 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2494 return fold_convert_loc (loc
, type
, tem
);
2501 switch (TREE_CODE (orig
))
2504 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2505 case POINTER_TYPE
: case REFERENCE_TYPE
:
2507 case FIXED_POINT_TYPE
:
2508 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2509 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2510 fold_convert_loc (loc
, TREE_TYPE (type
),
2511 integer_zero_node
));
2516 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2518 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2519 TREE_OPERAND (arg
, 0));
2520 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2521 TREE_OPERAND (arg
, 1));
2522 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2525 arg
= save_expr (arg
);
2526 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2527 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2528 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2529 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2530 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2538 if (integer_zerop (arg
))
2539 return build_zero_vector (type
);
2540 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2541 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2542 || TREE_CODE (orig
) == VECTOR_TYPE
);
2543 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2546 tem
= fold_ignored_result (arg
);
2547 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2550 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2551 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2555 protected_set_expr_location_unshare (tem
, loc
);
2559 /* Return false if expr can be assumed not to be an lvalue, true
2563 maybe_lvalue_p (const_tree x
)
2565 /* We only need to wrap lvalue tree codes. */
2566 switch (TREE_CODE (x
))
2579 case ARRAY_RANGE_REF
:
2585 case PREINCREMENT_EXPR
:
2586 case PREDECREMENT_EXPR
:
2588 case TRY_CATCH_EXPR
:
2589 case WITH_CLEANUP_EXPR
:
2595 case VIEW_CONVERT_EXPR
:
2599 /* Assume the worst for front-end tree codes. */
2600 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2608 /* Return an expr equal to X but certainly not valid as an lvalue. */
2611 non_lvalue_loc (location_t loc
, tree x
)
2613 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2618 if (! maybe_lvalue_p (x
))
2620 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2623 /* When pedantic, return an expr equal to X but certainly not valid as a
2624 pedantic lvalue. Otherwise, return X. */
2627 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2629 return protected_set_expr_location_unshare (x
, loc
);
2632 /* Given a tree comparison code, return the code that is the logical inverse.
2633 It is generally not safe to do this for floating-point comparisons, except
2634 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2635 ERROR_MARK in this case. */
2638 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2640 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2641 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2651 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2653 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2655 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2657 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2671 return UNORDERED_EXPR
;
2672 case UNORDERED_EXPR
:
2673 return ORDERED_EXPR
;
2679 /* Similar, but return the comparison that results if the operands are
2680 swapped. This is safe for floating-point. */
2683 swap_tree_comparison (enum tree_code code
)
2690 case UNORDERED_EXPR
:
2716 /* Convert a comparison tree code from an enum tree_code representation
2717 into a compcode bit-based encoding. This function is the inverse of
2718 compcode_to_comparison. */
2720 static enum comparison_code
2721 comparison_to_compcode (enum tree_code code
)
2738 return COMPCODE_ORD
;
2739 case UNORDERED_EXPR
:
2740 return COMPCODE_UNORD
;
2742 return COMPCODE_UNLT
;
2744 return COMPCODE_UNEQ
;
2746 return COMPCODE_UNLE
;
2748 return COMPCODE_UNGT
;
2750 return COMPCODE_LTGT
;
2752 return COMPCODE_UNGE
;
2758 /* Convert a compcode bit-based encoding of a comparison operator back
2759 to GCC's enum tree_code representation. This function is the
2760 inverse of comparison_to_compcode. */
2762 static enum tree_code
2763 compcode_to_comparison (enum comparison_code code
)
2780 return ORDERED_EXPR
;
2781 case COMPCODE_UNORD
:
2782 return UNORDERED_EXPR
;
2800 /* Return true if COND1 tests the opposite condition of COND2. */
2803 inverse_conditions_p (const_tree cond1
, const_tree cond2
)
2805 return (COMPARISON_CLASS_P (cond1
)
2806 && COMPARISON_CLASS_P (cond2
)
2807 && (invert_tree_comparison
2809 HONOR_NANS (TREE_OPERAND (cond1
, 0))) == TREE_CODE (cond2
))
2810 && operand_equal_p (TREE_OPERAND (cond1
, 0),
2811 TREE_OPERAND (cond2
, 0), 0)
2812 && operand_equal_p (TREE_OPERAND (cond1
, 1),
2813 TREE_OPERAND (cond2
, 1), 0));
2816 /* Return a tree for the comparison which is the combination of
2817 doing the AND or OR (depending on CODE) of the two operations LCODE
2818 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2819 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2820 if this makes the transformation invalid. */
2823 combine_comparisons (location_t loc
,
2824 enum tree_code code
, enum tree_code lcode
,
2825 enum tree_code rcode
, tree truth_type
,
2826 tree ll_arg
, tree lr_arg
)
2828 bool honor_nans
= HONOR_NANS (ll_arg
);
2829 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2830 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2835 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2836 compcode
= lcompcode
& rcompcode
;
2839 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2840 compcode
= lcompcode
| rcompcode
;
2849 /* Eliminate unordered comparisons, as well as LTGT and ORD
2850 which are not used unless the mode has NaNs. */
2851 compcode
&= ~COMPCODE_UNORD
;
2852 if (compcode
== COMPCODE_LTGT
)
2853 compcode
= COMPCODE_NE
;
2854 else if (compcode
== COMPCODE_ORD
)
2855 compcode
= COMPCODE_TRUE
;
2857 else if (flag_trapping_math
)
2859 /* Check that the original operation and the optimized ones will trap
2860 under the same condition. */
2861 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2862 && (lcompcode
!= COMPCODE_EQ
)
2863 && (lcompcode
!= COMPCODE_ORD
);
2864 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2865 && (rcompcode
!= COMPCODE_EQ
)
2866 && (rcompcode
!= COMPCODE_ORD
);
2867 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2868 && (compcode
!= COMPCODE_EQ
)
2869 && (compcode
!= COMPCODE_ORD
);
2871 /* In a short-circuited boolean expression the LHS might be
2872 such that the RHS, if evaluated, will never trap. For
2873 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2874 if neither x nor y is NaN. (This is a mixed blessing: for
2875 example, the expression above will never trap, hence
2876 optimizing it to x < y would be invalid). */
2877 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2878 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2881 /* If the comparison was short-circuited, and only the RHS
2882 trapped, we may now generate a spurious trap. */
2884 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2887 /* If we changed the conditions that cause a trap, we lose. */
2888 if ((ltrap
|| rtrap
) != trap
)
2892 if (compcode
== COMPCODE_TRUE
)
2893 return constant_boolean_node (true, truth_type
);
2894 else if (compcode
== COMPCODE_FALSE
)
2895 return constant_boolean_node (false, truth_type
);
2898 enum tree_code tcode
;
2900 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2901 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2905 /* Return nonzero if two operands (typically of the same tree node)
2906 are necessarily equal. FLAGS modifies behavior as follows:
2908 If OEP_ONLY_CONST is set, only return nonzero for constants.
2909 This function tests whether the operands are indistinguishable;
2910 it does not test whether they are equal using C's == operation.
2911 The distinction is important for IEEE floating point, because
2912 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2913 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2915 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2916 even though it may hold multiple values during a function.
2917 This is because a GCC tree node guarantees that nothing else is
2918 executed between the evaluation of its "operands" (which may often
2919 be evaluated in arbitrary order). Hence if the operands themselves
2920 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2921 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2922 unset means assuming isochronic (or instantaneous) tree equivalence.
2923 Unless comparing arbitrary expression trees, such as from different
2924 statements, this flag can usually be left unset.
2926 If OEP_PURE_SAME is set, then pure functions with identical arguments
2927 are considered the same. It is used when the caller has other ways
2928 to ensure that global memory is unchanged in between.
2930 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2931 not values of expressions.
2933 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2934 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2936 If OEP_BITWISE is set, then require the values to be bitwise identical
2937 rather than simply numerically equal. Do not take advantage of things
2938 like math-related flags or undefined behavior; only return true for
2939 values that are provably bitwise identical in all circumstances.
2941 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2942 any operand with side effect. This is unnecesarily conservative in the
2943 case we know that arg0 and arg1 are in disjoint code paths (such as in
2944 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2945 addresses with TREE_CONSTANT flag set so we know that &var == &var
2946 even if var is volatile. */
2949 operand_compare::operand_equal_p (const_tree arg0
, const_tree arg1
,
2953 if (verify_hash_value (arg0
, arg1
, flags
, &r
))
2956 STRIP_ANY_LOCATION_WRAPPER (arg0
);
2957 STRIP_ANY_LOCATION_WRAPPER (arg1
);
2959 /* If either is ERROR_MARK, they aren't equal. */
2960 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2961 || TREE_TYPE (arg0
) == error_mark_node
2962 || TREE_TYPE (arg1
) == error_mark_node
)
2965 /* Similar, if either does not have a type (like a template id),
2966 they aren't equal. */
2967 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2970 /* Bitwise identity makes no sense if the values have different layouts. */
2971 if ((flags
& OEP_BITWISE
)
2972 && !tree_nop_conversion_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
2975 /* We cannot consider pointers to different address space equal. */
2976 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2977 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2978 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2979 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2982 /* Check equality of integer constants before bailing out due to
2983 precision differences. */
2984 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2986 /* Address of INTEGER_CST is not defined; check that we did not forget
2987 to drop the OEP_ADDRESS_OF flags. */
2988 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
2989 return tree_int_cst_equal (arg0
, arg1
);
2992 if (!(flags
& OEP_ADDRESS_OF
))
2994 /* If both types don't have the same signedness, then we can't consider
2995 them equal. We must check this before the STRIP_NOPS calls
2996 because they may change the signedness of the arguments. As pointers
2997 strictly don't have a signedness, require either two pointers or
2998 two non-pointers as well. */
2999 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
3000 || POINTER_TYPE_P (TREE_TYPE (arg0
))
3001 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
3004 /* If both types don't have the same precision, then it is not safe
3006 if (element_precision (TREE_TYPE (arg0
))
3007 != element_precision (TREE_TYPE (arg1
)))
3014 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3015 sanity check once the issue is solved. */
3017 /* Addresses of conversions and SSA_NAMEs (and many other things)
3018 are not defined. Check that we did not forget to drop the
3019 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3020 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
3021 && TREE_CODE (arg0
) != SSA_NAME
);
3024 /* In case both args are comparisons but with different comparison
3025 code, try to swap the comparison operands of one arg to produce
3026 a match and compare that variant. */
3027 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3028 && COMPARISON_CLASS_P (arg0
)
3029 && COMPARISON_CLASS_P (arg1
))
3031 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
3033 if (TREE_CODE (arg0
) == swap_code
)
3034 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3035 TREE_OPERAND (arg1
, 1), flags
)
3036 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3037 TREE_OPERAND (arg1
, 0), flags
);
3040 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
3042 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3043 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
3045 else if (flags
& OEP_ADDRESS_OF
)
3047 /* If we are interested in comparing addresses ignore
3048 MEM_REF wrappings of the base that can appear just for
3050 if (TREE_CODE (arg0
) == MEM_REF
3052 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
3053 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
3054 && integer_zerop (TREE_OPERAND (arg0
, 1)))
3056 else if (TREE_CODE (arg1
) == MEM_REF
3058 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
3060 && integer_zerop (TREE_OPERAND (arg1
, 1)))
3068 /* When not checking adddresses, this is needed for conversions and for
3069 COMPONENT_REF. Might as well play it safe and always test this. */
3070 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
3072 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
3073 && !(flags
& OEP_ADDRESS_OF
)))
3076 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3077 We don't care about side effects in that case because the SAVE_EXPR
3078 takes care of that for us. In all other cases, two expressions are
3079 equal if they have no side effects. If we have two identical
3080 expressions with side effects that should be treated the same due
3081 to the only side effects being identical SAVE_EXPR's, that will
3082 be detected in the recursive calls below.
3083 If we are taking an invariant address of two identical objects
3084 they are necessarily equal as well. */
3085 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
3086 && (TREE_CODE (arg0
) == SAVE_EXPR
3087 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
3088 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
3091 /* Next handle constant cases, those for which we can return 1 even
3092 if ONLY_CONST is set. */
3093 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
3094 switch (TREE_CODE (arg0
))
3097 return tree_int_cst_equal (arg0
, arg1
);
3100 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
3101 TREE_FIXED_CST (arg1
));
3104 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
3107 if (!(flags
& OEP_BITWISE
) && !HONOR_SIGNED_ZEROS (arg0
))
3109 /* If we do not distinguish between signed and unsigned zero,
3110 consider them equal. */
3111 if (real_zerop (arg0
) && real_zerop (arg1
))
3118 if (VECTOR_CST_LOG2_NPATTERNS (arg0
)
3119 != VECTOR_CST_LOG2_NPATTERNS (arg1
))
3122 if (VECTOR_CST_NELTS_PER_PATTERN (arg0
)
3123 != VECTOR_CST_NELTS_PER_PATTERN (arg1
))
3126 unsigned int count
= vector_cst_encoded_nelts (arg0
);
3127 for (unsigned int i
= 0; i
< count
; ++i
)
3128 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0
, i
),
3129 VECTOR_CST_ENCODED_ELT (arg1
, i
), flags
))
3135 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
3137 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
3141 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
3142 && ! memcmp (TREE_STRING_POINTER (arg0
),
3143 TREE_STRING_POINTER (arg1
),
3144 TREE_STRING_LENGTH (arg0
)));
3147 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3148 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
3149 flags
| OEP_ADDRESS_OF
3150 | OEP_MATCH_SIDE_EFFECTS
);
3152 /* In GIMPLE empty constructors are allowed in initializers of
3154 return !CONSTRUCTOR_NELTS (arg0
) && !CONSTRUCTOR_NELTS (arg1
);
3159 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3160 two instances of undefined behavior will give identical results. */
3161 if (flags
& (OEP_ONLY_CONST
| OEP_BITWISE
))
3164 /* Define macros to test an operand from arg0 and arg1 for equality and a
3165 variant that allows null and views null as being different from any
3166 non-null value. In the latter case, if either is null, the both
3167 must be; otherwise, do the normal comparison. */
3168 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3169 TREE_OPERAND (arg1, N), flags)
3171 #define OP_SAME_WITH_NULL(N) \
3172 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3173 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3175 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
3178 /* Two conversions are equal only if signedness and modes match. */
3179 switch (TREE_CODE (arg0
))
3182 case FIX_TRUNC_EXPR
:
3183 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3184 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3194 case tcc_comparison
:
3196 if (OP_SAME (0) && OP_SAME (1))
3199 /* For commutative ops, allow the other order. */
3200 return (commutative_tree_code (TREE_CODE (arg0
))
3201 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3202 TREE_OPERAND (arg1
, 1), flags
)
3203 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3204 TREE_OPERAND (arg1
, 0), flags
));
3207 /* If either of the pointer (or reference) expressions we are
3208 dereferencing contain a side effect, these cannot be equal,
3209 but their addresses can be. */
3210 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3211 && (TREE_SIDE_EFFECTS (arg0
)
3212 || TREE_SIDE_EFFECTS (arg1
)))
3215 switch (TREE_CODE (arg0
))
3218 if (!(flags
& OEP_ADDRESS_OF
))
3220 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3221 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3223 /* Verify that the access types are compatible. */
3224 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0
))
3225 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)))
3228 flags
&= ~OEP_ADDRESS_OF
;
3232 /* Require the same offset. */
3233 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3234 TYPE_SIZE (TREE_TYPE (arg1
)),
3235 flags
& ~OEP_ADDRESS_OF
))
3240 case VIEW_CONVERT_EXPR
:
3243 case TARGET_MEM_REF
:
3245 if (!(flags
& OEP_ADDRESS_OF
))
3247 /* Require equal access sizes */
3248 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3249 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3250 || !TYPE_SIZE (TREE_TYPE (arg1
))
3251 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3252 TYPE_SIZE (TREE_TYPE (arg1
)),
3255 /* Verify that access happens in similar types. */
3256 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3258 /* Verify that accesses are TBAA compatible. */
3259 if (!alias_ptr_types_compatible_p
3260 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3261 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3262 || (MR_DEPENDENCE_CLIQUE (arg0
)
3263 != MR_DEPENDENCE_CLIQUE (arg1
))
3264 || (MR_DEPENDENCE_BASE (arg0
)
3265 != MR_DEPENDENCE_BASE (arg1
)))
3267 /* Verify that alignment is compatible. */
3268 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3269 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3272 flags
&= ~OEP_ADDRESS_OF
;
3273 return (OP_SAME (0) && OP_SAME (1)
3274 /* TARGET_MEM_REF require equal extra operands. */
3275 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3276 || (OP_SAME_WITH_NULL (2)
3277 && OP_SAME_WITH_NULL (3)
3278 && OP_SAME_WITH_NULL (4))));
3281 case ARRAY_RANGE_REF
:
3284 flags
&= ~OEP_ADDRESS_OF
;
3285 /* Compare the array index by value if it is constant first as we
3286 may have different types but same value here. */
3287 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3288 TREE_OPERAND (arg1
, 1))
3290 && OP_SAME_WITH_NULL (2)
3291 && OP_SAME_WITH_NULL (3)
3292 /* Compare low bound and element size as with OEP_ADDRESS_OF
3293 we have to account for the offset of the ref. */
3294 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3295 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3296 || (operand_equal_p (array_ref_low_bound
3297 (CONST_CAST_TREE (arg0
)),
3299 (CONST_CAST_TREE (arg1
)), flags
)
3300 && operand_equal_p (array_ref_element_size
3301 (CONST_CAST_TREE (arg0
)),
3302 array_ref_element_size
3303 (CONST_CAST_TREE (arg1
)),
3307 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3308 may be NULL when we're called to compare MEM_EXPRs. */
3309 if (!OP_SAME_WITH_NULL (0))
3312 bool compare_address
= flags
& OEP_ADDRESS_OF
;
3314 /* Most of time we only need to compare FIELD_DECLs for equality.
3315 However when determining address look into actual offsets.
3316 These may match for unions and unshared record types. */
3317 flags
&= ~OEP_ADDRESS_OF
;
3320 if (compare_address
)
3322 if (TREE_OPERAND (arg0
, 2)
3323 || TREE_OPERAND (arg1
, 2))
3324 return OP_SAME_WITH_NULL (2);
3325 tree field0
= TREE_OPERAND (arg0
, 1);
3326 tree field1
= TREE_OPERAND (arg1
, 1);
3328 if (!operand_equal_p (DECL_FIELD_OFFSET (field0
),
3329 DECL_FIELD_OFFSET (field1
), flags
)
3330 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0
),
3331 DECL_FIELD_BIT_OFFSET (field1
),
3339 return OP_SAME_WITH_NULL (2);
3344 flags
&= ~OEP_ADDRESS_OF
;
3345 return OP_SAME (1) && OP_SAME (2);
3351 case tcc_expression
:
3352 switch (TREE_CODE (arg0
))
3355 /* Be sure we pass right ADDRESS_OF flag. */
3356 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3357 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3358 TREE_OPERAND (arg1
, 0),
3359 flags
| OEP_ADDRESS_OF
);
3361 case TRUTH_NOT_EXPR
:
3364 case TRUTH_ANDIF_EXPR
:
3365 case TRUTH_ORIF_EXPR
:
3366 return OP_SAME (0) && OP_SAME (1);
3368 case WIDEN_MULT_PLUS_EXPR
:
3369 case WIDEN_MULT_MINUS_EXPR
:
3372 /* The multiplcation operands are commutative. */
3375 case TRUTH_AND_EXPR
:
3377 case TRUTH_XOR_EXPR
:
3378 if (OP_SAME (0) && OP_SAME (1))
3381 /* Otherwise take into account this is a commutative operation. */
3382 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3383 TREE_OPERAND (arg1
, 1), flags
)
3384 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3385 TREE_OPERAND (arg1
, 0), flags
));
3388 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3390 flags
&= ~OEP_ADDRESS_OF
;
3393 case BIT_INSERT_EXPR
:
3394 /* BIT_INSERT_EXPR has an implict operand as the type precision
3395 of op1. Need to check to make sure they are the same. */
3396 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3397 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3398 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3399 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3405 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3410 case PREDECREMENT_EXPR
:
3411 case PREINCREMENT_EXPR
:
3412 case POSTDECREMENT_EXPR
:
3413 case POSTINCREMENT_EXPR
:
3414 if (flags
& OEP_LEXICOGRAPHIC
)
3415 return OP_SAME (0) && OP_SAME (1);
3418 case CLEANUP_POINT_EXPR
:
3421 if (flags
& OEP_LEXICOGRAPHIC
)
3426 /* Virtual table reference. */
3427 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0
),
3428 OBJ_TYPE_REF_EXPR (arg1
), flags
))
3430 flags
&= ~OEP_ADDRESS_OF
;
3431 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0
))
3432 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1
)))
3434 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0
),
3435 OBJ_TYPE_REF_OBJECT (arg1
), flags
))
3437 if (virtual_method_call_p (arg0
))
3439 if (!virtual_method_call_p (arg1
))
3441 return types_same_for_odr (obj_type_ref_class (arg0
),
3442 obj_type_ref_class (arg1
));
3451 switch (TREE_CODE (arg0
))
3454 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3455 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3456 /* If not both CALL_EXPRs are either internal or normal function
3457 functions, then they are not equal. */
3459 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3461 /* If the CALL_EXPRs call different internal functions, then they
3463 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3468 /* If the CALL_EXPRs call different functions, then they are not
3470 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3475 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3477 unsigned int cef
= call_expr_flags (arg0
);
3478 if (flags
& OEP_PURE_SAME
)
3479 cef
&= ECF_CONST
| ECF_PURE
;
3482 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3486 /* Now see if all the arguments are the same. */
3488 const_call_expr_arg_iterator iter0
, iter1
;
3490 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3491 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3493 a0
= next_const_call_expr_arg (&iter0
),
3494 a1
= next_const_call_expr_arg (&iter1
))
3495 if (! operand_equal_p (a0
, a1
, flags
))
3498 /* If we get here and both argument lists are exhausted
3499 then the CALL_EXPRs are equal. */
3500 return ! (a0
|| a1
);
3506 case tcc_declaration
:
3507 /* Consider __builtin_sqrt equal to sqrt. */
3508 return (TREE_CODE (arg0
) == FUNCTION_DECL
3509 && fndecl_built_in_p (arg0
) && fndecl_built_in_p (arg1
)
3510 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3511 && (DECL_UNCHECKED_FUNCTION_CODE (arg0
)
3512 == DECL_UNCHECKED_FUNCTION_CODE (arg1
)));
3514 case tcc_exceptional
:
3515 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3517 if (CONSTRUCTOR_NO_CLEARING (arg0
) != CONSTRUCTOR_NO_CLEARING (arg1
))
3520 /* In GIMPLE constructors are used only to build vectors from
3521 elements. Individual elements in the constructor must be
3522 indexed in increasing order and form an initial sequence.
3524 We make no effort to compare constructors in generic.
3525 (see sem_variable::equals in ipa-icf which can do so for
3527 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
3528 || !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3531 /* Be sure that vectors constructed have the same representation.
3532 We only tested element precision and modes to match.
3533 Vectors may be BLKmode and thus also check that the number of
3535 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)),
3536 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
))))
3539 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3540 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3541 unsigned int len
= vec_safe_length (v0
);
3543 if (len
!= vec_safe_length (v1
))
3546 for (unsigned int i
= 0; i
< len
; i
++)
3548 constructor_elt
*c0
= &(*v0
)[i
];
3549 constructor_elt
*c1
= &(*v1
)[i
];
3551 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3552 /* In GIMPLE the indexes can be either NULL or matching i.
3553 Double check this so we won't get false
3554 positives for GENERIC. */
3556 && (TREE_CODE (c0
->index
) != INTEGER_CST
3557 || compare_tree_int (c0
->index
, i
)))
3559 && (TREE_CODE (c1
->index
) != INTEGER_CST
3560 || compare_tree_int (c1
->index
, i
))))
3565 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3566 && (flags
& OEP_LEXICOGRAPHIC
))
3568 /* Compare the STATEMENT_LISTs. */
3569 tree_stmt_iterator tsi1
, tsi2
;
3570 tree body1
= CONST_CAST_TREE (arg0
);
3571 tree body2
= CONST_CAST_TREE (arg1
);
3572 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3573 tsi_next (&tsi1
), tsi_next (&tsi2
))
3575 /* The lists don't have the same number of statements. */
3576 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3578 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3580 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3581 flags
& (OEP_LEXICOGRAPHIC
3582 | OEP_NO_HASH_CHECK
)))
3589 switch (TREE_CODE (arg0
))
3592 if (flags
& OEP_LEXICOGRAPHIC
)
3593 return OP_SAME_WITH_NULL (0);
3595 case DEBUG_BEGIN_STMT
:
3596 if (flags
& OEP_LEXICOGRAPHIC
)
3608 #undef OP_SAME_WITH_NULL
3611 /* Generate a hash value for an expression. This can be used iteratively
3612 by passing a previous result as the HSTATE argument. */
3615 operand_compare::hash_operand (const_tree t
, inchash::hash
&hstate
,
3619 enum tree_code code
;
3620 enum tree_code_class tclass
;
3622 if (t
== NULL_TREE
|| t
== error_mark_node
)
3624 hstate
.merge_hash (0);
3628 STRIP_ANY_LOCATION_WRAPPER (t
);
3630 if (!(flags
& OEP_ADDRESS_OF
))
3633 code
= TREE_CODE (t
);
3637 /* Alas, constants aren't shared, so we can't rely on pointer
3640 hstate
.merge_hash (0);
3643 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3644 for (i
= 0; i
< TREE_INT_CST_EXT_NUNITS (t
); i
++)
3645 hstate
.add_hwi (TREE_INT_CST_ELT (t
, i
));
3650 if (!HONOR_SIGNED_ZEROS (t
) && real_zerop (t
))
3653 val2
= real_hash (TREE_REAL_CST_PTR (t
));
3654 hstate
.merge_hash (val2
);
3659 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
3660 hstate
.merge_hash (val2
);
3664 hstate
.add ((const void *) TREE_STRING_POINTER (t
),
3665 TREE_STRING_LENGTH (t
));
3668 hash_operand (TREE_REALPART (t
), hstate
, flags
);
3669 hash_operand (TREE_IMAGPART (t
), hstate
, flags
);
3673 hstate
.add_int (VECTOR_CST_NPATTERNS (t
));
3674 hstate
.add_int (VECTOR_CST_NELTS_PER_PATTERN (t
));
3675 unsigned int count
= vector_cst_encoded_nelts (t
);
3676 for (unsigned int i
= 0; i
< count
; ++i
)
3677 hash_operand (VECTOR_CST_ENCODED_ELT (t
, i
), hstate
, flags
);
3681 /* We can just compare by pointer. */
3682 hstate
.add_hwi (SSA_NAME_VERSION (t
));
3684 case PLACEHOLDER_EXPR
:
3685 /* The node itself doesn't matter. */
3692 /* A list of expressions, for a CALL_EXPR or as the elements of a
3694 for (; t
; t
= TREE_CHAIN (t
))
3695 hash_operand (TREE_VALUE (t
), hstate
, flags
);
3699 unsigned HOST_WIDE_INT idx
;
3701 flags
&= ~OEP_ADDRESS_OF
;
3702 hstate
.add_int (CONSTRUCTOR_NO_CLEARING (t
));
3703 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
3705 /* In GIMPLE the indexes can be either NULL or matching i. */
3706 if (field
== NULL_TREE
)
3707 field
= bitsize_int (idx
);
3708 hash_operand (field
, hstate
, flags
);
3709 hash_operand (value
, hstate
, flags
);
3713 case STATEMENT_LIST
:
3715 tree_stmt_iterator i
;
3716 for (i
= tsi_start (CONST_CAST_TREE (t
));
3717 !tsi_end_p (i
); tsi_next (&i
))
3718 hash_operand (tsi_stmt (i
), hstate
, flags
);
3722 for (i
= 0; i
< TREE_VEC_LENGTH (t
); ++i
)
3723 hash_operand (TREE_VEC_ELT (t
, i
), hstate
, flags
);
3725 case IDENTIFIER_NODE
:
3726 hstate
.add_object (IDENTIFIER_HASH_VALUE (t
));
3729 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3730 Otherwise nodes that compare equal according to operand_equal_p might
3731 get different hash codes. However, don't do this for machine specific
3732 or front end builtins, since the function code is overloaded in those
3734 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
3735 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
3737 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
3738 code
= TREE_CODE (t
);
3742 if (POLY_INT_CST_P (t
))
3744 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3745 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
3748 tclass
= TREE_CODE_CLASS (code
);
3750 if (tclass
== tcc_declaration
)
3752 /* DECL's have a unique ID */
3753 hstate
.add_hwi (DECL_UID (t
));
3755 else if (tclass
== tcc_comparison
&& !commutative_tree_code (code
))
3757 /* For comparisons that can be swapped, use the lower
3759 enum tree_code ccode
= swap_tree_comparison (code
);
3762 hstate
.add_object (ccode
);
3763 hash_operand (TREE_OPERAND (t
, ccode
!= code
), hstate
, flags
);
3764 hash_operand (TREE_OPERAND (t
, ccode
== code
), hstate
, flags
);
3766 else if (CONVERT_EXPR_CODE_P (code
))
3768 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3770 enum tree_code ccode
= NOP_EXPR
;
3771 hstate
.add_object (ccode
);
3773 /* Don't hash the type, that can lead to having nodes which
3774 compare equal according to operand_equal_p, but which
3775 have different hash codes. Make sure to include signedness
3776 in the hash computation. */
3777 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3778 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3780 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3781 else if (code
== MEM_REF
3782 && (flags
& OEP_ADDRESS_OF
) != 0
3783 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
3784 && DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
3785 && integer_zerop (TREE_OPERAND (t
, 1)))
3786 hash_operand (TREE_OPERAND (TREE_OPERAND (t
, 0), 0),
3788 /* Don't ICE on FE specific trees, or their arguments etc.
3789 during operand_equal_p hash verification. */
3790 else if (!IS_EXPR_CODE_CLASS (tclass
))
3791 gcc_assert (flags
& OEP_HASH_CHECK
);
3794 unsigned int sflags
= flags
;
3796 hstate
.add_object (code
);
3801 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3802 flags
|= OEP_ADDRESS_OF
;
3808 case TARGET_MEM_REF
:
3809 flags
&= ~OEP_ADDRESS_OF
;
3814 if (sflags
& OEP_ADDRESS_OF
)
3816 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3817 if (TREE_OPERAND (t
, 2))
3818 hash_operand (TREE_OPERAND (t
, 2), hstate
,
3819 flags
& ~OEP_ADDRESS_OF
);
3822 tree field
= TREE_OPERAND (t
, 1);
3823 hash_operand (DECL_FIELD_OFFSET (field
),
3824 hstate
, flags
& ~OEP_ADDRESS_OF
);
3825 hash_operand (DECL_FIELD_BIT_OFFSET (field
),
3826 hstate
, flags
& ~OEP_ADDRESS_OF
);
3832 case ARRAY_RANGE_REF
:
3834 sflags
&= ~OEP_ADDRESS_OF
;
3838 flags
&= ~OEP_ADDRESS_OF
;
3841 case WIDEN_MULT_PLUS_EXPR
:
3842 case WIDEN_MULT_MINUS_EXPR
:
3844 /* The multiplication operands are commutative. */
3845 inchash::hash one
, two
;
3846 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3847 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3848 hstate
.add_commutative (one
, two
);
3849 hash_operand (TREE_OPERAND (t
, 2), two
, flags
);
3854 if (CALL_EXPR_FN (t
) == NULL_TREE
)
3855 hstate
.add_int (CALL_EXPR_IFN (t
));
3859 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3860 Usually different TARGET_EXPRs just should use
3861 different temporaries in their slots. */
3862 hash_operand (TARGET_EXPR_SLOT (t
), hstate
, flags
);
3866 /* Virtual table reference. */
3867 inchash::add_expr (OBJ_TYPE_REF_EXPR (t
), hstate
, flags
);
3868 flags
&= ~OEP_ADDRESS_OF
;
3869 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t
), hstate
, flags
);
3870 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t
), hstate
, flags
);
3871 if (!virtual_method_call_p (t
))
3873 if (tree c
= obj_type_ref_class (t
))
3875 c
= TYPE_NAME (TYPE_MAIN_VARIANT (c
));
3876 /* We compute mangled names only when free_lang_data is run.
3877 In that case we can hash precisely. */
3878 if (TREE_CODE (c
) == TYPE_DECL
3879 && DECL_ASSEMBLER_NAME_SET_P (c
))
3881 (IDENTIFIER_HASH_VALUE
3882 (DECL_ASSEMBLER_NAME (c
)));
3889 /* Don't hash the type, that can lead to having nodes which
3890 compare equal according to operand_equal_p, but which
3891 have different hash codes. */
3892 if (code
== NON_LVALUE_EXPR
)
3894 /* Make sure to include signness in the hash computation. */
3895 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3896 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3899 else if (commutative_tree_code (code
))
3901 /* It's a commutative expression. We want to hash it the same
3902 however it appears. We do this by first hashing both operands
3903 and then rehashing based on the order of their independent
3905 inchash::hash one
, two
;
3906 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3907 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3908 hstate
.add_commutative (one
, two
);
3911 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
3912 hash_operand (TREE_OPERAND (t
, i
), hstate
,
3913 i
== 0 ? flags
: sflags
);
3920 operand_compare::verify_hash_value (const_tree arg0
, const_tree arg1
,
3921 unsigned int flags
, bool *ret
)
3923 /* When checking, verify at the outermost operand_equal_p call that
3924 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
3926 if (flag_checking
&& !(flags
& OEP_NO_HASH_CHECK
))
3928 if (operand_equal_p (arg0
, arg1
, flags
| OEP_NO_HASH_CHECK
))
3932 inchash::hash
hstate0 (0), hstate1 (0);
3933 hash_operand (arg0
, hstate0
, flags
| OEP_HASH_CHECK
);
3934 hash_operand (arg1
, hstate1
, flags
| OEP_HASH_CHECK
);
3935 hashval_t h0
= hstate0
.end ();
3936 hashval_t h1
= hstate1
.end ();
3937 gcc_assert (h0
== h1
);
3951 static operand_compare default_compare_instance
;
3953 /* Conveinece wrapper around operand_compare class because usually we do
3954 not need to play with the valueizer. */
3957 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
3959 return default_compare_instance
.operand_equal_p (arg0
, arg1
, flags
);
3965 /* Generate a hash value for an expression. This can be used iteratively
3966 by passing a previous result as the HSTATE argument.
3968 This function is intended to produce the same hash for expressions which
3969 would compare equal using operand_equal_p. */
3971 add_expr (const_tree t
, inchash::hash
&hstate
, unsigned int flags
)
3973 default_compare_instance
.hash_operand (t
, hstate
, flags
);
3978 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3979 with a different signedness or a narrower precision. */
3982 operand_equal_for_comparison_p (tree arg0
, tree arg1
)
3984 if (operand_equal_p (arg0
, arg1
, 0))
3987 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3988 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3991 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3992 and see if the inner values are the same. This removes any
3993 signedness comparison, which doesn't matter here. */
3998 if (operand_equal_p (op0
, op1
, 0))
4001 /* Discard a single widening conversion from ARG1 and see if the inner
4002 value is the same as ARG0. */
4003 if (CONVERT_EXPR_P (arg1
)
4004 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
4005 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
4006 < TYPE_PRECISION (TREE_TYPE (arg1
))
4007 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
4013 /* See if ARG is an expression that is either a comparison or is performing
4014 arithmetic on comparisons. The comparisons must only be comparing
4015 two different values, which will be stored in *CVAL1 and *CVAL2; if
4016 they are nonzero it means that some operands have already been found.
4017 No variables may be used anywhere else in the expression except in the
4020 If this is true, return 1. Otherwise, return zero. */
4023 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
)
4025 enum tree_code code
= TREE_CODE (arg
);
4026 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4028 /* We can handle some of the tcc_expression cases here. */
4029 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4031 else if (tclass
== tcc_expression
4032 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
4033 || code
== COMPOUND_EXPR
))
4034 tclass
= tcc_binary
;
4039 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
);
4042 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4043 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
));
4048 case tcc_expression
:
4049 if (code
== COND_EXPR
)
4050 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4051 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
)
4052 && twoval_comparison_p (TREE_OPERAND (arg
, 2), cval1
, cval2
));
4055 case tcc_comparison
:
4056 /* First see if we can handle the first operand, then the second. For
4057 the second operand, we know *CVAL1 can't be zero. It must be that
4058 one side of the comparison is each of the values; test for the
4059 case where this isn't true by failing if the two operands
4062 if (operand_equal_p (TREE_OPERAND (arg
, 0),
4063 TREE_OPERAND (arg
, 1), 0))
4067 *cval1
= TREE_OPERAND (arg
, 0);
4068 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
4070 else if (*cval2
== 0)
4071 *cval2
= TREE_OPERAND (arg
, 0);
4072 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
4077 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
4079 else if (*cval2
== 0)
4080 *cval2
= TREE_OPERAND (arg
, 1);
4081 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
4093 /* ARG is a tree that is known to contain just arithmetic operations and
4094 comparisons. Evaluate the operations in the tree substituting NEW0 for
4095 any occurrence of OLD0 as an operand of a comparison and likewise for
4099 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
4100 tree old1
, tree new1
)
4102 tree type
= TREE_TYPE (arg
);
4103 enum tree_code code
= TREE_CODE (arg
);
4104 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4106 /* We can handle some of the tcc_expression cases here. */
4107 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4109 else if (tclass
== tcc_expression
4110 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
4111 tclass
= tcc_binary
;
4116 return fold_build1_loc (loc
, code
, type
,
4117 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4118 old0
, new0
, old1
, new1
));
4121 return fold_build2_loc (loc
, code
, type
,
4122 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4123 old0
, new0
, old1
, new1
),
4124 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4125 old0
, new0
, old1
, new1
));
4127 case tcc_expression
:
4131 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
4135 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
4139 return fold_build3_loc (loc
, code
, type
,
4140 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4141 old0
, new0
, old1
, new1
),
4142 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4143 old0
, new0
, old1
, new1
),
4144 eval_subst (loc
, TREE_OPERAND (arg
, 2),
4145 old0
, new0
, old1
, new1
));
4149 /* Fall through - ??? */
4151 case tcc_comparison
:
4153 tree arg0
= TREE_OPERAND (arg
, 0);
4154 tree arg1
= TREE_OPERAND (arg
, 1);
4156 /* We need to check both for exact equality and tree equality. The
4157 former will be true if the operand has a side-effect. In that
4158 case, we know the operand occurred exactly once. */
4160 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
4162 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
4165 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
4167 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
4170 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
4178 /* Return a tree for the case when the result of an expression is RESULT
4179 converted to TYPE and OMITTED was previously an operand of the expression
4180 but is now not needed (e.g., we folded OMITTED * 0).
4182 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4183 the conversion of RESULT to TYPE. */
4186 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
4188 tree t
= fold_convert_loc (loc
, type
, result
);
4190 /* If the resulting operand is an empty statement, just return the omitted
4191 statement casted to void. */
4192 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
4193 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
4194 fold_ignored_result (omitted
));
4196 if (TREE_SIDE_EFFECTS (omitted
))
4197 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4198 fold_ignored_result (omitted
), t
);
4200 return non_lvalue_loc (loc
, t
);
4203 /* Return a tree for the case when the result of an expression is RESULT
4204 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4205 of the expression but are now not needed.
4207 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4208 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4209 evaluated before OMITTED2. Otherwise, if neither has side effects,
4210 just do the conversion of RESULT to TYPE. */
4213 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
4214 tree omitted1
, tree omitted2
)
4216 tree t
= fold_convert_loc (loc
, type
, result
);
4218 if (TREE_SIDE_EFFECTS (omitted2
))
4219 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
4220 if (TREE_SIDE_EFFECTS (omitted1
))
4221 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
4223 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
4227 /* Return a simplified tree node for the truth-negation of ARG. This
4228 never alters ARG itself. We assume that ARG is an operation that
4229 returns a truth value (0 or 1).
4231 FIXME: one would think we would fold the result, but it causes
4232 problems with the dominator optimizer. */
4235 fold_truth_not_expr (location_t loc
, tree arg
)
4237 tree type
= TREE_TYPE (arg
);
4238 enum tree_code code
= TREE_CODE (arg
);
4239 location_t loc1
, loc2
;
4241 /* If this is a comparison, we can simply invert it, except for
4242 floating-point non-equality comparisons, in which case we just
4243 enclose a TRUTH_NOT_EXPR around what we have. */
4245 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4247 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
4248 if (FLOAT_TYPE_P (op_type
)
4249 && flag_trapping_math
4250 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
4251 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
4254 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
4255 if (code
== ERROR_MARK
)
4258 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
4259 TREE_OPERAND (arg
, 1));
4260 if (TREE_NO_WARNING (arg
))
4261 TREE_NO_WARNING (ret
) = 1;
4268 return constant_boolean_node (integer_zerop (arg
), type
);
4270 case TRUTH_AND_EXPR
:
4271 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4272 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4273 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
4274 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4275 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4278 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4279 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4280 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
4281 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4282 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4284 case TRUTH_XOR_EXPR
:
4285 /* Here we can invert either operand. We invert the first operand
4286 unless the second operand is a TRUTH_NOT_EXPR in which case our
4287 result is the XOR of the first operand with the inside of the
4288 negation of the second operand. */
4290 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
4291 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
4292 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
4294 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
4295 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
4296 TREE_OPERAND (arg
, 1));
4298 case TRUTH_ANDIF_EXPR
:
4299 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4300 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4301 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
4302 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4303 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4305 case TRUTH_ORIF_EXPR
:
4306 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4307 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4308 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
4309 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4310 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4312 case TRUTH_NOT_EXPR
:
4313 return TREE_OPERAND (arg
, 0);
4317 tree arg1
= TREE_OPERAND (arg
, 1);
4318 tree arg2
= TREE_OPERAND (arg
, 2);
4320 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4321 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
4323 /* A COND_EXPR may have a throw as one operand, which
4324 then has void type. Just leave void operands
4326 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
4327 VOID_TYPE_P (TREE_TYPE (arg1
))
4328 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
4329 VOID_TYPE_P (TREE_TYPE (arg2
))
4330 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
4334 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4335 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4336 TREE_OPERAND (arg
, 0),
4337 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
4339 case NON_LVALUE_EXPR
:
4340 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4341 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
4344 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
4345 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4350 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4351 return build1_loc (loc
, TREE_CODE (arg
), type
,
4352 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4355 if (!integer_onep (TREE_OPERAND (arg
, 1)))
4357 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
4360 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4362 case CLEANUP_POINT_EXPR
:
4363 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4364 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
4365 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4372 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4373 assume that ARG is an operation that returns a truth value (0 or 1
4374 for scalars, 0 or -1 for vectors). Return the folded expression if
4375 folding is successful. Otherwise, return NULL_TREE. */
4378 fold_invert_truthvalue (location_t loc
, tree arg
)
4380 tree type
= TREE_TYPE (arg
);
4381 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
4387 /* Return a simplified tree node for the truth-negation of ARG. This
4388 never alters ARG itself. We assume that ARG is an operation that
4389 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4392 invert_truthvalue_loc (location_t loc
, tree arg
)
4394 if (TREE_CODE (arg
) == ERROR_MARK
)
4397 tree type
= TREE_TYPE (arg
);
4398 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
4404 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4405 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4406 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4407 is the original memory reference used to preserve the alias set of
4411 make_bit_field_ref (location_t loc
, tree inner
, tree orig_inner
, tree type
,
4412 HOST_WIDE_INT bitsize
, poly_int64 bitpos
,
4413 int unsignedp
, int reversep
)
4415 tree result
, bftype
;
4417 /* Attempt not to lose the access path if possible. */
4418 if (TREE_CODE (orig_inner
) == COMPONENT_REF
)
4420 tree ninner
= TREE_OPERAND (orig_inner
, 0);
4422 poly_int64 nbitsize
, nbitpos
;
4424 int nunsignedp
, nreversep
, nvolatilep
= 0;
4425 tree base
= get_inner_reference (ninner
, &nbitsize
, &nbitpos
,
4426 &noffset
, &nmode
, &nunsignedp
,
4427 &nreversep
, &nvolatilep
);
4429 && noffset
== NULL_TREE
4430 && known_subrange_p (bitpos
, bitsize
, nbitpos
, nbitsize
)
4440 alias_set_type iset
= get_alias_set (orig_inner
);
4441 if (iset
== 0 && get_alias_set (inner
) != iset
)
4442 inner
= fold_build2 (MEM_REF
, TREE_TYPE (inner
),
4443 build_fold_addr_expr (inner
),
4444 build_int_cst (ptr_type_node
, 0));
4446 if (known_eq (bitpos
, 0) && !reversep
)
4448 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
4449 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
4450 || POINTER_TYPE_P (TREE_TYPE (inner
)))
4451 && tree_fits_shwi_p (size
)
4452 && tree_to_shwi (size
) == bitsize
)
4453 return fold_convert_loc (loc
, type
, inner
);
4457 if (TYPE_PRECISION (bftype
) != bitsize
4458 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
4459 bftype
= build_nonstandard_integer_type (bitsize
, 0);
4461 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
4462 bitsize_int (bitsize
), bitsize_int (bitpos
));
4463 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
4466 result
= fold_convert_loc (loc
, type
, result
);
4471 /* Optimize a bit-field compare.
4473 There are two cases: First is a compare against a constant and the
4474 second is a comparison of two items where the fields are at the same
4475 bit position relative to the start of a chunk (byte, halfword, word)
4476 large enough to contain it. In these cases we can avoid the shift
4477 implicit in bitfield extractions.
4479 For constants, we emit a compare of the shifted constant with the
4480 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4481 compared. For two fields at the same position, we do the ANDs with the
4482 similar mask and compare the result of the ANDs.
4484 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4485 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4486 are the left and right operands of the comparison, respectively.
4488 If the optimization described above can be done, we return the resulting
4489 tree. Otherwise we return zero. */
4492 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
4493 tree compare_type
, tree lhs
, tree rhs
)
4495 poly_int64 plbitpos
, plbitsize
, rbitpos
, rbitsize
;
4496 HOST_WIDE_INT lbitpos
, lbitsize
, nbitpos
, nbitsize
;
4497 tree type
= TREE_TYPE (lhs
);
4499 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
4500 machine_mode lmode
, rmode
;
4501 scalar_int_mode nmode
;
4502 int lunsignedp
, runsignedp
;
4503 int lreversep
, rreversep
;
4504 int lvolatilep
= 0, rvolatilep
= 0;
4505 tree linner
, rinner
= NULL_TREE
;
4509 /* Get all the information about the extractions being done. If the bit size
4510 is the same as the size of the underlying object, we aren't doing an
4511 extraction at all and so can do nothing. We also don't want to
4512 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4513 then will no longer be able to replace it. */
4514 linner
= get_inner_reference (lhs
, &plbitsize
, &plbitpos
, &offset
, &lmode
,
4515 &lunsignedp
, &lreversep
, &lvolatilep
);
4517 || !known_size_p (plbitsize
)
4518 || !plbitsize
.is_constant (&lbitsize
)
4519 || !plbitpos
.is_constant (&lbitpos
)
4520 || known_eq (lbitsize
, GET_MODE_BITSIZE (lmode
))
4522 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
4527 rreversep
= lreversep
;
4530 /* If this is not a constant, we can only do something if bit positions,
4531 sizes, signedness and storage order are the same. */
4533 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
4534 &runsignedp
, &rreversep
, &rvolatilep
);
4537 || maybe_ne (lbitpos
, rbitpos
)
4538 || maybe_ne (lbitsize
, rbitsize
)
4539 || lunsignedp
!= runsignedp
4540 || lreversep
!= rreversep
4542 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
4547 /* Honor the C++ memory model and mimic what RTL expansion does. */
4548 poly_uint64 bitstart
= 0;
4549 poly_uint64 bitend
= 0;
4550 if (TREE_CODE (lhs
) == COMPONENT_REF
)
4552 get_bit_range (&bitstart
, &bitend
, lhs
, &plbitpos
, &offset
);
4553 if (!plbitpos
.is_constant (&lbitpos
) || offset
!= NULL_TREE
)
4557 /* See if we can find a mode to refer to this field. We should be able to,
4558 but fail if we can't. */
4559 if (!get_best_mode (lbitsize
, lbitpos
, bitstart
, bitend
,
4560 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
4561 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
4562 TYPE_ALIGN (TREE_TYPE (rinner
))),
4563 BITS_PER_WORD
, false, &nmode
))
4566 /* Set signed and unsigned types of the precision of this mode for the
4568 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
4570 /* Compute the bit position and size for the new reference and our offset
4571 within it. If the new reference is the same size as the original, we
4572 won't optimize anything, so return zero. */
4573 nbitsize
= GET_MODE_BITSIZE (nmode
);
4574 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
4576 if (nbitsize
== lbitsize
)
4579 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4580 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
4582 /* Make the mask to be used against the extracted field. */
4583 mask
= build_int_cst_type (unsigned_type
, -1);
4584 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
4585 mask
= const_binop (RSHIFT_EXPR
, mask
,
4586 size_int (nbitsize
- lbitsize
- lbitpos
));
4593 /* If not comparing with constant, just rework the comparison
4595 tree t1
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4596 nbitsize
, nbitpos
, 1, lreversep
);
4597 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t1
, mask
);
4598 tree t2
= make_bit_field_ref (loc
, rinner
, rhs
, unsigned_type
,
4599 nbitsize
, nbitpos
, 1, rreversep
);
4600 t2
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t2
, mask
);
4601 return fold_build2_loc (loc
, code
, compare_type
, t1
, t2
);
4604 /* Otherwise, we are handling the constant case. See if the constant is too
4605 big for the field. Warn and return a tree for 0 (false) if so. We do
4606 this not only for its own sake, but to avoid having to test for this
4607 error case below. If we didn't, we might generate wrong code.
4609 For unsigned fields, the constant shifted right by the field length should
4610 be all zero. For signed fields, the high-order bits should agree with
4615 if (wi::lrshift (wi::to_wide (rhs
), lbitsize
) != 0)
4617 warning (0, "comparison is always %d due to width of bit-field",
4619 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4624 wide_int tem
= wi::arshift (wi::to_wide (rhs
), lbitsize
- 1);
4625 if (tem
!= 0 && tem
!= -1)
4627 warning (0, "comparison is always %d due to width of bit-field",
4629 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4636 /* Single-bit compares should always be against zero. */
4637 if (lbitsize
== 1 && ! integer_zerop (rhs
))
4639 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
4640 rhs
= build_int_cst (type
, 0);
4643 /* Make a new bitfield reference, shift the constant over the
4644 appropriate number of bits and mask it with the computed mask
4645 (in case this was a signed field). If we changed it, make a new one. */
4646 lhs
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4647 nbitsize
, nbitpos
, 1, lreversep
);
4649 rhs
= const_binop (BIT_AND_EXPR
,
4650 const_binop (LSHIFT_EXPR
,
4651 fold_convert_loc (loc
, unsigned_type
, rhs
),
4652 size_int (lbitpos
)),
4655 lhs
= build2_loc (loc
, code
, compare_type
,
4656 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
4660 /* Subroutine for fold_truth_andor_1: decode a field reference.
4662 If EXP is a comparison reference, we return the innermost reference.
4664 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4665 set to the starting bit number.
4667 If the innermost field can be completely contained in a mode-sized
4668 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4670 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4671 otherwise it is not changed.
4673 *PUNSIGNEDP is set to the signedness of the field.
4675 *PREVERSEP is set to the storage order of the field.
4677 *PMASK is set to the mask used. This is either contained in a
4678 BIT_AND_EXPR or derived from the width of the field.
4680 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4682 Return 0 if this is not a component reference or is one that we can't
4683 do anything with. */
4686 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4687 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4688 int *punsignedp
, int *preversep
, int *pvolatilep
,
4689 tree
*pmask
, tree
*pand_mask
)
4692 tree outer_type
= 0;
4694 tree mask
, inner
, offset
;
4696 unsigned int precision
;
4698 /* All the optimizations using this function assume integer fields.
4699 There are problems with FP fields since the type_for_size call
4700 below can fail for, e.g., XFmode. */
4701 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
4704 /* We are interested in the bare arrangement of bits, so strip everything
4705 that doesn't affect the machine mode. However, record the type of the
4706 outermost expression if it may matter below. */
4707 if (CONVERT_EXPR_P (exp
)
4708 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4709 outer_type
= TREE_TYPE (exp
);
4712 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4714 and_mask
= TREE_OPERAND (exp
, 1);
4715 exp
= TREE_OPERAND (exp
, 0);
4716 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4717 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4721 poly_int64 poly_bitsize
, poly_bitpos
;
4722 inner
= get_inner_reference (exp
, &poly_bitsize
, &poly_bitpos
, &offset
,
4723 pmode
, punsignedp
, preversep
, pvolatilep
);
4724 if ((inner
== exp
&& and_mask
== 0)
4725 || !poly_bitsize
.is_constant (pbitsize
)
4726 || !poly_bitpos
.is_constant (pbitpos
)
4729 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4730 /* Reject out-of-bound accesses (PR79731). */
4731 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4732 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4733 *pbitpos
+ *pbitsize
) < 0))
4736 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4737 if (unsigned_type
== NULL_TREE
)
4742 /* If the number of bits in the reference is the same as the bitsize of
4743 the outer type, then the outer type gives the signedness. Otherwise
4744 (in case of a small bitfield) the signedness is unchanged. */
4745 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4746 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4748 /* Compute the mask to access the bitfield. */
4749 precision
= TYPE_PRECISION (unsigned_type
);
4751 mask
= build_int_cst_type (unsigned_type
, -1);
4753 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4754 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4756 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4758 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4759 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4762 *pand_mask
= and_mask
;
4766 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4767 bit positions and MASK is SIGNED. */
4770 all_ones_mask_p (const_tree mask
, unsigned int size
)
4772 tree type
= TREE_TYPE (mask
);
4773 unsigned int precision
= TYPE_PRECISION (type
);
4775 /* If this function returns true when the type of the mask is
4776 UNSIGNED, then there will be errors. In particular see
4777 gcc.c-torture/execute/990326-1.c. There does not appear to be
4778 any documentation paper trail as to why this is so. But the pre
4779 wide-int worked with that restriction and it has been preserved
4781 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
4784 return wi::mask (size
, false, precision
) == wi::to_wide (mask
);
4787 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4788 represents the sign bit of EXP's type. If EXP represents a sign
4789 or zero extension, also test VAL against the unextended type.
4790 The return value is the (sub)expression whose sign bit is VAL,
4791 or NULL_TREE otherwise. */
4794 sign_bit_p (tree exp
, const_tree val
)
4799 /* Tree EXP must have an integral type. */
4800 t
= TREE_TYPE (exp
);
4801 if (! INTEGRAL_TYPE_P (t
))
4804 /* Tree VAL must be an integer constant. */
4805 if (TREE_CODE (val
) != INTEGER_CST
4806 || TREE_OVERFLOW (val
))
4809 width
= TYPE_PRECISION (t
);
4810 if (wi::only_sign_bit_p (wi::to_wide (val
), width
))
4813 /* Handle extension from a narrower type. */
4814 if (TREE_CODE (exp
) == NOP_EXPR
4815 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4816 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4821 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4822 to be evaluated unconditionally. */
4825 simple_operand_p (const_tree exp
)
4827 /* Strip any conversions that don't change the machine mode. */
4830 return (CONSTANT_CLASS_P (exp
)
4831 || TREE_CODE (exp
) == SSA_NAME
4833 && ! TREE_ADDRESSABLE (exp
)
4834 && ! TREE_THIS_VOLATILE (exp
)
4835 && ! DECL_NONLOCAL (exp
)
4836 /* Don't regard global variables as simple. They may be
4837 allocated in ways unknown to the compiler (shared memory,
4838 #pragma weak, etc). */
4839 && ! TREE_PUBLIC (exp
)
4840 && ! DECL_EXTERNAL (exp
)
4841 /* Weakrefs are not safe to be read, since they can be NULL.
4842 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4843 have DECL_WEAK flag set. */
4844 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4845 /* Loading a static variable is unduly expensive, but global
4846 registers aren't expensive. */
4847 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4850 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4851 to be evaluated unconditionally.
4852 I addition to simple_operand_p, we assume that comparisons, conversions,
4853 and logic-not operations are simple, if their operands are simple, too. */
4856 simple_operand_p_2 (tree exp
)
4858 enum tree_code code
;
4860 if (TREE_SIDE_EFFECTS (exp
) || generic_expr_could_trap_p (exp
))
4863 while (CONVERT_EXPR_P (exp
))
4864 exp
= TREE_OPERAND (exp
, 0);
4866 code
= TREE_CODE (exp
);
4868 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4869 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4870 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4872 if (code
== TRUTH_NOT_EXPR
)
4873 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4875 return simple_operand_p (exp
);
4879 /* The following functions are subroutines to fold_range_test and allow it to
4880 try to change a logical combination of comparisons into a range test.
4883 X == 2 || X == 3 || X == 4 || X == 5
4887 (unsigned) (X - 2) <= 3
4889 We describe each set of comparisons as being either inside or outside
4890 a range, using a variable named like IN_P, and then describe the
4891 range with a lower and upper bound. If one of the bounds is omitted,
4892 it represents either the highest or lowest value of the type.
4894 In the comments below, we represent a range by two numbers in brackets
4895 preceded by a "+" to designate being inside that range, or a "-" to
4896 designate being outside that range, so the condition can be inverted by
4897 flipping the prefix. An omitted bound is represented by a "-". For
4898 example, "- [-, 10]" means being outside the range starting at the lowest
4899 possible value and ending at 10, in other words, being greater than 10.
4900 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4903 We set up things so that the missing bounds are handled in a consistent
4904 manner so neither a missing bound nor "true" and "false" need to be
4905 handled using a special case. */
4907 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4908 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4909 and UPPER1_P are nonzero if the respective argument is an upper bound
4910 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4911 must be specified for a comparison. ARG1 will be converted to ARG0's
4912 type if both are specified. */
4915 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4916 tree arg1
, int upper1_p
)
4922 /* If neither arg represents infinity, do the normal operation.
4923 Else, if not a comparison, return infinity. Else handle the special
4924 comparison rules. Note that most of the cases below won't occur, but
4925 are handled for consistency. */
4927 if (arg0
!= 0 && arg1
!= 0)
4929 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4930 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4932 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4935 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4938 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4939 for neither. In real maths, we cannot assume open ended ranges are
4940 the same. But, this is computer arithmetic, where numbers are finite.
4941 We can therefore make the transformation of any unbounded range with
4942 the value Z, Z being greater than any representable number. This permits
4943 us to treat unbounded ranges as equal. */
4944 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4945 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4949 result
= sgn0
== sgn1
;
4952 result
= sgn0
!= sgn1
;
4955 result
= sgn0
< sgn1
;
4958 result
= sgn0
<= sgn1
;
4961 result
= sgn0
> sgn1
;
4964 result
= sgn0
>= sgn1
;
4970 return constant_boolean_node (result
, type
);
4973 /* Helper routine for make_range. Perform one step for it, return
4974 new expression if the loop should continue or NULL_TREE if it should
4978 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4979 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4980 bool *strict_overflow_p
)
4982 tree arg0_type
= TREE_TYPE (arg0
);
4983 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4984 int in_p
= *p_in_p
, n_in_p
;
4988 case TRUTH_NOT_EXPR
:
4989 /* We can only do something if the range is testing for zero. */
4990 if (low
== NULL_TREE
|| high
== NULL_TREE
4991 || ! integer_zerop (low
) || ! integer_zerop (high
))
4996 case EQ_EXPR
: case NE_EXPR
:
4997 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4998 /* We can only do something if the range is testing for zero
4999 and if the second operand is an integer constant. Note that
5000 saying something is "in" the range we make is done by
5001 complementing IN_P since it will set in the initial case of
5002 being not equal to zero; "out" is leaving it alone. */
5003 if (low
== NULL_TREE
|| high
== NULL_TREE
5004 || ! integer_zerop (low
) || ! integer_zerop (high
)
5005 || TREE_CODE (arg1
) != INTEGER_CST
)
5010 case NE_EXPR
: /* - [c, c] */
5013 case EQ_EXPR
: /* + [c, c] */
5014 in_p
= ! in_p
, low
= high
= arg1
;
5016 case GT_EXPR
: /* - [-, c] */
5017 low
= 0, high
= arg1
;
5019 case GE_EXPR
: /* + [c, -] */
5020 in_p
= ! in_p
, low
= arg1
, high
= 0;
5022 case LT_EXPR
: /* - [c, -] */
5023 low
= arg1
, high
= 0;
5025 case LE_EXPR
: /* + [-, c] */
5026 in_p
= ! in_p
, low
= 0, high
= arg1
;
5032 /* If this is an unsigned comparison, we also know that EXP is
5033 greater than or equal to zero. We base the range tests we make
5034 on that fact, so we record it here so we can parse existing
5035 range tests. We test arg0_type since often the return type
5036 of, e.g. EQ_EXPR, is boolean. */
5037 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
5039 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
5041 build_int_cst (arg0_type
, 0),
5045 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
5047 /* If the high bound is missing, but we have a nonzero low
5048 bound, reverse the range so it goes from zero to the low bound
5050 if (high
== 0 && low
&& ! integer_zerop (low
))
5053 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
5054 build_int_cst (TREE_TYPE (low
), 1), 0);
5055 low
= build_int_cst (arg0_type
, 0);
5065 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5066 low and high are non-NULL, then normalize will DTRT. */
5067 if (!TYPE_UNSIGNED (arg0_type
)
5068 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5070 if (low
== NULL_TREE
)
5071 low
= TYPE_MIN_VALUE (arg0_type
);
5072 if (high
== NULL_TREE
)
5073 high
= TYPE_MAX_VALUE (arg0_type
);
5076 /* (-x) IN [a,b] -> x in [-b, -a] */
5077 n_low
= range_binop (MINUS_EXPR
, exp_type
,
5078 build_int_cst (exp_type
, 0),
5080 n_high
= range_binop (MINUS_EXPR
, exp_type
,
5081 build_int_cst (exp_type
, 0),
5083 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
5089 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
5090 build_int_cst (exp_type
, 1));
5094 if (TREE_CODE (arg1
) != INTEGER_CST
)
5097 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5098 move a constant to the other side. */
5099 if (!TYPE_UNSIGNED (arg0_type
)
5100 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5103 /* If EXP is signed, any overflow in the computation is undefined,
5104 so we don't worry about it so long as our computations on
5105 the bounds don't overflow. For unsigned, overflow is defined
5106 and this is exactly the right thing. */
5107 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5108 arg0_type
, low
, 0, arg1
, 0);
5109 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5110 arg0_type
, high
, 1, arg1
, 0);
5111 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
5112 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
5115 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5116 *strict_overflow_p
= true;
5119 /* Check for an unsigned range which has wrapped around the maximum
5120 value thus making n_high < n_low, and normalize it. */
5121 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
5123 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
5124 build_int_cst (TREE_TYPE (n_high
), 1), 0);
5125 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
5126 build_int_cst (TREE_TYPE (n_low
), 1), 0);
5128 /* If the range is of the form +/- [ x+1, x ], we won't
5129 be able to normalize it. But then, it represents the
5130 whole range or the empty set, so make it
5132 if (tree_int_cst_equal (n_low
, low
)
5133 && tree_int_cst_equal (n_high
, high
))
5139 low
= n_low
, high
= n_high
;
5147 case NON_LVALUE_EXPR
:
5148 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
5151 if (! INTEGRAL_TYPE_P (arg0_type
)
5152 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
5153 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
5156 n_low
= low
, n_high
= high
;
5159 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
5162 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
5164 /* If we're converting arg0 from an unsigned type, to exp,
5165 a signed type, we will be doing the comparison as unsigned.
5166 The tests above have already verified that LOW and HIGH
5169 So we have to ensure that we will handle large unsigned
5170 values the same way that the current signed bounds treat
5173 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
5177 /* For fixed-point modes, we need to pass the saturating flag
5178 as the 2nd parameter. */
5179 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
5181 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
5182 TYPE_SATURATING (arg0_type
));
5185 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
5187 /* A range without an upper bound is, naturally, unbounded.
5188 Since convert would have cropped a very large value, use
5189 the max value for the destination type. */
5191 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
5192 : TYPE_MAX_VALUE (arg0_type
);
5194 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
5195 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
5196 fold_convert_loc (loc
, arg0_type
,
5198 build_int_cst (arg0_type
, 1));
5200 /* If the low bound is specified, "and" the range with the
5201 range for which the original unsigned value will be
5205 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
5206 1, fold_convert_loc (loc
, arg0_type
,
5211 in_p
= (n_in_p
== in_p
);
5215 /* Otherwise, "or" the range with the range of the input
5216 that will be interpreted as negative. */
5217 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
5218 1, fold_convert_loc (loc
, arg0_type
,
5223 in_p
= (in_p
!= n_in_p
);
5237 /* Given EXP, a logical expression, set the range it is testing into
5238 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5239 actually being tested. *PLOW and *PHIGH will be made of the same
5240 type as the returned expression. If EXP is not a comparison, we
5241 will most likely not be returning a useful value and range. Set
5242 *STRICT_OVERFLOW_P to true if the return value is only valid
5243 because signed overflow is undefined; otherwise, do not change
5244 *STRICT_OVERFLOW_P. */
5247 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
5248 bool *strict_overflow_p
)
5250 enum tree_code code
;
5251 tree arg0
, arg1
= NULL_TREE
;
5252 tree exp_type
, nexp
;
5255 location_t loc
= EXPR_LOCATION (exp
);
5257 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5258 and see if we can refine the range. Some of the cases below may not
5259 happen, but it doesn't seem worth worrying about this. We "continue"
5260 the outer loop when we've changed something; otherwise we "break"
5261 the switch, which will "break" the while. */
5264 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
5268 code
= TREE_CODE (exp
);
5269 exp_type
= TREE_TYPE (exp
);
5272 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
5274 if (TREE_OPERAND_LENGTH (exp
) > 0)
5275 arg0
= TREE_OPERAND (exp
, 0);
5276 if (TREE_CODE_CLASS (code
) == tcc_binary
5277 || TREE_CODE_CLASS (code
) == tcc_comparison
5278 || (TREE_CODE_CLASS (code
) == tcc_expression
5279 && TREE_OPERAND_LENGTH (exp
) > 1))
5280 arg1
= TREE_OPERAND (exp
, 1);
5282 if (arg0
== NULL_TREE
)
5285 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
5286 &high
, &in_p
, strict_overflow_p
);
5287 if (nexp
== NULL_TREE
)
5292 /* If EXP is a constant, we can evaluate whether this is true or false. */
5293 if (TREE_CODE (exp
) == INTEGER_CST
)
5295 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
5297 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5303 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5307 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5308 a bitwise check i.e. when
5309 LOW == 0xXX...X00...0
5310 HIGH == 0xXX...X11...1
5311 Return corresponding mask in MASK and stem in VALUE. */
5314 maskable_range_p (const_tree low
, const_tree high
, tree type
, tree
*mask
,
5317 if (TREE_CODE (low
) != INTEGER_CST
5318 || TREE_CODE (high
) != INTEGER_CST
)
5321 unsigned prec
= TYPE_PRECISION (type
);
5322 wide_int lo
= wi::to_wide (low
, prec
);
5323 wide_int hi
= wi::to_wide (high
, prec
);
5325 wide_int end_mask
= lo
^ hi
;
5326 if ((end_mask
& (end_mask
+ 1)) != 0
5327 || (lo
& end_mask
) != 0)
5330 wide_int stem_mask
= ~end_mask
;
5331 wide_int stem
= lo
& stem_mask
;
5332 if (stem
!= (hi
& stem_mask
))
5335 *mask
= wide_int_to_tree (type
, stem_mask
);
5336 *value
= wide_int_to_tree (type
, stem
);
5341 /* Helper routine for build_range_check and match.pd. Return the type to
5342 perform the check or NULL if it shouldn't be optimized. */
5345 range_check_type (tree etype
)
5347 /* First make sure that arithmetics in this type is valid, then make sure
5348 that it wraps around. */
5349 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
5350 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
), 1);
5352 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_UNSIGNED (etype
))
5354 tree utype
, minv
, maxv
;
5356 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5357 for the type in question, as we rely on this here. */
5358 utype
= unsigned_type_for (etype
);
5359 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
5360 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
5361 build_int_cst (TREE_TYPE (maxv
), 1), 1);
5362 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
5364 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
5370 else if (POINTER_TYPE_P (etype
))
5371 etype
= unsigned_type_for (etype
);
5375 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5376 type, TYPE, return an expression to test if EXP is in (or out of, depending
5377 on IN_P) the range. Return 0 if the test couldn't be created. */
5380 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
5381 tree low
, tree high
)
5383 tree etype
= TREE_TYPE (exp
), mask
, value
;
5385 /* Disable this optimization for function pointer expressions
5386 on targets that require function pointer canonicalization. */
5387 if (targetm
.have_canonicalize_funcptr_for_compare ()
5388 && POINTER_TYPE_P (etype
)
5389 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype
)))
5394 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
5396 return invert_truthvalue_loc (loc
, value
);
5401 if (low
== 0 && high
== 0)
5402 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
5405 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
5406 fold_convert_loc (loc
, etype
, high
));
5409 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
5410 fold_convert_loc (loc
, etype
, low
));
5412 if (operand_equal_p (low
, high
, 0))
5413 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
5414 fold_convert_loc (loc
, etype
, low
));
5416 if (TREE_CODE (exp
) == BIT_AND_EXPR
5417 && maskable_range_p (low
, high
, etype
, &mask
, &value
))
5418 return fold_build2_loc (loc
, EQ_EXPR
, type
,
5419 fold_build2_loc (loc
, BIT_AND_EXPR
, etype
,
5423 if (integer_zerop (low
))
5425 if (! TYPE_UNSIGNED (etype
))
5427 etype
= unsigned_type_for (etype
);
5428 high
= fold_convert_loc (loc
, etype
, high
);
5429 exp
= fold_convert_loc (loc
, etype
, exp
);
5431 return build_range_check (loc
, type
, exp
, 1, 0, high
);
5434 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5435 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
5437 int prec
= TYPE_PRECISION (etype
);
5439 if (wi::mask
<widest_int
> (prec
- 1, false) == wi::to_widest (high
))
5441 if (TYPE_UNSIGNED (etype
))
5443 tree signed_etype
= signed_type_for (etype
);
5444 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
5446 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
5448 etype
= signed_etype
;
5449 exp
= fold_convert_loc (loc
, etype
, exp
);
5451 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
5452 build_int_cst (etype
, 0));
5456 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5457 This requires wrap-around arithmetics for the type of the expression. */
5458 etype
= range_check_type (etype
);
5459 if (etype
== NULL_TREE
)
5462 high
= fold_convert_loc (loc
, etype
, high
);
5463 low
= fold_convert_loc (loc
, etype
, low
);
5464 exp
= fold_convert_loc (loc
, etype
, exp
);
5466 value
= const_binop (MINUS_EXPR
, high
, low
);
5468 if (value
!= 0 && !TREE_OVERFLOW (value
))
5469 return build_range_check (loc
, type
,
5470 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
5471 1, build_int_cst (etype
, 0), value
);
5476 /* Return the predecessor of VAL in its type, handling the infinite case. */
5479 range_predecessor (tree val
)
5481 tree type
= TREE_TYPE (val
);
5483 if (INTEGRAL_TYPE_P (type
)
5484 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
5487 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
5488 build_int_cst (TREE_TYPE (val
), 1), 0);
5491 /* Return the successor of VAL in its type, handling the infinite case. */
5494 range_successor (tree val
)
5496 tree type
= TREE_TYPE (val
);
5498 if (INTEGRAL_TYPE_P (type
)
5499 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
5502 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
5503 build_int_cst (TREE_TYPE (val
), 1), 0);
5506 /* Given two ranges, see if we can merge them into one. Return 1 if we
5507 can, 0 if we can't. Set the output range into the specified parameters. */
5510 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
5511 tree high0
, int in1_p
, tree low1
, tree high1
)
5519 int lowequal
= ((low0
== 0 && low1
== 0)
5520 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5521 low0
, 0, low1
, 0)));
5522 int highequal
= ((high0
== 0 && high1
== 0)
5523 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5524 high0
, 1, high1
, 1)));
5526 /* Make range 0 be the range that starts first, or ends last if they
5527 start at the same value. Swap them if it isn't. */
5528 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5531 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5532 high1
, 1, high0
, 1))))
5534 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
5535 tem
= low0
, low0
= low1
, low1
= tem
;
5536 tem
= high0
, high0
= high1
, high1
= tem
;
5539 /* If the second range is != high1 where high1 is the type maximum of
5540 the type, try first merging with < high1 range. */
5543 && TREE_CODE (low1
) == INTEGER_CST
5544 && (TREE_CODE (TREE_TYPE (low1
)) == INTEGER_TYPE
5545 || (TREE_CODE (TREE_TYPE (low1
)) == ENUMERAL_TYPE
5546 && known_eq (TYPE_PRECISION (TREE_TYPE (low1
)),
5547 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1
))))))
5548 && operand_equal_p (low1
, high1
, 0))
5550 if (tree_int_cst_equal (low1
, TYPE_MAX_VALUE (TREE_TYPE (low1
)))
5551 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5552 !in1_p
, NULL_TREE
, range_predecessor (low1
)))
5554 /* Similarly for the second range != low1 where low1 is the type minimum
5555 of the type, try first merging with > low1 range. */
5556 if (tree_int_cst_equal (low1
, TYPE_MIN_VALUE (TREE_TYPE (low1
)))
5557 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5558 !in1_p
, range_successor (low1
), NULL_TREE
))
5562 /* Now flag two cases, whether the ranges are disjoint or whether the
5563 second range is totally subsumed in the first. Note that the tests
5564 below are simplified by the ones above. */
5565 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
5566 high0
, 1, low1
, 0));
5567 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5568 high1
, 1, high0
, 1));
5570 /* We now have four cases, depending on whether we are including or
5571 excluding the two ranges. */
5574 /* If they don't overlap, the result is false. If the second range
5575 is a subset it is the result. Otherwise, the range is from the start
5576 of the second to the end of the first. */
5578 in_p
= 0, low
= high
= 0;
5580 in_p
= 1, low
= low1
, high
= high1
;
5582 in_p
= 1, low
= low1
, high
= high0
;
5585 else if (in0_p
&& ! in1_p
)
5587 /* If they don't overlap, the result is the first range. If they are
5588 equal, the result is false. If the second range is a subset of the
5589 first, and the ranges begin at the same place, we go from just after
5590 the end of the second range to the end of the first. If the second
5591 range is not a subset of the first, or if it is a subset and both
5592 ranges end at the same place, the range starts at the start of the
5593 first range and ends just before the second range.
5594 Otherwise, we can't describe this as a single range. */
5596 in_p
= 1, low
= low0
, high
= high0
;
5597 else if (lowequal
&& highequal
)
5598 in_p
= 0, low
= high
= 0;
5599 else if (subset
&& lowequal
)
5601 low
= range_successor (high1
);
5606 /* We are in the weird situation where high0 > high1 but
5607 high1 has no successor. Punt. */
5611 else if (! subset
|| highequal
)
5614 high
= range_predecessor (low1
);
5618 /* low0 < low1 but low1 has no predecessor. Punt. */
5626 else if (! in0_p
&& in1_p
)
5628 /* If they don't overlap, the result is the second range. If the second
5629 is a subset of the first, the result is false. Otherwise,
5630 the range starts just after the first range and ends at the
5631 end of the second. */
5633 in_p
= 1, low
= low1
, high
= high1
;
5634 else if (subset
|| highequal
)
5635 in_p
= 0, low
= high
= 0;
5638 low
= range_successor (high0
);
5643 /* high1 > high0 but high0 has no successor. Punt. */
5651 /* The case where we are excluding both ranges. Here the complex case
5652 is if they don't overlap. In that case, the only time we have a
5653 range is if they are adjacent. If the second is a subset of the
5654 first, the result is the first. Otherwise, the range to exclude
5655 starts at the beginning of the first range and ends at the end of the
5659 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5660 range_successor (high0
),
5662 in_p
= 0, low
= low0
, high
= high1
;
5665 /* Canonicalize - [min, x] into - [-, x]. */
5666 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5667 switch (TREE_CODE (TREE_TYPE (low0
)))
5670 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0
)),
5672 (TYPE_MODE (TREE_TYPE (low0
)))))
5676 if (tree_int_cst_equal (low0
,
5677 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5681 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5682 && integer_zerop (low0
))
5689 /* Canonicalize - [x, max] into - [x, -]. */
5690 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5691 switch (TREE_CODE (TREE_TYPE (high1
)))
5694 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1
)),
5696 (TYPE_MODE (TREE_TYPE (high1
)))))
5700 if (tree_int_cst_equal (high1
,
5701 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5705 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5706 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5708 build_int_cst (TREE_TYPE (high1
), 1),
5716 /* The ranges might be also adjacent between the maximum and
5717 minimum values of the given type. For
5718 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5719 return + [x + 1, y - 1]. */
5720 if (low0
== 0 && high1
== 0)
5722 low
= range_successor (high0
);
5723 high
= range_predecessor (low1
);
5724 if (low
== 0 || high
== 0)
5734 in_p
= 0, low
= low0
, high
= high0
;
5736 in_p
= 0, low
= low0
, high
= high1
;
5739 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5744 /* Subroutine of fold, looking inside expressions of the form
5745 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5746 of the COND_EXPR. This function is being used also to optimize
5747 A op B ? C : A, by reversing the comparison first.
5749 Return a folded expression whose code is not a COND_EXPR
5750 anymore, or NULL_TREE if no folding opportunity is found. */
5753 fold_cond_expr_with_comparison (location_t loc
, tree type
,
5754 tree arg0
, tree arg1
, tree arg2
)
5756 enum tree_code comp_code
= TREE_CODE (arg0
);
5757 tree arg00
= TREE_OPERAND (arg0
, 0);
5758 tree arg01
= TREE_OPERAND (arg0
, 1);
5759 tree arg1_type
= TREE_TYPE (arg1
);
5765 /* If we have A op 0 ? A : -A, consider applying the following
5768 A == 0? A : -A same as -A
5769 A != 0? A : -A same as A
5770 A >= 0? A : -A same as abs (A)
5771 A > 0? A : -A same as abs (A)
5772 A <= 0? A : -A same as -abs (A)
5773 A < 0? A : -A same as -abs (A)
5775 None of these transformations work for modes with signed
5776 zeros. If A is +/-0, the first two transformations will
5777 change the sign of the result (from +0 to -0, or vice
5778 versa). The last four will fix the sign of the result,
5779 even though the original expressions could be positive or
5780 negative, depending on the sign of A.
5782 Note that all these transformations are correct if A is
5783 NaN, since the two alternatives (A and -A) are also NaNs. */
5784 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5785 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5786 ? real_zerop (arg01
)
5787 : integer_zerop (arg01
))
5788 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5789 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5790 /* In the case that A is of the form X-Y, '-A' (arg2) may
5791 have already been folded to Y-X, check for that. */
5792 || (TREE_CODE (arg1
) == MINUS_EXPR
5793 && TREE_CODE (arg2
) == MINUS_EXPR
5794 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5795 TREE_OPERAND (arg2
, 1), 0)
5796 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5797 TREE_OPERAND (arg2
, 0), 0))))
5802 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5803 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5806 return fold_convert_loc (loc
, type
, arg1
);
5809 if (flag_trapping_math
)
5814 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5816 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5817 return fold_convert_loc (loc
, type
, tem
);
5820 if (flag_trapping_math
)
5825 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5827 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5828 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
5830 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5831 is not, invokes UB both in abs and in the negation of it.
5832 So, use ABSU_EXPR instead. */
5833 tree utype
= unsigned_type_for (TREE_TYPE (arg1
));
5834 tem
= fold_build1_loc (loc
, ABSU_EXPR
, utype
, arg1
);
5835 tem
= negate_expr (tem
);
5836 return fold_convert_loc (loc
, type
, tem
);
5840 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5841 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5844 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5848 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5849 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5850 both transformations are correct when A is NaN: A != 0
5851 is then true, and A == 0 is false. */
5853 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5854 && integer_zerop (arg01
) && integer_zerop (arg2
))
5856 if (comp_code
== NE_EXPR
)
5857 return fold_convert_loc (loc
, type
, arg1
);
5858 else if (comp_code
== EQ_EXPR
)
5859 return build_zero_cst (type
);
5862 /* Try some transformations of A op B ? A : B.
5864 A == B? A : B same as B
5865 A != B? A : B same as A
5866 A >= B? A : B same as max (A, B)
5867 A > B? A : B same as max (B, A)
5868 A <= B? A : B same as min (A, B)
5869 A < B? A : B same as min (B, A)
5871 As above, these transformations don't work in the presence
5872 of signed zeros. For example, if A and B are zeros of
5873 opposite sign, the first two transformations will change
5874 the sign of the result. In the last four, the original
5875 expressions give different results for (A=+0, B=-0) and
5876 (A=-0, B=+0), but the transformed expressions do not.
5878 The first two transformations are correct if either A or B
5879 is a NaN. In the first transformation, the condition will
5880 be false, and B will indeed be chosen. In the case of the
5881 second transformation, the condition A != B will be true,
5882 and A will be chosen.
5884 The conversions to max() and min() are not correct if B is
5885 a number and A is not. The conditions in the original
5886 expressions will be false, so all four give B. The min()
5887 and max() versions would give a NaN instead. */
5888 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5889 && operand_equal_for_comparison_p (arg01
, arg2
)
5890 /* Avoid these transformations if the COND_EXPR may be used
5891 as an lvalue in the C++ front-end. PR c++/19199. */
5893 || VECTOR_TYPE_P (type
)
5894 || (! lang_GNU_CXX ()
5895 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5896 || ! maybe_lvalue_p (arg1
)
5897 || ! maybe_lvalue_p (arg2
)))
5899 tree comp_op0
= arg00
;
5900 tree comp_op1
= arg01
;
5901 tree comp_type
= TREE_TYPE (comp_op0
);
5906 return fold_convert_loc (loc
, type
, arg2
);
5908 return fold_convert_loc (loc
, type
, arg1
);
5913 /* In C++ a ?: expression can be an lvalue, so put the
5914 operand which will be used if they are equal first
5915 so that we can convert this back to the
5916 corresponding COND_EXPR. */
5917 if (!HONOR_NANS (arg1
))
5919 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5920 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5921 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5922 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5923 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5924 comp_op1
, comp_op0
);
5925 return fold_convert_loc (loc
, type
, tem
);
5932 if (!HONOR_NANS (arg1
))
5934 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5935 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5936 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5937 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5938 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5939 comp_op1
, comp_op0
);
5940 return fold_convert_loc (loc
, type
, tem
);
5944 if (!HONOR_NANS (arg1
))
5945 return fold_convert_loc (loc
, type
, arg2
);
5948 if (!HONOR_NANS (arg1
))
5949 return fold_convert_loc (loc
, type
, arg1
);
5952 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5962 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5963 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5964 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5968 /* EXP is some logical combination of boolean tests. See if we can
5969 merge it into some range test. Return the new tree if so. */
5972 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5975 int or_op
= (code
== TRUTH_ORIF_EXPR
5976 || code
== TRUTH_OR_EXPR
);
5977 int in0_p
, in1_p
, in_p
;
5978 tree low0
, low1
, low
, high0
, high1
, high
;
5979 bool strict_overflow_p
= false;
5981 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5982 "when simplifying range test");
5984 if (!INTEGRAL_TYPE_P (type
))
5987 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5988 /* If op0 is known true or false and this is a short-circuiting
5989 operation we must not merge with op1 since that makes side-effects
5990 unconditional. So special-case this. */
5992 && ((code
== TRUTH_ORIF_EXPR
&& in0_p
)
5993 || (code
== TRUTH_ANDIF_EXPR
&& !in0_p
)))
5995 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5997 /* If this is an OR operation, invert both sides; we will invert
5998 again at the end. */
6000 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
6002 /* If both expressions are the same, if we can merge the ranges, and we
6003 can build the range test, return it or it inverted. If one of the
6004 ranges is always true or always false, consider it to be the same
6005 expression as the other. */
6006 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
6007 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
6009 && (tem
= (build_range_check (loc
, type
,
6011 : rhs
!= 0 ? rhs
: integer_zero_node
,
6012 in_p
, low
, high
))) != 0)
6014 if (strict_overflow_p
)
6015 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
6016 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
6019 /* On machines where the branch cost is expensive, if this is a
6020 short-circuited branch and the underlying object on both sides
6021 is the same, make a non-short-circuit operation. */
6022 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
6023 if (param_logical_op_non_short_circuit
!= -1)
6024 logical_op_non_short_circuit
6025 = param_logical_op_non_short_circuit
;
6026 if (logical_op_non_short_circuit
6027 && !flag_sanitize_coverage
6028 && lhs
!= 0 && rhs
!= 0
6029 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6030 && operand_equal_p (lhs
, rhs
, 0))
6032 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6033 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6034 which cases we can't do this. */
6035 if (simple_operand_p (lhs
))
6036 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6037 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6040 else if (!lang_hooks
.decls
.global_bindings_p ()
6041 && !CONTAINS_PLACEHOLDER_P (lhs
))
6043 tree common
= save_expr (lhs
);
6045 if ((lhs
= build_range_check (loc
, type
, common
,
6046 or_op
? ! in0_p
: in0_p
,
6048 && (rhs
= build_range_check (loc
, type
, common
,
6049 or_op
? ! in1_p
: in1_p
,
6052 if (strict_overflow_p
)
6053 fold_overflow_warning (warnmsg
,
6054 WARN_STRICT_OVERFLOW_COMPARISON
);
6055 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6056 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6065 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6066 bit value. Arrange things so the extra bits will be set to zero if and
6067 only if C is signed-extended to its full width. If MASK is nonzero,
6068 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6071 unextend (tree c
, int p
, int unsignedp
, tree mask
)
6073 tree type
= TREE_TYPE (c
);
6074 int modesize
= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type
));
6077 if (p
== modesize
|| unsignedp
)
6080 /* We work by getting just the sign bit into the low-order bit, then
6081 into the high-order bit, then sign-extend. We then XOR that value
6083 temp
= build_int_cst (TREE_TYPE (c
),
6084 wi::extract_uhwi (wi::to_wide (c
), p
- 1, 1));
6086 /* We must use a signed type in order to get an arithmetic right shift.
6087 However, we must also avoid introducing accidental overflows, so that
6088 a subsequent call to integer_zerop will work. Hence we must
6089 do the type conversion here. At this point, the constant is either
6090 zero or one, and the conversion to a signed type can never overflow.
6091 We could get an overflow if this conversion is done anywhere else. */
6092 if (TYPE_UNSIGNED (type
))
6093 temp
= fold_convert (signed_type_for (type
), temp
);
6095 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
6096 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
6098 temp
= const_binop (BIT_AND_EXPR
, temp
,
6099 fold_convert (TREE_TYPE (c
), mask
));
6100 /* If necessary, convert the type back to match the type of C. */
6101 if (TYPE_UNSIGNED (type
))
6102 temp
= fold_convert (type
, temp
);
6104 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
6107 /* For an expression that has the form
6111 we can drop one of the inner expressions and simplify to
6115 LOC is the location of the resulting expression. OP is the inner
6116 logical operation; the left-hand side in the examples above, while CMPOP
6117 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6118 removing a condition that guards another, as in
6119 (A != NULL && A->...) || A == NULL
6120 which we must not transform. If RHS_ONLY is true, only eliminate the
6121 right-most operand of the inner logical operation. */
6124 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
6127 tree type
= TREE_TYPE (cmpop
);
6128 enum tree_code code
= TREE_CODE (cmpop
);
6129 enum tree_code truthop_code
= TREE_CODE (op
);
6130 tree lhs
= TREE_OPERAND (op
, 0);
6131 tree rhs
= TREE_OPERAND (op
, 1);
6132 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
6133 enum tree_code rhs_code
= TREE_CODE (rhs
);
6134 enum tree_code lhs_code
= TREE_CODE (lhs
);
6135 enum tree_code inv_code
;
6137 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
6140 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
6143 if (rhs_code
== truthop_code
)
6145 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
6146 if (newrhs
!= NULL_TREE
)
6149 rhs_code
= TREE_CODE (rhs
);
6152 if (lhs_code
== truthop_code
&& !rhs_only
)
6154 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
6155 if (newlhs
!= NULL_TREE
)
6158 lhs_code
= TREE_CODE (lhs
);
6162 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
6163 if (inv_code
== rhs_code
6164 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6165 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6167 if (!rhs_only
&& inv_code
== lhs_code
6168 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6169 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6171 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
6172 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
6177 /* Find ways of folding logical expressions of LHS and RHS:
6178 Try to merge two comparisons to the same innermost item.
6179 Look for range tests like "ch >= '0' && ch <= '9'".
6180 Look for combinations of simple terms on machines with expensive branches
6181 and evaluate the RHS unconditionally.
6183 For example, if we have p->a == 2 && p->b == 4 and we can make an
6184 object large enough to span both A and B, we can do this with a comparison
6185 against the object ANDed with the a mask.
6187 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6188 operations to do this with one comparison.
6190 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6191 function and the one above.
6193 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6194 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6196 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6199 We return the simplified tree or 0 if no optimization is possible. */
6202 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
6205 /* If this is the "or" of two comparisons, we can do something if
6206 the comparisons are NE_EXPR. If this is the "and", we can do something
6207 if the comparisons are EQ_EXPR. I.e.,
6208 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6210 WANTED_CODE is this operation code. For single bit fields, we can
6211 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6212 comparison for one-bit fields. */
6214 enum tree_code wanted_code
;
6215 enum tree_code lcode
, rcode
;
6216 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
6217 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
6218 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
6219 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
6220 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
6221 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
6222 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
6223 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
6224 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
6225 scalar_int_mode lnmode
, rnmode
;
6226 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
6227 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
6228 tree l_const
, r_const
;
6229 tree lntype
, rntype
, result
;
6230 HOST_WIDE_INT first_bit
, end_bit
;
6233 /* Start by getting the comparison codes. Fail if anything is volatile.
6234 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6235 it were surrounded with a NE_EXPR. */
6237 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
6240 lcode
= TREE_CODE (lhs
);
6241 rcode
= TREE_CODE (rhs
);
6243 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
6245 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
6246 build_int_cst (TREE_TYPE (lhs
), 0));
6250 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
6252 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
6253 build_int_cst (TREE_TYPE (rhs
), 0));
6257 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
6258 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
6261 ll_arg
= TREE_OPERAND (lhs
, 0);
6262 lr_arg
= TREE_OPERAND (lhs
, 1);
6263 rl_arg
= TREE_OPERAND (rhs
, 0);
6264 rr_arg
= TREE_OPERAND (rhs
, 1);
6266 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6267 if (simple_operand_p (ll_arg
)
6268 && simple_operand_p (lr_arg
))
6270 if (operand_equal_p (ll_arg
, rl_arg
, 0)
6271 && operand_equal_p (lr_arg
, rr_arg
, 0))
6273 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
6274 truth_type
, ll_arg
, lr_arg
);
6278 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
6279 && operand_equal_p (lr_arg
, rl_arg
, 0))
6281 result
= combine_comparisons (loc
, code
, lcode
,
6282 swap_tree_comparison (rcode
),
6283 truth_type
, ll_arg
, lr_arg
);
6289 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
6290 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
6292 /* If the RHS can be evaluated unconditionally and its operands are
6293 simple, it wins to evaluate the RHS unconditionally on machines
6294 with expensive branches. In this case, this isn't a comparison
6295 that can be merged. */
6297 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
6299 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
6300 && simple_operand_p (rl_arg
)
6301 && simple_operand_p (rr_arg
))
6303 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6304 if (code
== TRUTH_OR_EXPR
6305 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
6306 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
6307 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6308 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6309 return build2_loc (loc
, NE_EXPR
, truth_type
,
6310 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6312 build_int_cst (TREE_TYPE (ll_arg
), 0));
6314 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6315 if (code
== TRUTH_AND_EXPR
6316 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
6317 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
6318 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6319 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6320 return build2_loc (loc
, EQ_EXPR
, truth_type
,
6321 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6323 build_int_cst (TREE_TYPE (ll_arg
), 0));
6326 /* See if the comparisons can be merged. Then get all the parameters for
6329 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
6330 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
6333 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
6335 ll_inner
= decode_field_reference (loc
, &ll_arg
,
6336 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
6337 &ll_unsignedp
, &ll_reversep
, &volatilep
,
6338 &ll_mask
, &ll_and_mask
);
6339 lr_inner
= decode_field_reference (loc
, &lr_arg
,
6340 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
6341 &lr_unsignedp
, &lr_reversep
, &volatilep
,
6342 &lr_mask
, &lr_and_mask
);
6343 rl_inner
= decode_field_reference (loc
, &rl_arg
,
6344 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
6345 &rl_unsignedp
, &rl_reversep
, &volatilep
,
6346 &rl_mask
, &rl_and_mask
);
6347 rr_inner
= decode_field_reference (loc
, &rr_arg
,
6348 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
6349 &rr_unsignedp
, &rr_reversep
, &volatilep
,
6350 &rr_mask
, &rr_and_mask
);
6352 /* It must be true that the inner operation on the lhs of each
6353 comparison must be the same if we are to be able to do anything.
6354 Then see if we have constants. If not, the same must be true for
6357 || ll_reversep
!= rl_reversep
6358 || ll_inner
== 0 || rl_inner
== 0
6359 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
6362 if (TREE_CODE (lr_arg
) == INTEGER_CST
6363 && TREE_CODE (rr_arg
) == INTEGER_CST
)
6365 l_const
= lr_arg
, r_const
= rr_arg
;
6366 lr_reversep
= ll_reversep
;
6368 else if (lr_reversep
!= rr_reversep
6369 || lr_inner
== 0 || rr_inner
== 0
6370 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
6373 l_const
= r_const
= 0;
6375 /* If either comparison code is not correct for our logical operation,
6376 fail. However, we can convert a one-bit comparison against zero into
6377 the opposite comparison against that bit being set in the field. */
6379 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
6380 if (lcode
!= wanted_code
)
6382 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
6384 /* Make the left operand unsigned, since we are only interested
6385 in the value of one bit. Otherwise we are doing the wrong
6394 /* This is analogous to the code for l_const above. */
6395 if (rcode
!= wanted_code
)
6397 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
6406 /* See if we can find a mode that contains both fields being compared on
6407 the left. If we can't, fail. Otherwise, update all constants and masks
6408 to be relative to a field of that size. */
6409 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
6410 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
6411 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6412 TYPE_ALIGN (TREE_TYPE (ll_inner
)), BITS_PER_WORD
,
6413 volatilep
, &lnmode
))
6416 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
6417 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
6418 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
6419 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
6421 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6423 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
6424 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
6427 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
6428 size_int (xll_bitpos
));
6429 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
6430 size_int (xrl_bitpos
));
6434 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
6435 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
6436 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
6437 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
6438 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6441 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6443 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6448 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
6449 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
6450 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
6451 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
6452 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6455 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6457 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6461 /* If the right sides are not constant, do the same for it. Also,
6462 disallow this optimization if a size, signedness or storage order
6463 mismatch occurs between the left and right sides. */
6466 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
6467 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
6468 || ll_reversep
!= lr_reversep
6469 /* Make sure the two fields on the right
6470 correspond to the left without being swapped. */
6471 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
6474 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
6475 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
6476 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6477 TYPE_ALIGN (TREE_TYPE (lr_inner
)), BITS_PER_WORD
,
6478 volatilep
, &rnmode
))
6481 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
6482 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
6483 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
6484 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
6486 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6488 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
6489 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
6492 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6494 size_int (xlr_bitpos
));
6495 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6497 size_int (xrr_bitpos
));
6499 /* Make a mask that corresponds to both fields being compared.
6500 Do this for both items being compared. If the operands are the
6501 same size and the bits being compared are in the same position
6502 then we can do this by masking both and comparing the masked
6504 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6505 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
6506 if (lnbitsize
== rnbitsize
6507 && xll_bitpos
== xlr_bitpos
6511 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6512 lntype
, lnbitsize
, lnbitpos
,
6513 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6514 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6515 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
6517 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
6518 rntype
, rnbitsize
, rnbitpos
,
6519 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
6520 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
6521 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
6523 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6526 /* There is still another way we can do something: If both pairs of
6527 fields being compared are adjacent, we may be able to make a wider
6528 field containing them both.
6530 Note that we still must mask the lhs/rhs expressions. Furthermore,
6531 the mask must be shifted to account for the shift done by
6532 make_bit_field_ref. */
6533 if (((ll_bitsize
+ ll_bitpos
== rl_bitpos
6534 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
6535 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
6536 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
6544 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
6545 ll_bitsize
+ rl_bitsize
,
6546 MIN (ll_bitpos
, rl_bitpos
),
6547 ll_unsignedp
, ll_reversep
);
6548 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
6549 lr_bitsize
+ rr_bitsize
,
6550 MIN (lr_bitpos
, rr_bitpos
),
6551 lr_unsignedp
, lr_reversep
);
6553 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
6554 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
6555 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
6556 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
6558 /* Convert to the smaller type before masking out unwanted bits. */
6560 if (lntype
!= rntype
)
6562 if (lnbitsize
> rnbitsize
)
6564 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
6565 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
6568 else if (lnbitsize
< rnbitsize
)
6570 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
6571 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
6576 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
6577 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
6579 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
6580 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
6582 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6588 /* Handle the case of comparisons with constants. If there is something in
6589 common between the masks, those bits of the constants must be the same.
6590 If not, the condition is always false. Test for this to avoid generating
6591 incorrect code below. */
6592 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
6593 if (! integer_zerop (result
)
6594 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
6595 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
6597 if (wanted_code
== NE_EXPR
)
6599 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6600 return constant_boolean_node (true, truth_type
);
6604 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6605 return constant_boolean_node (false, truth_type
);
6612 /* Construct the expression we will return. First get the component
6613 reference we will make. Unless the mask is all ones the width of
6614 that field, perform the mask operation. Then compare with the
6616 result
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6617 lntype
, lnbitsize
, lnbitpos
,
6618 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6620 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6621 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6622 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
6624 return build2_loc (loc
, wanted_code
, truth_type
, result
,
6625 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
6628 /* T is an integer expression that is being multiplied, divided, or taken a
6629 modulus (CODE says which and what kind of divide or modulus) by a
6630 constant C. See if we can eliminate that operation by folding it with
6631 other operations already in T. WIDE_TYPE, if non-null, is a type that
6632 should be used for the computation if wider than our type.
6634 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6635 (X * 2) + (Y * 4). We must, however, be assured that either the original
6636 expression would not overflow or that overflow is undefined for the type
6637 in the language in question.
6639 If we return a non-null expression, it is an equivalent form of the
6640 original computation, but need not be in the original type.
6642 We set *STRICT_OVERFLOW_P to true if the return values depends on
6643 signed overflow being undefined. Otherwise we do not change
6644 *STRICT_OVERFLOW_P. */
6647 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6648 bool *strict_overflow_p
)
6650 /* To avoid exponential search depth, refuse to allow recursion past
6651 three levels. Beyond that (1) it's highly unlikely that we'll find
6652 something interesting and (2) we've probably processed it before
6653 when we built the inner expression. */
6662 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6669 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6670 bool *strict_overflow_p
)
6672 tree type
= TREE_TYPE (t
);
6673 enum tree_code tcode
= TREE_CODE (t
);
6674 tree ctype
= (wide_type
!= 0
6675 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type
))
6676 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
)))
6677 ? wide_type
: type
);
6679 int same_p
= tcode
== code
;
6680 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6681 bool sub_strict_overflow_p
;
6683 /* Don't deal with constants of zero here; they confuse the code below. */
6684 if (integer_zerop (c
))
6687 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6688 op0
= TREE_OPERAND (t
, 0);
6690 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6691 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6693 /* Note that we need not handle conditional operations here since fold
6694 already handles those cases. So just do arithmetic here. */
6698 /* For a constant, we can always simplify if we are a multiply
6699 or (for divide and modulus) if it is a multiple of our constant. */
6700 if (code
== MULT_EXPR
6701 || wi::multiple_of_p (wi::to_wide (t
), wi::to_wide (c
),
6704 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6705 fold_convert (ctype
, c
));
6706 /* If the multiplication overflowed, we lost information on it.
6707 See PR68142 and PR69845. */
6708 if (TREE_OVERFLOW (tem
))
6714 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6715 /* If op0 is an expression ... */
6716 if ((COMPARISON_CLASS_P (op0
)
6717 || UNARY_CLASS_P (op0
)
6718 || BINARY_CLASS_P (op0
)
6719 || VL_EXP_CLASS_P (op0
)
6720 || EXPRESSION_CLASS_P (op0
))
6721 /* ... and has wrapping overflow, and its type is smaller
6722 than ctype, then we cannot pass through as widening. */
6723 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6724 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6725 && (TYPE_PRECISION (ctype
)
6726 > TYPE_PRECISION (TREE_TYPE (op0
))))
6727 /* ... or this is a truncation (t is narrower than op0),
6728 then we cannot pass through this narrowing. */
6729 || (TYPE_PRECISION (type
)
6730 < TYPE_PRECISION (TREE_TYPE (op0
)))
6731 /* ... or signedness changes for division or modulus,
6732 then we cannot pass through this conversion. */
6733 || (code
!= MULT_EXPR
6734 && (TYPE_UNSIGNED (ctype
)
6735 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6736 /* ... or has undefined overflow while the converted to
6737 type has not, we cannot do the operation in the inner type
6738 as that would introduce undefined overflow. */
6739 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6740 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6741 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6744 /* Pass the constant down and see if we can make a simplification. If
6745 we can, replace this expression with the inner simplification for
6746 possible later conversion to our or some other type. */
6747 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6748 && TREE_CODE (t2
) == INTEGER_CST
6749 && !TREE_OVERFLOW (t2
)
6750 && (t1
= extract_muldiv (op0
, t2
, code
,
6751 code
== MULT_EXPR
? ctype
: NULL_TREE
,
6752 strict_overflow_p
)) != 0)
6757 /* If widening the type changes it from signed to unsigned, then we
6758 must avoid building ABS_EXPR itself as unsigned. */
6759 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6761 tree cstype
= (*signed_type_for
) (ctype
);
6762 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6765 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6766 return fold_convert (ctype
, t1
);
6770 /* If the constant is negative, we cannot simplify this. */
6771 if (tree_int_cst_sgn (c
) == -1)
6775 /* For division and modulus, type can't be unsigned, as e.g.
6776 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6777 For signed types, even with wrapping overflow, this is fine. */
6778 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6780 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6782 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6785 case MIN_EXPR
: case MAX_EXPR
:
6786 /* If widening the type changes the signedness, then we can't perform
6787 this optimization as that changes the result. */
6788 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6791 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6792 sub_strict_overflow_p
= false;
6793 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6794 &sub_strict_overflow_p
)) != 0
6795 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6796 &sub_strict_overflow_p
)) != 0)
6798 if (tree_int_cst_sgn (c
) < 0)
6799 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6800 if (sub_strict_overflow_p
)
6801 *strict_overflow_p
= true;
6802 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6803 fold_convert (ctype
, t2
));
6807 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6808 /* If the second operand is constant, this is a multiplication
6809 or floor division, by a power of two, so we can treat it that
6810 way unless the multiplier or divisor overflows. Signed
6811 left-shift overflow is implementation-defined rather than
6812 undefined in C90, so do not convert signed left shift into
6814 if (TREE_CODE (op1
) == INTEGER_CST
6815 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6816 /* const_binop may not detect overflow correctly,
6817 so check for it explicitly here. */
6818 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
6820 && (t1
= fold_convert (ctype
,
6821 const_binop (LSHIFT_EXPR
, size_one_node
,
6823 && !TREE_OVERFLOW (t1
))
6824 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6825 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6827 fold_convert (ctype
, op0
),
6829 c
, code
, wide_type
, strict_overflow_p
);
6832 case PLUS_EXPR
: case MINUS_EXPR
:
6833 /* See if we can eliminate the operation on both sides. If we can, we
6834 can return a new PLUS or MINUS. If we can't, the only remaining
6835 cases where we can do anything are if the second operand is a
6837 sub_strict_overflow_p
= false;
6838 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6839 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6840 if (t1
!= 0 && t2
!= 0
6841 && TYPE_OVERFLOW_WRAPS (ctype
)
6842 && (code
== MULT_EXPR
6843 /* If not multiplication, we can only do this if both operands
6844 are divisible by c. */
6845 || (multiple_of_p (ctype
, op0
, c
)
6846 && multiple_of_p (ctype
, op1
, c
))))
6848 if (sub_strict_overflow_p
)
6849 *strict_overflow_p
= true;
6850 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6851 fold_convert (ctype
, t2
));
6854 /* If this was a subtraction, negate OP1 and set it to be an addition.
6855 This simplifies the logic below. */
6856 if (tcode
== MINUS_EXPR
)
6858 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6859 /* If OP1 was not easily negatable, the constant may be OP0. */
6860 if (TREE_CODE (op0
) == INTEGER_CST
)
6862 std::swap (op0
, op1
);
6867 if (TREE_CODE (op1
) != INTEGER_CST
)
6870 /* If either OP1 or C are negative, this optimization is not safe for
6871 some of the division and remainder types while for others we need
6872 to change the code. */
6873 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6875 if (code
== CEIL_DIV_EXPR
)
6876 code
= FLOOR_DIV_EXPR
;
6877 else if (code
== FLOOR_DIV_EXPR
)
6878 code
= CEIL_DIV_EXPR
;
6879 else if (code
!= MULT_EXPR
6880 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6884 /* If it's a multiply or a division/modulus operation of a multiple
6885 of our constant, do the operation and verify it doesn't overflow. */
6886 if (code
== MULT_EXPR
6887 || wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6890 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6891 fold_convert (ctype
, c
));
6892 /* We allow the constant to overflow with wrapping semantics. */
6894 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6900 /* If we have an unsigned type, we cannot widen the operation since it
6901 will change the result if the original computation overflowed. */
6902 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6905 /* The last case is if we are a multiply. In that case, we can
6906 apply the distributive law to commute the multiply and addition
6907 if the multiplication of the constants doesn't overflow
6908 and overflow is defined. With undefined overflow
6909 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6910 But fold_plusminus_mult_expr would factor back any power-of-two
6911 value so do not distribute in the first place in this case. */
6912 if (code
== MULT_EXPR
6913 && TYPE_OVERFLOW_WRAPS (ctype
)
6914 && !(tree_fits_shwi_p (c
) && pow2p_hwi (absu_hwi (tree_to_shwi (c
)))))
6915 return fold_build2 (tcode
, ctype
,
6916 fold_build2 (code
, ctype
,
6917 fold_convert (ctype
, op0
),
6918 fold_convert (ctype
, c
)),
6924 /* We have a special case here if we are doing something like
6925 (C * 8) % 4 since we know that's zero. */
6926 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6927 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6928 /* If the multiplication can overflow we cannot optimize this. */
6929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6930 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6931 && wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6934 *strict_overflow_p
= true;
6935 return omit_one_operand (type
, integer_zero_node
, op0
);
6938 /* ... fall through ... */
6940 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6941 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6942 /* If we can extract our operation from the LHS, do so and return a
6943 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6944 do something only if the second operand is a constant. */
6946 && TYPE_OVERFLOW_WRAPS (ctype
)
6947 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6948 strict_overflow_p
)) != 0)
6949 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6950 fold_convert (ctype
, op1
));
6951 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6952 && TYPE_OVERFLOW_WRAPS (ctype
)
6953 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6954 strict_overflow_p
)) != 0)
6955 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6956 fold_convert (ctype
, t1
));
6957 else if (TREE_CODE (op1
) != INTEGER_CST
)
6960 /* If these are the same operation types, we can associate them
6961 assuming no overflow. */
6964 bool overflow_p
= false;
6965 wi::overflow_type overflow_mul
;
6966 signop sign
= TYPE_SIGN (ctype
);
6967 unsigned prec
= TYPE_PRECISION (ctype
);
6968 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
6969 wi::to_wide (c
, prec
),
6970 sign
, &overflow_mul
);
6971 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6973 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6976 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6977 wide_int_to_tree (ctype
, mul
));
6980 /* If these operations "cancel" each other, we have the main
6981 optimizations of this pass, which occur when either constant is a
6982 multiple of the other, in which case we replace this with either an
6983 operation or CODE or TCODE.
6985 If we have an unsigned type, we cannot do this since it will change
6986 the result if the original computation overflowed. */
6987 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6988 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6989 || (tcode
== MULT_EXPR
6990 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6991 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6992 && code
!= MULT_EXPR
)))
6994 if (wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6997 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6998 *strict_overflow_p
= true;
6999 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
7000 fold_convert (ctype
,
7001 const_binop (TRUNC_DIV_EXPR
,
7004 else if (wi::multiple_of_p (wi::to_wide (c
), wi::to_wide (op1
),
7007 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
7008 *strict_overflow_p
= true;
7009 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
7010 fold_convert (ctype
,
7011 const_binop (TRUNC_DIV_EXPR
,
7024 /* Return a node which has the indicated constant VALUE (either 0 or
7025 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7026 and is of the indicated TYPE. */
7029 constant_boolean_node (bool value
, tree type
)
7031 if (type
== integer_type_node
)
7032 return value
? integer_one_node
: integer_zero_node
;
7033 else if (type
== boolean_type_node
)
7034 return value
? boolean_true_node
: boolean_false_node
;
7035 else if (TREE_CODE (type
) == VECTOR_TYPE
)
7036 return build_vector_from_val (type
,
7037 build_int_cst (TREE_TYPE (type
),
7040 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
7044 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7045 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7046 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7047 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7048 COND is the first argument to CODE; otherwise (as in the example
7049 given here), it is the second argument. TYPE is the type of the
7050 original expression. Return NULL_TREE if no simplification is
7054 fold_binary_op_with_conditional_arg (location_t loc
,
7055 enum tree_code code
,
7056 tree type
, tree op0
, tree op1
,
7057 tree cond
, tree arg
, int cond_first_p
)
7059 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
7060 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
7061 tree test
, true_value
, false_value
;
7062 tree lhs
= NULL_TREE
;
7063 tree rhs
= NULL_TREE
;
7064 enum tree_code cond_code
= COND_EXPR
;
7066 /* Do not move possibly trapping operations into the conditional as this
7067 pessimizes code and causes gimplification issues when applied late. */
7068 if (operation_could_trap_p (code
, FLOAT_TYPE_P (type
),
7069 ANY_INTEGRAL_TYPE_P (type
)
7070 && TYPE_OVERFLOW_TRAPS (type
), op1
))
7073 if (TREE_CODE (cond
) == COND_EXPR
7074 || TREE_CODE (cond
) == VEC_COND_EXPR
)
7076 test
= TREE_OPERAND (cond
, 0);
7077 true_value
= TREE_OPERAND (cond
, 1);
7078 false_value
= TREE_OPERAND (cond
, 2);
7079 /* If this operand throws an expression, then it does not make
7080 sense to try to perform a logical or arithmetic operation
7082 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
7084 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
7087 else if (!(TREE_CODE (type
) != VECTOR_TYPE
7088 && TREE_CODE (TREE_TYPE (cond
)) == VECTOR_TYPE
))
7090 tree testtype
= TREE_TYPE (cond
);
7092 true_value
= constant_boolean_node (true, testtype
);
7093 false_value
= constant_boolean_node (false, testtype
);
7096 /* Detect the case of mixing vector and scalar types - bail out. */
7099 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
7100 cond_code
= VEC_COND_EXPR
;
7102 /* This transformation is only worthwhile if we don't have to wrap ARG
7103 in a SAVE_EXPR and the operation can be simplified without recursing
7104 on at least one of the branches once its pushed inside the COND_EXPR. */
7105 if (!TREE_CONSTANT (arg
)
7106 && (TREE_SIDE_EFFECTS (arg
)
7107 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
7108 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
7111 arg
= fold_convert_loc (loc
, arg_type
, arg
);
7114 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
7116 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
7118 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
7122 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
7124 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
7126 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
7129 /* Check that we have simplified at least one of the branches. */
7130 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
7133 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
7137 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7139 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7140 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7141 ADDEND is the same as X.
7143 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7144 and finite. The problematic cases are when X is zero, and its mode
7145 has signed zeros. In the case of rounding towards -infinity,
7146 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7147 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7150 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
7152 if (!real_zerop (addend
))
7155 /* Don't allow the fold with -fsignaling-nans. */
7156 if (HONOR_SNANS (type
))
7159 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7160 if (!HONOR_SIGNED_ZEROS (type
))
7163 /* There is no case that is safe for all rounding modes. */
7164 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
))
7167 /* In a vector or complex, we would need to check the sign of all zeros. */
7168 if (TREE_CODE (addend
) == VECTOR_CST
)
7169 addend
= uniform_vector_p (addend
);
7170 if (!addend
|| TREE_CODE (addend
) != REAL_CST
)
7173 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7174 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
7177 /* The mode has signed zeros, and we have to honor their sign.
7178 In this situation, there is only one case we can return true for.
7179 X - 0 is the same as X with default rounding. */
7183 /* Subroutine of match.pd that optimizes comparisons of a division by
7184 a nonzero integer constant against an integer constant, i.e.
7187 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7188 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7191 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
7192 tree
*hi
, bool *neg_overflow
)
7194 tree prod
, tmp
, type
= TREE_TYPE (c1
);
7195 signop sign
= TYPE_SIGN (type
);
7196 wi::overflow_type overflow
;
7198 /* We have to do this the hard way to detect unsigned overflow.
7199 prod = int_const_binop (MULT_EXPR, c1, c2); */
7200 wide_int val
= wi::mul (wi::to_wide (c1
), wi::to_wide (c2
), sign
, &overflow
);
7201 prod
= force_fit_type (type
, val
, -1, overflow
);
7202 *neg_overflow
= false;
7204 if (sign
== UNSIGNED
)
7206 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7209 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7210 val
= wi::add (wi::to_wide (prod
), wi::to_wide (tmp
), sign
, &overflow
);
7211 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
7213 else if (tree_int_cst_sgn (c1
) >= 0)
7215 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7216 switch (tree_int_cst_sgn (c2
))
7219 *neg_overflow
= true;
7220 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7225 *lo
= fold_negate_const (tmp
, type
);
7230 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7240 /* A negative divisor reverses the relational operators. */
7241 code
= swap_tree_comparison (code
);
7243 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
7244 switch (tree_int_cst_sgn (c2
))
7247 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7252 *hi
= fold_negate_const (tmp
, type
);
7257 *neg_overflow
= true;
7258 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7267 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
7270 if (TREE_OVERFLOW (*lo
)
7271 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
7273 if (TREE_OVERFLOW (*hi
)
7274 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
7281 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7282 equality/inequality test, then return a simplified form of the test
7283 using a sign testing. Otherwise return NULL. TYPE is the desired
7287 fold_single_bit_test_into_sign_test (location_t loc
,
7288 enum tree_code code
, tree arg0
, tree arg1
,
7291 /* If this is testing a single bit, we can optimize the test. */
7292 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7293 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7294 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7296 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7297 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7298 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
7300 if (arg00
!= NULL_TREE
7301 /* This is only a win if casting to a signed type is cheap,
7302 i.e. when arg00's type is not a partial mode. */
7303 && type_has_mode_precision_p (TREE_TYPE (arg00
)))
7305 tree stype
= signed_type_for (TREE_TYPE (arg00
));
7306 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
7308 fold_convert_loc (loc
, stype
, arg00
),
7309 build_int_cst (stype
, 0));
7316 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7317 equality/inequality test, then return a simplified form of
7318 the test using shifts and logical operations. Otherwise return
7319 NULL. TYPE is the desired result type. */
7322 fold_single_bit_test (location_t loc
, enum tree_code code
,
7323 tree arg0
, tree arg1
, tree result_type
)
7325 /* If this is testing a single bit, we can optimize the test. */
7326 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7327 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7328 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7330 tree inner
= TREE_OPERAND (arg0
, 0);
7331 tree type
= TREE_TYPE (arg0
);
7332 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
7333 scalar_int_mode operand_mode
= SCALAR_INT_TYPE_MODE (type
);
7335 tree signed_type
, unsigned_type
, intermediate_type
;
7338 /* First, see if we can fold the single bit test into a sign-bit
7340 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
7345 /* Otherwise we have (A & C) != 0 where C is a single bit,
7346 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7347 Similarly for (A & C) == 0. */
7349 /* If INNER is a right shift of a constant and it plus BITNUM does
7350 not overflow, adjust BITNUM and INNER. */
7351 if (TREE_CODE (inner
) == RSHIFT_EXPR
7352 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
7353 && bitnum
< TYPE_PRECISION (type
)
7354 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner
, 1)),
7355 TYPE_PRECISION (type
) - bitnum
))
7357 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
7358 inner
= TREE_OPERAND (inner
, 0);
7361 /* If we are going to be able to omit the AND below, we must do our
7362 operations as unsigned. If we must use the AND, we have a choice.
7363 Normally unsigned is faster, but for some machines signed is. */
7364 ops_unsigned
= (load_extend_op (operand_mode
) == SIGN_EXTEND
7365 && !flag_syntax_only
) ? 0 : 1;
7367 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
7368 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
7369 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
7370 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
7373 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
7374 inner
, size_int (bitnum
));
7376 one
= build_int_cst (intermediate_type
, 1);
7378 if (code
== EQ_EXPR
)
7379 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
7381 /* Put the AND last so it can combine with more things. */
7382 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
7384 /* Make sure to return the proper type. */
7385 inner
= fold_convert_loc (loc
, result_type
, inner
);
7392 /* Test whether it is preferable two swap two operands, ARG0 and
7393 ARG1, for example because ARG0 is an integer constant and ARG1
7397 tree_swap_operands_p (const_tree arg0
, const_tree arg1
)
7399 if (CONSTANT_CLASS_P (arg1
))
7401 if (CONSTANT_CLASS_P (arg0
))
7407 if (TREE_CONSTANT (arg1
))
7409 if (TREE_CONSTANT (arg0
))
7412 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7413 for commutative and comparison operators. Ensuring a canonical
7414 form allows the optimizers to find additional redundancies without
7415 having to explicitly check for both orderings. */
7416 if (TREE_CODE (arg0
) == SSA_NAME
7417 && TREE_CODE (arg1
) == SSA_NAME
7418 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
7421 /* Put SSA_NAMEs last. */
7422 if (TREE_CODE (arg1
) == SSA_NAME
)
7424 if (TREE_CODE (arg0
) == SSA_NAME
)
7427 /* Put variables last. */
7437 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7438 means A >= Y && A != MAX, but in this case we know that
7439 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7442 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7444 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7446 if (TREE_CODE (bound
) == LT_EXPR
)
7447 a
= TREE_OPERAND (bound
, 0);
7448 else if (TREE_CODE (bound
) == GT_EXPR
)
7449 a
= TREE_OPERAND (bound
, 1);
7453 typea
= TREE_TYPE (a
);
7454 if (!INTEGRAL_TYPE_P (typea
)
7455 && !POINTER_TYPE_P (typea
))
7458 if (TREE_CODE (ineq
) == LT_EXPR
)
7460 a1
= TREE_OPERAND (ineq
, 1);
7461 y
= TREE_OPERAND (ineq
, 0);
7463 else if (TREE_CODE (ineq
) == GT_EXPR
)
7465 a1
= TREE_OPERAND (ineq
, 0);
7466 y
= TREE_OPERAND (ineq
, 1);
7471 if (TREE_TYPE (a1
) != typea
)
7474 if (POINTER_TYPE_P (typea
))
7476 /* Convert the pointer types into integer before taking the difference. */
7477 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7478 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7479 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7482 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7484 if (!diff
|| !integer_onep (diff
))
7487 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7490 /* Fold a sum or difference of at least one multiplication.
7491 Returns the folded tree or NULL if no simplification could be made. */
7494 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7495 tree arg0
, tree arg1
)
7497 tree arg00
, arg01
, arg10
, arg11
;
7498 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7500 /* (A * C) +- (B * C) -> (A+-B) * C.
7501 (A * C) +- A -> A * (C+-1).
7502 We are most concerned about the case where C is a constant,
7503 but other combinations show up during loop reduction. Since
7504 it is not difficult, try all four possibilities. */
7506 if (TREE_CODE (arg0
) == MULT_EXPR
)
7508 arg00
= TREE_OPERAND (arg0
, 0);
7509 arg01
= TREE_OPERAND (arg0
, 1);
7511 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7513 arg00
= build_one_cst (type
);
7518 /* We cannot generate constant 1 for fract. */
7519 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7522 arg01
= build_one_cst (type
);
7524 if (TREE_CODE (arg1
) == MULT_EXPR
)
7526 arg10
= TREE_OPERAND (arg1
, 0);
7527 arg11
= TREE_OPERAND (arg1
, 1);
7529 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7531 arg10
= build_one_cst (type
);
7532 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7533 the purpose of this canonicalization. */
7534 if (wi::neg_p (wi::to_wide (arg1
), TYPE_SIGN (TREE_TYPE (arg1
)))
7535 && negate_expr_p (arg1
)
7536 && code
== PLUS_EXPR
)
7538 arg11
= negate_expr (arg1
);
7546 /* We cannot generate constant 1 for fract. */
7547 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7550 arg11
= build_one_cst (type
);
7554 /* Prefer factoring a common non-constant. */
7555 if (operand_equal_p (arg00
, arg10
, 0))
7556 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7557 else if (operand_equal_p (arg01
, arg11
, 0))
7558 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7559 else if (operand_equal_p (arg00
, arg11
, 0))
7560 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7561 else if (operand_equal_p (arg01
, arg10
, 0))
7562 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7564 /* No identical multiplicands; see if we can find a common
7565 power-of-two factor in non-power-of-two multiplies. This
7566 can help in multi-dimensional array access. */
7567 else if (tree_fits_shwi_p (arg01
) && tree_fits_shwi_p (arg11
))
7569 HOST_WIDE_INT int01
= tree_to_shwi (arg01
);
7570 HOST_WIDE_INT int11
= tree_to_shwi (arg11
);
7575 /* Move min of absolute values to int11. */
7576 if (absu_hwi (int01
) < absu_hwi (int11
))
7578 tmp
= int01
, int01
= int11
, int11
= tmp
;
7579 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7586 const unsigned HOST_WIDE_INT factor
= absu_hwi (int11
);
7588 && pow2p_hwi (factor
)
7589 && (int01
& (factor
- 1)) == 0
7590 /* The remainder should not be a constant, otherwise we
7591 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7592 increased the number of multiplications necessary. */
7593 && TREE_CODE (arg10
) != INTEGER_CST
)
7595 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7596 build_int_cst (TREE_TYPE (arg00
),
7601 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7608 if (! ANY_INTEGRAL_TYPE_P (type
)
7609 || TYPE_OVERFLOW_WRAPS (type
)
7610 /* We are neither factoring zero nor minus one. */
7611 || TREE_CODE (same
) == INTEGER_CST
)
7612 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7613 fold_build2_loc (loc
, code
, type
,
7614 fold_convert_loc (loc
, type
, alt0
),
7615 fold_convert_loc (loc
, type
, alt1
)),
7616 fold_convert_loc (loc
, type
, same
));
7618 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7619 same may be minus one and thus the multiplication may overflow. Perform
7620 the sum operation in an unsigned type. */
7621 tree utype
= unsigned_type_for (type
);
7622 tree tem
= fold_build2_loc (loc
, code
, utype
,
7623 fold_convert_loc (loc
, utype
, alt0
),
7624 fold_convert_loc (loc
, utype
, alt1
));
7625 /* If the sum evaluated to a constant that is not -INF the multiplication
7627 if (TREE_CODE (tem
) == INTEGER_CST
7628 && (wi::to_wide (tem
)
7629 != wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
7630 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7631 fold_convert (type
, tem
), same
);
7633 /* Do not resort to unsigned multiplication because
7634 we lose the no-overflow property of the expression. */
7638 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7639 specified by EXPR into the buffer PTR of length LEN bytes.
7640 Return the number of bytes placed in the buffer, or zero
7644 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7646 tree type
= TREE_TYPE (expr
);
7647 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
7648 int byte
, offset
, word
, words
;
7649 unsigned char value
;
7651 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7658 return MIN (len
, total_bytes
- off
);
7660 words
= total_bytes
/ UNITS_PER_WORD
;
7662 for (byte
= 0; byte
< total_bytes
; byte
++)
7664 int bitpos
= byte
* BITS_PER_UNIT
;
7665 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7667 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7669 if (total_bytes
> UNITS_PER_WORD
)
7671 word
= byte
/ UNITS_PER_WORD
;
7672 if (WORDS_BIG_ENDIAN
)
7673 word
= (words
- 1) - word
;
7674 offset
= word
* UNITS_PER_WORD
;
7675 if (BYTES_BIG_ENDIAN
)
7676 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7678 offset
+= byte
% UNITS_PER_WORD
;
7681 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7682 if (offset
>= off
&& offset
- off
< len
)
7683 ptr
[offset
- off
] = value
;
7685 return MIN (len
, total_bytes
- off
);
7689 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7690 specified by EXPR into the buffer PTR of length LEN bytes.
7691 Return the number of bytes placed in the buffer, or zero
7695 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7697 tree type
= TREE_TYPE (expr
);
7698 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
7699 int total_bytes
= GET_MODE_SIZE (mode
);
7700 FIXED_VALUE_TYPE value
;
7701 tree i_value
, i_type
;
7703 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7706 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7708 if (NULL_TREE
== i_type
|| TYPE_PRECISION (i_type
) != total_bytes
)
7711 value
= TREE_FIXED_CST (expr
);
7712 i_value
= double_int_to_tree (i_type
, value
.data
);
7714 return native_encode_int (i_value
, ptr
, len
, off
);
7718 /* Subroutine of native_encode_expr. Encode the REAL_CST
7719 specified by EXPR into the buffer PTR of length LEN bytes.
7720 Return the number of bytes placed in the buffer, or zero
7724 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7726 tree type
= TREE_TYPE (expr
);
7727 int total_bytes
= GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type
));
7728 int byte
, offset
, word
, words
, bitpos
;
7729 unsigned char value
;
7731 /* There are always 32 bits in each long, no matter the size of
7732 the hosts long. We handle floating point representations with
7736 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7743 return MIN (len
, total_bytes
- off
);
7745 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7747 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7749 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7750 bitpos
+= BITS_PER_UNIT
)
7752 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7753 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7755 if (UNITS_PER_WORD
< 4)
7757 word
= byte
/ UNITS_PER_WORD
;
7758 if (WORDS_BIG_ENDIAN
)
7759 word
= (words
- 1) - word
;
7760 offset
= word
* UNITS_PER_WORD
;
7761 if (BYTES_BIG_ENDIAN
)
7762 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7764 offset
+= byte
% UNITS_PER_WORD
;
7769 if (BYTES_BIG_ENDIAN
)
7771 /* Reverse bytes within each long, or within the entire float
7772 if it's smaller than a long (for HFmode). */
7773 offset
= MIN (3, total_bytes
- 1) - offset
;
7774 gcc_assert (offset
>= 0);
7777 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7779 && offset
- off
< len
)
7780 ptr
[offset
- off
] = value
;
7782 return MIN (len
, total_bytes
- off
);
7785 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7786 specified by EXPR into the buffer PTR of length LEN bytes.
7787 Return the number of bytes placed in the buffer, or zero
7791 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7796 part
= TREE_REALPART (expr
);
7797 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7798 if (off
== -1 && rsize
== 0)
7800 part
= TREE_IMAGPART (expr
);
7802 off
= MAX (0, off
- GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part
))));
7803 isize
= native_encode_expr (part
, ptr
? ptr
+ rsize
: NULL
,
7805 if (off
== -1 && isize
!= rsize
)
7807 return rsize
+ isize
;
7810 /* Like native_encode_vector, but only encode the first COUNT elements.
7811 The other arguments are as for native_encode_vector. */
7814 native_encode_vector_part (const_tree expr
, unsigned char *ptr
, int len
,
7815 int off
, unsigned HOST_WIDE_INT count
)
7817 tree itype
= TREE_TYPE (TREE_TYPE (expr
));
7818 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr
))
7819 && TYPE_PRECISION (itype
) <= BITS_PER_UNIT
)
7821 /* This is the only case in which elements can be smaller than a byte.
7822 Element 0 is always in the lsb of the containing byte. */
7823 unsigned int elt_bits
= TYPE_PRECISION (itype
);
7824 int total_bytes
= CEIL (elt_bits
* count
, BITS_PER_UNIT
);
7825 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7831 /* Zero the buffer and then set bits later where necessary. */
7832 int extract_bytes
= MIN (len
, total_bytes
- off
);
7834 memset (ptr
, 0, extract_bytes
);
7836 unsigned int elts_per_byte
= BITS_PER_UNIT
/ elt_bits
;
7837 unsigned int first_elt
= off
* elts_per_byte
;
7838 unsigned int extract_elts
= extract_bytes
* elts_per_byte
;
7839 for (unsigned int i
= 0; i
< extract_elts
; ++i
)
7841 tree elt
= VECTOR_CST_ELT (expr
, first_elt
+ i
);
7842 if (TREE_CODE (elt
) != INTEGER_CST
)
7845 if (ptr
&& wi::extract_uhwi (wi::to_wide (elt
), 0, 1))
7847 unsigned int bit
= i
* elt_bits
;
7848 ptr
[bit
/ BITS_PER_UNIT
] |= 1 << (bit
% BITS_PER_UNIT
);
7851 return extract_bytes
;
7855 int size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (itype
));
7856 for (unsigned HOST_WIDE_INT i
= 0; i
< count
; i
++)
7863 tree elem
= VECTOR_CST_ELT (expr
, i
);
7864 int res
= native_encode_expr (elem
, ptr
? ptr
+ offset
: NULL
,
7866 if ((off
== -1 && res
!= size
) || res
== 0)
7870 return (off
== -1 && i
< count
- 1) ? 0 : offset
;
7877 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7878 specified by EXPR into the buffer PTR of length LEN bytes.
7879 Return the number of bytes placed in the buffer, or zero
7883 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7885 unsigned HOST_WIDE_INT count
;
7886 if (!VECTOR_CST_NELTS (expr
).is_constant (&count
))
7888 return native_encode_vector_part (expr
, ptr
, len
, off
, count
);
7892 /* Subroutine of native_encode_expr. Encode the STRING_CST
7893 specified by EXPR into the buffer PTR of length LEN bytes.
7894 Return the number of bytes placed in the buffer, or zero
7898 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7900 tree type
= TREE_TYPE (expr
);
7902 /* Wide-char strings are encoded in target byte-order so native
7903 encoding them is trivial. */
7904 if (BITS_PER_UNIT
!= CHAR_BIT
7905 || TREE_CODE (type
) != ARRAY_TYPE
7906 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7907 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7910 HOST_WIDE_INT total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
7911 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7915 len
= MIN (total_bytes
- off
, len
);
7921 if (off
< TREE_STRING_LENGTH (expr
))
7923 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7924 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7926 memset (ptr
+ written
, 0, len
- written
);
7932 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7933 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7934 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7935 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7936 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7937 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7938 Return the number of bytes placed in the buffer, or zero upon failure. */
7941 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7943 /* We don't support starting at negative offset and -1 is special. */
7947 switch (TREE_CODE (expr
))
7950 return native_encode_int (expr
, ptr
, len
, off
);
7953 return native_encode_real (expr
, ptr
, len
, off
);
7956 return native_encode_fixed (expr
, ptr
, len
, off
);
7959 return native_encode_complex (expr
, ptr
, len
, off
);
7962 return native_encode_vector (expr
, ptr
, len
, off
);
7965 return native_encode_string (expr
, ptr
, len
, off
);
7972 /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
7973 or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
7974 of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
7975 machine modes, we can't just use build_nonstandard_integer_type. */
7978 find_bitfield_repr_type (int fieldsize
, int len
)
7981 for (int pass
= 0; pass
< 2; pass
++)
7983 enum mode_class mclass
= pass
? MODE_PARTIAL_INT
: MODE_INT
;
7984 FOR_EACH_MODE_IN_CLASS (mode
, mclass
)
7985 if (known_ge (GET_MODE_SIZE (mode
), fieldsize
)
7986 && known_eq (GET_MODE_PRECISION (mode
),
7987 GET_MODE_BITSIZE (mode
))
7988 && known_le (GET_MODE_SIZE (mode
), len
))
7990 tree ret
= lang_hooks
.types
.type_for_mode (mode
, 1);
7991 if (ret
&& TYPE_MODE (ret
) == mode
)
7996 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
7997 if (int_n_enabled_p
[i
]
7998 && int_n_data
[i
].bitsize
>= (unsigned) (BITS_PER_UNIT
* fieldsize
)
7999 && int_n_trees
[i
].unsigned_type
)
8001 tree ret
= int_n_trees
[i
].unsigned_type
;
8002 mode
= TYPE_MODE (ret
);
8003 if (known_ge (GET_MODE_SIZE (mode
), fieldsize
)
8004 && known_eq (GET_MODE_PRECISION (mode
),
8005 GET_MODE_BITSIZE (mode
))
8006 && known_le (GET_MODE_SIZE (mode
), len
))
8013 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8014 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8015 to be non-NULL and OFF zero), then in addition to filling the
8016 bytes pointed by PTR with the value also clear any bits pointed
8017 by MASK that are known to be initialized, keep them as is for
8018 e.g. uninitialized padding bits or uninitialized fields. */
8021 native_encode_initializer (tree init
, unsigned char *ptr
, int len
,
8022 int off
, unsigned char *mask
)
8026 /* We don't support starting at negative offset and -1 is special. */
8027 if (off
< -1 || init
== NULL_TREE
)
8030 gcc_assert (mask
== NULL
|| (off
== 0 && ptr
));
8033 switch (TREE_CODE (init
))
8035 case VIEW_CONVERT_EXPR
:
8036 case NON_LVALUE_EXPR
:
8037 return native_encode_initializer (TREE_OPERAND (init
, 0), ptr
, len
, off
,
8040 r
= native_encode_expr (init
, ptr
, len
, off
);
8042 memset (mask
, 0, r
);
8045 tree type
= TREE_TYPE (init
);
8046 HOST_WIDE_INT total_bytes
= int_size_in_bytes (type
);
8047 if (total_bytes
< 0)
8049 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
8051 int o
= off
== -1 ? 0 : off
;
8052 if (TREE_CODE (type
) == ARRAY_TYPE
)
8054 HOST_WIDE_INT min_index
;
8055 unsigned HOST_WIDE_INT cnt
;
8056 HOST_WIDE_INT curpos
= 0, fieldsize
, valueinit
= -1;
8057 constructor_elt
*ce
;
8059 if (TYPE_DOMAIN (type
) == NULL_TREE
8060 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type
))))
8063 fieldsize
= int_size_in_bytes (TREE_TYPE (type
));
8067 min_index
= tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type
)));
8069 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
8071 for (cnt
= 0; ; cnt
++)
8073 tree val
= NULL_TREE
, index
= NULL_TREE
;
8074 HOST_WIDE_INT pos
= curpos
, count
= 0;
8076 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init
), cnt
, &ce
))
8081 else if (mask
== NULL
8082 || CONSTRUCTOR_NO_CLEARING (init
)
8083 || curpos
>= total_bytes
)
8087 if (index
&& TREE_CODE (index
) == RANGE_EXPR
)
8089 if (!tree_fits_shwi_p (TREE_OPERAND (index
, 0))
8090 || !tree_fits_shwi_p (TREE_OPERAND (index
, 1)))
8092 pos
= (tree_to_shwi (TREE_OPERAND (index
, 0)) - min_index
)
8094 count
= (tree_to_shwi (TREE_OPERAND (index
, 1))
8095 - tree_to_shwi (TREE_OPERAND (index
, 0)));
8099 if (!tree_fits_shwi_p (index
))
8101 pos
= (tree_to_shwi (index
) - min_index
) * fieldsize
;
8104 if (mask
&& !CONSTRUCTOR_NO_CLEARING (init
) && curpos
!= pos
)
8106 if (valueinit
== -1)
8108 tree zero
= build_zero_cst (TREE_TYPE (type
));
8109 r
= native_encode_initializer (zero
, ptr
+ curpos
,
8112 if (TREE_CODE (zero
) == CONSTRUCTOR
)
8117 curpos
+= fieldsize
;
8119 while (curpos
!= pos
)
8121 memcpy (ptr
+ curpos
, ptr
+ valueinit
, fieldsize
);
8122 memcpy (mask
+ curpos
, mask
+ valueinit
, fieldsize
);
8123 curpos
+= fieldsize
;
8133 && (curpos
+ fieldsize
8134 <= (HOST_WIDE_INT
) off
+ len
)))
8139 memcpy (ptr
+ (curpos
- o
), ptr
+ (pos
- o
),
8142 memcpy (mask
+ curpos
, mask
+ pos
, fieldsize
);
8144 else if (!native_encode_initializer (val
,
8161 else if (curpos
+ fieldsize
> off
8162 && curpos
< (HOST_WIDE_INT
) off
+ len
)
8164 /* Partial overlap. */
8165 unsigned char *p
= NULL
;
8168 gcc_assert (mask
== NULL
);
8172 p
= ptr
+ curpos
- off
;
8173 l
= MIN ((HOST_WIDE_INT
) off
+ len
- curpos
,
8182 if (!native_encode_initializer (val
, p
, l
, no
, NULL
))
8185 curpos
+= fieldsize
;
8187 while (count
-- != 0);
8189 return MIN (total_bytes
- off
, len
);
8191 else if (TREE_CODE (type
) == RECORD_TYPE
8192 || TREE_CODE (type
) == UNION_TYPE
)
8194 unsigned HOST_WIDE_INT cnt
;
8195 constructor_elt
*ce
;
8196 tree fld_base
= TYPE_FIELDS (type
);
8197 tree to_free
= NULL_TREE
;
8199 gcc_assert (TREE_CODE (type
) == RECORD_TYPE
|| mask
== NULL
);
8201 memset (ptr
, '\0', MIN (total_bytes
- o
, len
));
8202 for (cnt
= 0; ; cnt
++)
8204 tree val
= NULL_TREE
, field
= NULL_TREE
;
8205 HOST_WIDE_INT pos
= 0, fieldsize
;
8206 unsigned HOST_WIDE_INT bpos
= 0, epos
= 0;
8211 to_free
= NULL_TREE
;
8214 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init
), cnt
, &ce
))
8218 if (field
== NULL_TREE
)
8221 pos
= int_byte_position (field
);
8222 if (off
!= -1 && (HOST_WIDE_INT
) off
+ len
<= pos
)
8225 else if (mask
== NULL
8226 || CONSTRUCTOR_NO_CLEARING (init
))
8231 if (mask
&& !CONSTRUCTOR_NO_CLEARING (init
))
8234 for (fld
= fld_base
; fld
; fld
= DECL_CHAIN (fld
))
8236 if (TREE_CODE (fld
) != FIELD_DECL
)
8240 if (DECL_PADDING_P (fld
))
8242 if (DECL_SIZE_UNIT (fld
) == NULL_TREE
8243 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld
)))
8245 if (integer_zerop (DECL_SIZE_UNIT (fld
)))
8249 if (fld
== NULL_TREE
)
8255 fld_base
= DECL_CHAIN (fld
);
8260 pos
= int_byte_position (field
);
8261 val
= build_zero_cst (TREE_TYPE (fld
));
8262 if (TREE_CODE (val
) == CONSTRUCTOR
)
8267 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
8268 && TYPE_DOMAIN (TREE_TYPE (field
))
8269 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field
))))
8271 if (mask
|| off
!= -1)
8273 if (val
== NULL_TREE
)
8275 if (TREE_CODE (TREE_TYPE (val
)) != ARRAY_TYPE
)
8277 fieldsize
= int_size_in_bytes (TREE_TYPE (val
));
8279 || (int) fieldsize
!= fieldsize
8280 || (pos
+ fieldsize
) > INT_MAX
)
8282 if (pos
+ fieldsize
> total_bytes
)
8284 if (ptr
!= NULL
&& total_bytes
< len
)
8285 memset (ptr
+ total_bytes
, '\0',
8286 MIN (pos
+ fieldsize
, len
) - total_bytes
);
8287 total_bytes
= pos
+ fieldsize
;
8292 if (DECL_SIZE_UNIT (field
) == NULL_TREE
8293 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field
)))
8295 fieldsize
= tree_to_shwi (DECL_SIZE_UNIT (field
));
8300 if (DECL_BIT_FIELD (field
))
8302 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8304 fieldsize
= TYPE_PRECISION (TREE_TYPE (field
));
8305 bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8306 if (bpos
% BITS_PER_UNIT
)
8307 bpos
%= BITS_PER_UNIT
;
8311 epos
= fieldsize
% BITS_PER_UNIT
;
8312 fieldsize
+= BITS_PER_UNIT
- 1;
8313 fieldsize
/= BITS_PER_UNIT
;
8316 if (off
!= -1 && pos
+ fieldsize
<= off
)
8319 if (val
== NULL_TREE
)
8322 if (DECL_BIT_FIELD (field
))
8324 /* FIXME: Handle PDP endian. */
8325 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
8328 if (TREE_CODE (val
) != INTEGER_CST
)
8331 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8332 tree repr_type
= NULL_TREE
;
8333 HOST_WIDE_INT rpos
= 0;
8334 if (repr
&& INTEGRAL_TYPE_P (TREE_TYPE (repr
)))
8336 rpos
= int_byte_position (repr
);
8337 repr_type
= TREE_TYPE (repr
);
8341 repr_type
= find_bitfield_repr_type (fieldsize
, len
);
8342 if (repr_type
== NULL_TREE
)
8344 HOST_WIDE_INT repr_size
= int_size_in_bytes (repr_type
);
8345 gcc_assert (repr_size
> 0 && repr_size
<= len
);
8346 if (pos
+ repr_size
<= o
+ len
)
8350 rpos
= o
+ len
- repr_size
;
8351 gcc_assert (rpos
<= pos
);
8357 wide_int w
= wi::to_wide (val
, TYPE_PRECISION (repr_type
));
8358 int diff
= (TYPE_PRECISION (repr_type
)
8359 - TYPE_PRECISION (TREE_TYPE (field
)));
8360 HOST_WIDE_INT bitoff
= (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
8361 if (!BYTES_BIG_ENDIAN
)
8362 w
= wi::lshift (w
, bitoff
);
8364 w
= wi::lshift (w
, diff
- bitoff
);
8365 val
= wide_int_to_tree (repr_type
, w
);
8367 unsigned char buf
[MAX_BITSIZE_MODE_ANY_INT
8368 / BITS_PER_UNIT
+ 1];
8369 int l
= native_encode_int (val
, buf
, sizeof buf
, 0);
8370 if (l
* BITS_PER_UNIT
!= TYPE_PRECISION (repr_type
))
8376 /* If the bitfield does not start at byte boundary, handle
8377 the partial byte at the start. */
8379 && (off
== -1 || (pos
>= off
&& len
>= 1)))
8381 if (!BYTES_BIG_ENDIAN
)
8383 int msk
= (1 << bpos
) - 1;
8384 buf
[pos
- rpos
] &= ~msk
;
8385 buf
[pos
- rpos
] |= ptr
[pos
- o
] & msk
;
8388 if (fieldsize
> 1 || epos
== 0)
8391 mask
[pos
] &= (msk
| ~((1 << epos
) - 1));
8396 int msk
= (1 << (BITS_PER_UNIT
- bpos
)) - 1;
8397 buf
[pos
- rpos
] &= msk
;
8398 buf
[pos
- rpos
] |= ptr
[pos
- o
] & ~msk
;
8401 if (fieldsize
> 1 || epos
== 0)
8405 | ((1 << (BITS_PER_UNIT
- epos
))
8410 /* If the bitfield does not end at byte boundary, handle
8411 the partial byte at the end. */
8414 || pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
))
8416 if (!BYTES_BIG_ENDIAN
)
8418 int msk
= (1 << epos
) - 1;
8419 buf
[pos
- rpos
+ fieldsize
- 1] &= msk
;
8420 buf
[pos
- rpos
+ fieldsize
- 1]
8421 |= ptr
[pos
+ fieldsize
- 1 - o
] & ~msk
;
8422 if (mask
&& (fieldsize
> 1 || bpos
== 0))
8423 mask
[pos
+ fieldsize
- 1] &= ~msk
;
8427 int msk
= (1 << (BITS_PER_UNIT
- epos
)) - 1;
8428 buf
[pos
- rpos
+ fieldsize
- 1] &= ~msk
;
8429 buf
[pos
- rpos
+ fieldsize
- 1]
8430 |= ptr
[pos
+ fieldsize
- 1 - o
] & msk
;
8431 if (mask
&& (fieldsize
> 1 || bpos
== 0))
8432 mask
[pos
+ fieldsize
- 1] &= msk
;
8437 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8439 memcpy (ptr
+ pos
- o
, buf
+ (pos
- rpos
), fieldsize
);
8440 if (mask
&& (fieldsize
> (bpos
!= 0) + (epos
!= 0)))
8441 memset (mask
+ pos
+ (bpos
!= 0), 0,
8442 fieldsize
- (bpos
!= 0) - (epos
!= 0));
8446 /* Partial overlap. */
8447 HOST_WIDE_INT fsz
= fieldsize
;
8448 gcc_assert (mask
== NULL
);
8454 if (pos
+ fsz
> (HOST_WIDE_INT
) off
+ len
)
8455 fsz
= (HOST_WIDE_INT
) off
+ len
- pos
;
8456 memcpy (ptr
+ pos
- off
, buf
+ (pos
- rpos
), fsz
);
8463 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8465 int fldsize
= fieldsize
;
8468 tree fld
= DECL_CHAIN (field
);
8471 if (TREE_CODE (fld
) == FIELD_DECL
)
8473 fld
= DECL_CHAIN (fld
);
8475 if (fld
== NULL_TREE
)
8476 fldsize
= len
- pos
;
8478 r
= native_encode_initializer (val
, ptr
? ptr
+ pos
- o
8482 mask
? mask
+ pos
: NULL
);
8486 && fldsize
!= fieldsize
8488 && pos
+ r
> total_bytes
)
8489 total_bytes
= pos
+ r
;
8493 /* Partial overlap. */
8494 unsigned char *p
= NULL
;
8497 gcc_assert (mask
== NULL
);
8501 p
= ptr
+ pos
- off
;
8502 l
= MIN ((HOST_WIDE_INT
) off
+ len
- pos
,
8511 if (!native_encode_initializer (val
, p
, l
, no
, NULL
))
8515 return MIN (total_bytes
- off
, len
);
8522 /* Subroutine of native_interpret_expr. Interpret the contents of
8523 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8524 If the buffer cannot be interpreted, return NULL_TREE. */
8527 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
8529 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
8531 if (total_bytes
> len
8532 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
8535 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
8537 return wide_int_to_tree (type
, result
);
8541 /* Subroutine of native_interpret_expr. Interpret the contents of
8542 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8543 If the buffer cannot be interpreted, return NULL_TREE. */
8546 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
8548 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
8549 int total_bytes
= GET_MODE_SIZE (mode
);
8551 FIXED_VALUE_TYPE fixed_value
;
8553 if (total_bytes
> len
8554 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
8557 result
= double_int::from_buffer (ptr
, total_bytes
);
8558 fixed_value
= fixed_from_double_int (result
, mode
);
8560 return build_fixed (type
, fixed_value
);
8564 /* Subroutine of native_interpret_expr. Interpret the contents of
8565 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8566 If the buffer cannot be interpreted, return NULL_TREE. */
8569 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
8571 scalar_float_mode mode
= SCALAR_FLOAT_TYPE_MODE (type
);
8572 int total_bytes
= GET_MODE_SIZE (mode
);
8573 unsigned char value
;
8574 /* There are always 32 bits in each long, no matter the size of
8575 the hosts long. We handle floating point representations with
8580 if (total_bytes
> len
|| total_bytes
> 24)
8582 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
8584 memset (tmp
, 0, sizeof (tmp
));
8585 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
8586 bitpos
+= BITS_PER_UNIT
)
8588 /* Both OFFSET and BYTE index within a long;
8589 bitpos indexes the whole float. */
8590 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
8591 if (UNITS_PER_WORD
< 4)
8593 int word
= byte
/ UNITS_PER_WORD
;
8594 if (WORDS_BIG_ENDIAN
)
8595 word
= (words
- 1) - word
;
8596 offset
= word
* UNITS_PER_WORD
;
8597 if (BYTES_BIG_ENDIAN
)
8598 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
8600 offset
+= byte
% UNITS_PER_WORD
;
8605 if (BYTES_BIG_ENDIAN
)
8607 /* Reverse bytes within each long, or within the entire float
8608 if it's smaller than a long (for HFmode). */
8609 offset
= MIN (3, total_bytes
- 1) - offset
;
8610 gcc_assert (offset
>= 0);
8613 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
8615 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
8618 real_from_target (&r
, tmp
, mode
);
8619 tree ret
= build_real (type
, r
);
8620 if (MODE_COMPOSITE_P (mode
))
8622 /* For floating point values in composite modes, punt if this folding
8623 doesn't preserve bit representation. As the mode doesn't have fixed
8624 precision while GCC pretends it does, there could be valid values that
8625 GCC can't really represent accurately. See PR95450. */
8626 unsigned char buf
[24];
8627 if (native_encode_expr (ret
, buf
, total_bytes
, 0) != total_bytes
8628 || memcmp (ptr
, buf
, total_bytes
) != 0)
8635 /* Subroutine of native_interpret_expr. Interpret the contents of
8636 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8637 If the buffer cannot be interpreted, return NULL_TREE. */
8640 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
8642 tree etype
, rpart
, ipart
;
8645 etype
= TREE_TYPE (type
);
8646 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
8649 rpart
= native_interpret_expr (etype
, ptr
, size
);
8652 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
8655 return build_complex (type
, rpart
, ipart
);
8658 /* Read a vector of type TYPE from the target memory image given by BYTES,
8659 which contains LEN bytes. The vector is known to be encodable using
8660 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8662 Return the vector on success, otherwise return null. */
8665 native_interpret_vector_part (tree type
, const unsigned char *bytes
,
8666 unsigned int len
, unsigned int npatterns
,
8667 unsigned int nelts_per_pattern
)
8669 tree elt_type
= TREE_TYPE (type
);
8670 if (VECTOR_BOOLEAN_TYPE_P (type
)
8671 && TYPE_PRECISION (elt_type
) <= BITS_PER_UNIT
)
8673 /* This is the only case in which elements can be smaller than a byte.
8674 Element 0 is always in the lsb of the containing byte. */
8675 unsigned int elt_bits
= TYPE_PRECISION (elt_type
);
8676 if (elt_bits
* npatterns
* nelts_per_pattern
> len
* BITS_PER_UNIT
)
8679 tree_vector_builder
builder (type
, npatterns
, nelts_per_pattern
);
8680 for (unsigned int i
= 0; i
< builder
.encoded_nelts (); ++i
)
8682 unsigned int bit_index
= i
* elt_bits
;
8683 unsigned int byte_index
= bit_index
/ BITS_PER_UNIT
;
8684 unsigned int lsb
= bit_index
% BITS_PER_UNIT
;
8685 builder
.quick_push (bytes
[byte_index
] & (1 << lsb
)
8686 ? build_all_ones_cst (elt_type
)
8687 : build_zero_cst (elt_type
));
8689 return builder
.build ();
8692 unsigned int elt_bytes
= tree_to_uhwi (TYPE_SIZE_UNIT (elt_type
));
8693 if (elt_bytes
* npatterns
* nelts_per_pattern
> len
)
8696 tree_vector_builder
builder (type
, npatterns
, nelts_per_pattern
);
8697 for (unsigned int i
= 0; i
< builder
.encoded_nelts (); ++i
)
8699 tree elt
= native_interpret_expr (elt_type
, bytes
, elt_bytes
);
8702 builder
.quick_push (elt
);
8705 return builder
.build ();
8708 /* Subroutine of native_interpret_expr. Interpret the contents of
8709 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8710 If the buffer cannot be interpreted, return NULL_TREE. */
8713 native_interpret_vector (tree type
, const unsigned char *ptr
, unsigned int len
)
8717 unsigned HOST_WIDE_INT count
;
8719 etype
= TREE_TYPE (type
);
8720 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
8721 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant (&count
)
8722 || size
* count
> len
)
8725 return native_interpret_vector_part (type
, ptr
, len
, count
, 1);
8729 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8730 the buffer PTR of length LEN as a constant of type TYPE. For
8731 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8732 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8733 return NULL_TREE. */
8736 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
8738 switch (TREE_CODE (type
))
8744 case REFERENCE_TYPE
:
8745 return native_interpret_int (type
, ptr
, len
);
8748 return native_interpret_real (type
, ptr
, len
);
8750 case FIXED_POINT_TYPE
:
8751 return native_interpret_fixed (type
, ptr
, len
);
8754 return native_interpret_complex (type
, ptr
, len
);
8757 return native_interpret_vector (type
, ptr
, len
);
8764 /* Returns true if we can interpret the contents of a native encoding
8768 can_native_interpret_type_p (tree type
)
8770 switch (TREE_CODE (type
))
8776 case REFERENCE_TYPE
:
8777 case FIXED_POINT_TYPE
:
8787 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8788 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8791 native_interpret_aggregate (tree type
, const unsigned char *ptr
, int off
,
8794 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
8795 if (TREE_CODE (type
) == ARRAY_TYPE
)
8797 HOST_WIDE_INT eltsz
= int_size_in_bytes (TREE_TYPE (type
));
8798 if (eltsz
< 0 || eltsz
> len
|| TYPE_DOMAIN (type
) == NULL_TREE
)
8801 HOST_WIDE_INT cnt
= 0;
8802 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)))
8804 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))))
8806 cnt
= tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))) + 1;
8810 HOST_WIDE_INT pos
= 0;
8811 for (HOST_WIDE_INT i
= 0; i
< cnt
; i
++, pos
+= eltsz
)
8814 if (pos
>= len
|| pos
+ eltsz
> len
)
8816 if (can_native_interpret_type_p (TREE_TYPE (type
)))
8818 v
= native_interpret_expr (TREE_TYPE (type
),
8819 ptr
+ off
+ pos
, eltsz
);
8823 else if (TREE_CODE (TREE_TYPE (type
)) == RECORD_TYPE
8824 || TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
8825 v
= native_interpret_aggregate (TREE_TYPE (type
), ptr
, off
+ pos
,
8829 CONSTRUCTOR_APPEND_ELT (elts
, size_int (i
), v
);
8831 return build_constructor (type
, elts
);
8833 if (TREE_CODE (type
) != RECORD_TYPE
)
8835 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
8837 if (TREE_CODE (field
) != FIELD_DECL
|| DECL_PADDING_P (field
))
8840 HOST_WIDE_INT bitoff
= 0, pos
= 0, sz
= 0;
8843 if (DECL_BIT_FIELD (field
))
8845 fld
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8846 if (fld
&& INTEGRAL_TYPE_P (TREE_TYPE (fld
)))
8848 poly_int64 bitoffset
;
8849 poly_uint64 field_offset
, fld_offset
;
8850 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8851 && poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &fld_offset
))
8852 bitoffset
= (field_offset
- fld_offset
) * BITS_PER_UNIT
;
8855 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8856 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)));
8857 diff
= (TYPE_PRECISION (TREE_TYPE (fld
))
8858 - TYPE_PRECISION (TREE_TYPE (field
)));
8859 if (!bitoffset
.is_constant (&bitoff
)
8866 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field
)))
8868 int fieldsize
= TYPE_PRECISION (TREE_TYPE (field
));
8869 int bpos
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
8870 bpos
%= BITS_PER_UNIT
;
8872 fieldsize
+= BITS_PER_UNIT
- 1;
8873 fieldsize
/= BITS_PER_UNIT
;
8874 tree repr_type
= find_bitfield_repr_type (fieldsize
, len
);
8875 if (repr_type
== NULL_TREE
)
8877 sz
= int_size_in_bytes (repr_type
);
8878 if (sz
< 0 || sz
> len
)
8880 pos
= int_byte_position (field
);
8881 if (pos
< 0 || pos
> len
|| pos
+ fieldsize
> len
)
8884 if (pos
+ sz
<= len
)
8889 gcc_assert (rpos
<= pos
);
8891 bitoff
= (HOST_WIDE_INT
) (pos
- rpos
) * BITS_PER_UNIT
+ bpos
;
8893 diff
= (TYPE_PRECISION (repr_type
)
8894 - TYPE_PRECISION (TREE_TYPE (field
)));
8895 v
= native_interpret_expr (repr_type
, ptr
+ off
+ pos
, sz
);
8904 sz
= int_size_in_bytes (TREE_TYPE (fld
));
8905 if (sz
< 0 || sz
> len
)
8907 tree byte_pos
= byte_position (fld
);
8908 if (!tree_fits_shwi_p (byte_pos
))
8910 pos
= tree_to_shwi (byte_pos
);
8911 if (pos
< 0 || pos
> len
|| pos
+ sz
> len
)
8914 if (fld
== NULL_TREE
)
8915 /* Already handled above. */;
8916 else if (can_native_interpret_type_p (TREE_TYPE (fld
)))
8918 v
= native_interpret_expr (TREE_TYPE (fld
),
8919 ptr
+ off
+ pos
, sz
);
8923 else if (TREE_CODE (TREE_TYPE (fld
)) == RECORD_TYPE
8924 || TREE_CODE (TREE_TYPE (fld
)) == ARRAY_TYPE
)
8925 v
= native_interpret_aggregate (TREE_TYPE (fld
), ptr
, off
+ pos
, sz
);
8930 if (TREE_CODE (v
) != INTEGER_CST
)
8933 /* FIXME: Figure out how to handle PDP endian bitfields. */
8934 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
8936 if (!BYTES_BIG_ENDIAN
)
8937 v
= wide_int_to_tree (TREE_TYPE (field
),
8938 wi::lrshift (wi::to_wide (v
), bitoff
));
8940 v
= wide_int_to_tree (TREE_TYPE (field
),
8941 wi::lrshift (wi::to_wide (v
),
8944 CONSTRUCTOR_APPEND_ELT (elts
, field
, v
);
8946 return build_constructor (type
, elts
);
8949 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8950 or extracted constant positions and/or sizes aren't byte aligned. */
8952 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8953 bits between adjacent elements. AMNT should be within
8956 00011111|11100000 << 2 = 01111111|10000000
8957 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8960 shift_bytes_in_array_left (unsigned char *ptr
, unsigned int sz
,
8966 unsigned char carry_over
= 0U;
8967 unsigned char carry_mask
= (~0U) << (unsigned char) (BITS_PER_UNIT
- amnt
);
8968 unsigned char clear_mask
= (~0U) << amnt
;
8970 for (unsigned int i
= 0; i
< sz
; i
++)
8972 unsigned prev_carry_over
= carry_over
;
8973 carry_over
= (ptr
[i
] & carry_mask
) >> (BITS_PER_UNIT
- amnt
);
8978 ptr
[i
] &= clear_mask
;
8979 ptr
[i
] |= prev_carry_over
;
8984 /* Like shift_bytes_in_array_left but for big-endian.
8985 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8986 bits between adjacent elements. AMNT should be within
8989 00011111|11100000 >> 2 = 00000111|11111000
8990 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8993 shift_bytes_in_array_right (unsigned char *ptr
, unsigned int sz
,
8999 unsigned char carry_over
= 0U;
9000 unsigned char carry_mask
= ~(~0U << amnt
);
9002 for (unsigned int i
= 0; i
< sz
; i
++)
9004 unsigned prev_carry_over
= carry_over
;
9005 carry_over
= ptr
[i
] & carry_mask
;
9007 carry_over
<<= (unsigned char) BITS_PER_UNIT
- amnt
;
9009 ptr
[i
] |= prev_carry_over
;
9013 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9014 directly on the VECTOR_CST encoding, in a way that works for variable-
9015 length vectors. Return the resulting VECTOR_CST on success or null
9019 fold_view_convert_vector_encoding (tree type
, tree expr
)
9021 tree expr_type
= TREE_TYPE (expr
);
9022 poly_uint64 type_bits
, expr_bits
;
9023 if (!poly_int_tree_p (TYPE_SIZE (type
), &type_bits
)
9024 || !poly_int_tree_p (TYPE_SIZE (expr_type
), &expr_bits
))
9027 poly_uint64 type_units
= TYPE_VECTOR_SUBPARTS (type
);
9028 poly_uint64 expr_units
= TYPE_VECTOR_SUBPARTS (expr_type
);
9029 unsigned int type_elt_bits
= vector_element_size (type_bits
, type_units
);
9030 unsigned int expr_elt_bits
= vector_element_size (expr_bits
, expr_units
);
9032 /* We can only preserve the semantics of a stepped pattern if the new
9033 vector element is an integer of the same size. */
9034 if (VECTOR_CST_STEPPED_P (expr
)
9035 && (!INTEGRAL_TYPE_P (type
) || type_elt_bits
!= expr_elt_bits
))
9038 /* The number of bits needed to encode one element from every pattern
9039 of the original vector. */
9040 unsigned int expr_sequence_bits
9041 = VECTOR_CST_NPATTERNS (expr
) * expr_elt_bits
;
9043 /* The number of bits needed to encode one element from every pattern
9045 unsigned int type_sequence_bits
9046 = least_common_multiple (expr_sequence_bits
, type_elt_bits
);
9048 /* Don't try to read more bytes than are available, which can happen
9049 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9050 The general VIEW_CONVERT handling can cope with that case, so there's
9051 no point complicating things here. */
9052 unsigned int nelts_per_pattern
= VECTOR_CST_NELTS_PER_PATTERN (expr
);
9053 unsigned int buffer_bytes
= CEIL (nelts_per_pattern
* type_sequence_bits
,
9055 unsigned int buffer_bits
= buffer_bytes
* BITS_PER_UNIT
;
9056 if (known_gt (buffer_bits
, expr_bits
))
9059 /* Get enough bytes of EXPR to form the new encoding. */
9060 auto_vec
<unsigned char, 128> buffer (buffer_bytes
);
9061 buffer
.quick_grow (buffer_bytes
);
9062 if (native_encode_vector_part (expr
, buffer
.address (), buffer_bytes
, 0,
9063 buffer_bits
/ expr_elt_bits
)
9064 != (int) buffer_bytes
)
9067 /* Reencode the bytes as TYPE. */
9068 unsigned int type_npatterns
= type_sequence_bits
/ type_elt_bits
;
9069 return native_interpret_vector_part (type
, &buffer
[0], buffer
.length (),
9070 type_npatterns
, nelts_per_pattern
);
9073 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9074 TYPE at compile-time. If we're unable to perform the conversion
9075 return NULL_TREE. */
9078 fold_view_convert_expr (tree type
, tree expr
)
9080 /* We support up to 512-bit values (for V8DFmode). */
9081 unsigned char buffer
[64];
9084 /* Check that the host and target are sane. */
9085 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
9088 if (VECTOR_TYPE_P (type
) && TREE_CODE (expr
) == VECTOR_CST
)
9089 if (tree res
= fold_view_convert_vector_encoding (type
, expr
))
9092 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
9096 return native_interpret_expr (type
, buffer
, len
);
9099 /* Build an expression for the address of T. Folds away INDIRECT_REF
9100 to avoid confusing the gimplify process. */
9103 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
9105 /* The size of the object is not relevant when talking about its address. */
9106 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
9107 t
= TREE_OPERAND (t
, 0);
9109 if (TREE_CODE (t
) == INDIRECT_REF
)
9111 t
= TREE_OPERAND (t
, 0);
9113 if (TREE_TYPE (t
) != ptrtype
)
9114 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
9116 else if (TREE_CODE (t
) == MEM_REF
9117 && integer_zerop (TREE_OPERAND (t
, 1)))
9119 t
= TREE_OPERAND (t
, 0);
9121 if (TREE_TYPE (t
) != ptrtype
)
9122 t
= fold_convert_loc (loc
, ptrtype
, t
);
9124 else if (TREE_CODE (t
) == MEM_REF
9125 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
9126 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
9127 TREE_OPERAND (t
, 0),
9128 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
9129 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
9131 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
9133 if (TREE_TYPE (t
) != ptrtype
)
9134 t
= fold_convert_loc (loc
, ptrtype
, t
);
9137 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
9142 /* Build an expression for the address of T. */
9145 build_fold_addr_expr_loc (location_t loc
, tree t
)
9147 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
9149 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
9152 /* Fold a unary expression of code CODE and type TYPE with operand
9153 OP0. Return the folded expression if folding is successful.
9154 Otherwise, return NULL_TREE. */
9157 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
9161 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9163 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9164 && TREE_CODE_LENGTH (code
) == 1);
9169 if (CONVERT_EXPR_CODE_P (code
)
9170 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
9172 /* Don't use STRIP_NOPS, because signedness of argument type
9174 STRIP_SIGN_NOPS (arg0
);
9178 /* Strip any conversions that don't change the mode. This
9179 is safe for every expression, except for a comparison
9180 expression because its signedness is derived from its
9183 Note that this is done as an internal manipulation within
9184 the constant folder, in order to find the simplest
9185 representation of the arguments so that their form can be
9186 studied. In any cases, the appropriate type conversions
9187 should be put back in the tree that will get out of the
9192 if (CONSTANT_CLASS_P (arg0
))
9194 tree tem
= const_unop (code
, type
, arg0
);
9197 if (TREE_TYPE (tem
) != type
)
9198 tem
= fold_convert_loc (loc
, type
, tem
);
9204 tem
= generic_simplify (loc
, code
, type
, op0
);
9208 if (TREE_CODE_CLASS (code
) == tcc_unary
)
9210 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9211 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9212 fold_build1_loc (loc
, code
, type
,
9213 fold_convert_loc (loc
, TREE_TYPE (op0
),
9214 TREE_OPERAND (arg0
, 1))));
9215 else if (TREE_CODE (arg0
) == COND_EXPR
)
9217 tree arg01
= TREE_OPERAND (arg0
, 1);
9218 tree arg02
= TREE_OPERAND (arg0
, 2);
9219 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
9220 arg01
= fold_build1_loc (loc
, code
, type
,
9221 fold_convert_loc (loc
,
9222 TREE_TYPE (op0
), arg01
));
9223 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
9224 arg02
= fold_build1_loc (loc
, code
, type
,
9225 fold_convert_loc (loc
,
9226 TREE_TYPE (op0
), arg02
));
9227 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9230 /* If this was a conversion, and all we did was to move into
9231 inside the COND_EXPR, bring it back out. But leave it if
9232 it is a conversion from integer to integer and the
9233 result precision is no wider than a word since such a
9234 conversion is cheap and may be optimized away by combine,
9235 while it couldn't if it were outside the COND_EXPR. Then return
9236 so we don't get into an infinite recursion loop taking the
9237 conversion out and then back in. */
9239 if ((CONVERT_EXPR_CODE_P (code
)
9240 || code
== NON_LVALUE_EXPR
)
9241 && TREE_CODE (tem
) == COND_EXPR
9242 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
9243 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
9244 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
9245 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
9246 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
9247 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
9248 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
9250 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
9251 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
9252 || flag_syntax_only
))
9253 tem
= build1_loc (loc
, code
, type
,
9255 TREE_TYPE (TREE_OPERAND
9256 (TREE_OPERAND (tem
, 1), 0)),
9257 TREE_OPERAND (tem
, 0),
9258 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
9259 TREE_OPERAND (TREE_OPERAND (tem
, 2),
9267 case NON_LVALUE_EXPR
:
9268 if (!maybe_lvalue_p (op0
))
9269 return fold_convert_loc (loc
, type
, op0
);
9274 case FIX_TRUNC_EXPR
:
9275 if (COMPARISON_CLASS_P (op0
))
9277 /* If we have (type) (a CMP b) and type is an integral type, return
9278 new expression involving the new type. Canonicalize
9279 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9281 Do not fold the result as that would not simplify further, also
9282 folding again results in recursions. */
9283 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
9284 return build2_loc (loc
, TREE_CODE (op0
), type
,
9285 TREE_OPERAND (op0
, 0),
9286 TREE_OPERAND (op0
, 1));
9287 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
9288 && TREE_CODE (type
) != VECTOR_TYPE
)
9289 return build3_loc (loc
, COND_EXPR
, type
, op0
,
9290 constant_boolean_node (true, type
),
9291 constant_boolean_node (false, type
));
9294 /* Handle (T *)&A.B.C for A being of type T and B and C
9295 living at offset zero. This occurs frequently in
9296 C++ upcasting and then accessing the base. */
9297 if (TREE_CODE (op0
) == ADDR_EXPR
9298 && POINTER_TYPE_P (type
)
9299 && handled_component_p (TREE_OPERAND (op0
, 0)))
9301 poly_int64 bitsize
, bitpos
;
9304 int unsignedp
, reversep
, volatilep
;
9306 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
9307 &offset
, &mode
, &unsignedp
, &reversep
,
9309 /* If the reference was to a (constant) zero offset, we can use
9310 the address of the base if it has the same base type
9311 as the result type and the pointer type is unqualified. */
9313 && known_eq (bitpos
, 0)
9314 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
9315 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
9316 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
9317 return fold_convert_loc (loc
, type
,
9318 build_fold_addr_expr_loc (loc
, base
));
9321 if (TREE_CODE (op0
) == MODIFY_EXPR
9322 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
9323 /* Detect assigning a bitfield. */
9324 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
9326 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
9328 /* Don't leave an assignment inside a conversion
9329 unless assigning a bitfield. */
9330 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
9331 /* First do the assignment, then return converted constant. */
9332 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
9333 TREE_NO_WARNING (tem
) = 1;
9334 TREE_USED (tem
) = 1;
9338 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9339 constants (if x has signed type, the sign bit cannot be set
9340 in c). This folds extension into the BIT_AND_EXPR.
9341 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9342 very likely don't have maximal range for their precision and this
9343 transformation effectively doesn't preserve non-maximal ranges. */
9344 if (TREE_CODE (type
) == INTEGER_TYPE
9345 && TREE_CODE (op0
) == BIT_AND_EXPR
9346 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
9348 tree and_expr
= op0
;
9349 tree and0
= TREE_OPERAND (and_expr
, 0);
9350 tree and1
= TREE_OPERAND (and_expr
, 1);
9353 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
9354 || (TYPE_PRECISION (type
)
9355 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
9357 else if (TYPE_PRECISION (TREE_TYPE (and1
))
9358 <= HOST_BITS_PER_WIDE_INT
9359 && tree_fits_uhwi_p (and1
))
9361 unsigned HOST_WIDE_INT cst
;
9363 cst
= tree_to_uhwi (and1
);
9364 cst
&= HOST_WIDE_INT_M1U
9365 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
9366 change
= (cst
== 0);
9368 && !flag_syntax_only
9369 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
9372 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
9373 and0
= fold_convert_loc (loc
, uns
, and0
);
9374 and1
= fold_convert_loc (loc
, uns
, and1
);
9379 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
9380 TREE_OVERFLOW (and1
));
9381 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
9382 fold_convert_loc (loc
, type
, and0
), tem
);
9386 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9387 cast (T1)X will fold away. We assume that this happens when X itself
9389 if (POINTER_TYPE_P (type
)
9390 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9391 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
9393 tree arg00
= TREE_OPERAND (arg0
, 0);
9394 tree arg01
= TREE_OPERAND (arg0
, 1);
9396 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9397 when the pointed type needs higher alignment than
9398 the p+ first operand's pointed type. */
9400 && sanitize_flags_p (SANITIZE_ALIGNMENT
)
9401 && (min_align_of_type (TREE_TYPE (type
))
9402 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00
)))))
9405 arg00
= fold_convert_loc (loc
, type
, arg00
);
9406 return fold_build_pointer_plus_loc (loc
, arg00
, arg01
);
9409 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9410 of the same precision, and X is an integer type not narrower than
9411 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9412 if (INTEGRAL_TYPE_P (type
)
9413 && TREE_CODE (op0
) == BIT_NOT_EXPR
9414 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
9415 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
9416 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
9418 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
9419 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
9420 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
9421 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
9422 fold_convert_loc (loc
, type
, tem
));
9425 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9426 type of X and Y (integer types only). */
9427 if (INTEGRAL_TYPE_P (type
)
9428 && TREE_CODE (op0
) == MULT_EXPR
9429 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
9430 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
9432 /* Be careful not to introduce new overflows. */
9434 if (TYPE_OVERFLOW_WRAPS (type
))
9437 mult_type
= unsigned_type_for (type
);
9439 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
9441 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
9442 fold_convert_loc (loc
, mult_type
,
9443 TREE_OPERAND (op0
, 0)),
9444 fold_convert_loc (loc
, mult_type
,
9445 TREE_OPERAND (op0
, 1)));
9446 return fold_convert_loc (loc
, type
, tem
);
9452 case VIEW_CONVERT_EXPR
:
9453 if (TREE_CODE (op0
) == MEM_REF
)
9455 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
9456 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
9457 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
9458 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
9459 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
9466 tem
= fold_negate_expr (loc
, arg0
);
9468 return fold_convert_loc (loc
, type
, tem
);
9472 /* Convert fabs((double)float) into (double)fabsf(float). */
9473 if (TREE_CODE (arg0
) == NOP_EXPR
9474 && TREE_CODE (type
) == REAL_TYPE
)
9476 tree targ0
= strip_float_extensions (arg0
);
9478 return fold_convert_loc (loc
, type
,
9479 fold_build1_loc (loc
, ABS_EXPR
,
9486 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9487 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9488 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9489 fold_convert_loc (loc
, type
,
9490 TREE_OPERAND (arg0
, 0)))))
9491 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
9492 fold_convert_loc (loc
, type
,
9493 TREE_OPERAND (arg0
, 1)));
9494 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9495 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
9496 fold_convert_loc (loc
, type
,
9497 TREE_OPERAND (arg0
, 1)))))
9498 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
9499 fold_convert_loc (loc
, type
,
9500 TREE_OPERAND (arg0
, 0)), tem
);
9504 case TRUTH_NOT_EXPR
:
9505 /* Note that the operand of this must be an int
9506 and its values must be 0 or 1.
9507 ("true" is a fixed value perhaps depending on the language,
9508 but we don't handle values other than 1 correctly yet.) */
9509 tem
= fold_truth_not_expr (loc
, arg0
);
9512 return fold_convert_loc (loc
, type
, tem
);
9515 /* Fold *&X to X if X is an lvalue. */
9516 if (TREE_CODE (op0
) == ADDR_EXPR
)
9518 tree op00
= TREE_OPERAND (op0
, 0);
9520 || TREE_CODE (op00
) == PARM_DECL
9521 || TREE_CODE (op00
) == RESULT_DECL
)
9522 && !TREE_READONLY (op00
))
9529 } /* switch (code) */
9533 /* If the operation was a conversion do _not_ mark a resulting constant
9534 with TREE_OVERFLOW if the original constant was not. These conversions
9535 have implementation defined behavior and retaining the TREE_OVERFLOW
9536 flag here would confuse later passes such as VRP. */
9538 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
9539 tree type
, tree op0
)
9541 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
9543 && TREE_CODE (res
) == INTEGER_CST
9544 && TREE_CODE (op0
) == INTEGER_CST
9545 && CONVERT_EXPR_CODE_P (code
))
9546 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
9551 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9552 operands OP0 and OP1. LOC is the location of the resulting expression.
9553 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9554 Return the folded expression if folding is successful. Otherwise,
9555 return NULL_TREE. */
9557 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
9558 tree arg0
, tree arg1
, tree op0
, tree op1
)
9562 /* We only do these simplifications if we are optimizing. */
9566 /* Check for things like (A || B) && (A || C). We can convert this
9567 to A || (B && C). Note that either operator can be any of the four
9568 truth and/or operations and the transformation will still be
9569 valid. Also note that we only care about order for the
9570 ANDIF and ORIF operators. If B contains side effects, this
9571 might change the truth-value of A. */
9572 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9573 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
9574 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
9575 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
9576 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
9577 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
9579 tree a00
= TREE_OPERAND (arg0
, 0);
9580 tree a01
= TREE_OPERAND (arg0
, 1);
9581 tree a10
= TREE_OPERAND (arg1
, 0);
9582 tree a11
= TREE_OPERAND (arg1
, 1);
9583 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
9584 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
9585 && (code
== TRUTH_AND_EXPR
9586 || code
== TRUTH_OR_EXPR
));
9588 if (operand_equal_p (a00
, a10
, 0))
9589 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9590 fold_build2_loc (loc
, code
, type
, a01
, a11
));
9591 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
9592 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9593 fold_build2_loc (loc
, code
, type
, a01
, a10
));
9594 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
9595 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
9596 fold_build2_loc (loc
, code
, type
, a00
, a11
));
9598 /* This case if tricky because we must either have commutative
9599 operators or else A10 must not have side-effects. */
9601 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
9602 && operand_equal_p (a01
, a11
, 0))
9603 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
9604 fold_build2_loc (loc
, code
, type
, a00
, a10
),
9608 /* See if we can build a range comparison. */
9609 if ((tem
= fold_range_test (loc
, code
, type
, op0
, op1
)) != 0)
9612 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
9613 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
9615 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
9617 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
9620 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
9621 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
9623 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
9625 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
9628 /* Check for the possibility of merging component references. If our
9629 lhs is another similar operation, try to merge its rhs with our
9630 rhs. Then try to merge our lhs and rhs. */
9631 if (TREE_CODE (arg0
) == code
9632 && (tem
= fold_truth_andor_1 (loc
, code
, type
,
9633 TREE_OPERAND (arg0
, 1), arg1
)) != 0)
9634 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9636 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
9639 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
9640 if (param_logical_op_non_short_circuit
!= -1)
9641 logical_op_non_short_circuit
9642 = param_logical_op_non_short_circuit
;
9643 if (logical_op_non_short_circuit
9644 && !flag_sanitize_coverage
9645 && (code
== TRUTH_AND_EXPR
9646 || code
== TRUTH_ANDIF_EXPR
9647 || code
== TRUTH_OR_EXPR
9648 || code
== TRUTH_ORIF_EXPR
))
9650 enum tree_code ncode
, icode
;
9652 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
9653 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
9654 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
9656 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9657 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9658 We don't want to pack more than two leafs to a non-IF AND/OR
9660 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9661 equal to IF-CODE, then we don't want to add right-hand operand.
9662 If the inner right-hand side of left-hand operand has
9663 side-effects, or isn't simple, then we can't add to it,
9664 as otherwise we might destroy if-sequence. */
9665 if (TREE_CODE (arg0
) == icode
9666 && simple_operand_p_2 (arg1
)
9667 /* Needed for sequence points to handle trappings, and
9669 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
9671 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
9673 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
9676 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9677 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9678 else if (TREE_CODE (arg1
) == icode
9679 && simple_operand_p_2 (arg0
)
9680 /* Needed for sequence points to handle trappings, and
9682 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
9684 tem
= fold_build2_loc (loc
, ncode
, type
,
9685 arg0
, TREE_OPERAND (arg1
, 0));
9686 return fold_build2_loc (loc
, icode
, type
, tem
,
9687 TREE_OPERAND (arg1
, 1));
9689 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9691 For sequence point consistancy, we need to check for trapping,
9692 and side-effects. */
9693 else if (code
== icode
&& simple_operand_p_2 (arg0
)
9694 && simple_operand_p_2 (arg1
))
9695 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
9701 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9702 by changing CODE to reduce the magnitude of constants involved in
9703 ARG0 of the comparison.
9704 Returns a canonicalized comparison tree if a simplification was
9705 possible, otherwise returns NULL_TREE.
9706 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9707 valid if signed overflow is undefined. */
9710 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
9711 tree arg0
, tree arg1
,
9712 bool *strict_overflow_p
)
9714 enum tree_code code0
= TREE_CODE (arg0
);
9715 tree t
, cst0
= NULL_TREE
;
9718 /* Match A +- CST code arg1. We can change this only if overflow
9720 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9721 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
9722 /* In principle pointers also have undefined overflow behavior,
9723 but that causes problems elsewhere. */
9724 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
9725 && (code0
== MINUS_EXPR
9726 || code0
== PLUS_EXPR
)
9727 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
9730 /* Identify the constant in arg0 and its sign. */
9731 cst0
= TREE_OPERAND (arg0
, 1);
9732 sgn0
= tree_int_cst_sgn (cst0
);
9734 /* Overflowed constants and zero will cause problems. */
9735 if (integer_zerop (cst0
)
9736 || TREE_OVERFLOW (cst0
))
9739 /* See if we can reduce the magnitude of the constant in
9740 arg0 by changing the comparison code. */
9741 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9743 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9745 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9746 else if (code
== GT_EXPR
9747 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9749 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9750 else if (code
== LE_EXPR
9751 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9753 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9754 else if (code
== GE_EXPR
9755 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9759 *strict_overflow_p
= true;
9761 /* Now build the constant reduced in magnitude. But not if that
9762 would produce one outside of its types range. */
9763 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
9765 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
9766 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
9768 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
9769 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
9772 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
9773 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
9774 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
9775 t
= fold_convert (TREE_TYPE (arg1
), t
);
9777 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
9780 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9781 overflow further. Try to decrease the magnitude of constants involved
9782 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9783 and put sole constants at the second argument position.
9784 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9787 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
9788 tree arg0
, tree arg1
)
9791 bool strict_overflow_p
;
9792 const char * const warnmsg
= G_("assuming signed overflow does not occur "
9793 "when reducing constant in comparison");
9795 /* Try canonicalization by simplifying arg0. */
9796 strict_overflow_p
= false;
9797 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
9798 &strict_overflow_p
);
9801 if (strict_overflow_p
)
9802 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
9806 /* Try canonicalization by simplifying arg1 using the swapped
9808 code
= swap_tree_comparison (code
);
9809 strict_overflow_p
= false;
9810 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
9811 &strict_overflow_p
);
9812 if (t
&& strict_overflow_p
)
9813 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
9817 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9818 space. This is used to avoid issuing overflow warnings for
9819 expressions like &p->x which cannot wrap. */
9822 pointer_may_wrap_p (tree base
, tree offset
, poly_int64 bitpos
)
9824 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
9827 if (maybe_lt (bitpos
, 0))
9830 poly_wide_int wi_offset
;
9831 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
9832 if (offset
== NULL_TREE
)
9833 wi_offset
= wi::zero (precision
);
9834 else if (!poly_int_tree_p (offset
) || TREE_OVERFLOW (offset
))
9837 wi_offset
= wi::to_poly_wide (offset
);
9839 wi::overflow_type overflow
;
9840 poly_wide_int units
= wi::shwi (bits_to_bytes_round_down (bitpos
),
9842 poly_wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
9846 poly_uint64 total_hwi
, size
;
9847 if (!total
.to_uhwi (&total_hwi
)
9848 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base
))),
9850 || known_eq (size
, 0U))
9853 if (known_le (total_hwi
, size
))
9856 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9858 if (TREE_CODE (base
) == ADDR_EXPR
9859 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base
, 0))),
9861 && maybe_ne (size
, 0U)
9862 && known_le (total_hwi
, size
))
9868 /* Return a positive integer when the symbol DECL is known to have
9869 a nonzero address, zero when it's known not to (e.g., it's a weak
9870 symbol), and a negative integer when the symbol is not yet in the
9871 symbol table and so whether or not its address is zero is unknown.
9872 For function local objects always return positive integer. */
9874 maybe_nonzero_address (tree decl
)
9876 if (DECL_P (decl
) && decl_in_symtab_p (decl
))
9877 if (struct symtab_node
*symbol
= symtab_node::get_create (decl
))
9878 return symbol
->nonzero_address ();
9880 /* Function local objects are never NULL. */
9882 && (DECL_CONTEXT (decl
)
9883 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
9884 && auto_var_in_fn_p (decl
, DECL_CONTEXT (decl
))))
9890 /* Subroutine of fold_binary. This routine performs all of the
9891 transformations that are common to the equality/inequality
9892 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9893 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9894 fold_binary should call fold_binary. Fold a comparison with
9895 tree code CODE and type TYPE with operands OP0 and OP1. Return
9896 the folded comparison or NULL_TREE. */
9899 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
9902 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
9903 tree arg0
, arg1
, tem
;
9908 STRIP_SIGN_NOPS (arg0
);
9909 STRIP_SIGN_NOPS (arg1
);
9911 /* For comparisons of pointers we can decompose it to a compile time
9912 comparison of the base objects and the offsets into the object.
9913 This requires at least one operand being an ADDR_EXPR or a
9914 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9915 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
9916 && (TREE_CODE (arg0
) == ADDR_EXPR
9917 || TREE_CODE (arg1
) == ADDR_EXPR
9918 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9919 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
9921 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
9922 poly_int64 bitsize
, bitpos0
= 0, bitpos1
= 0;
9924 int volatilep
, reversep
, unsignedp
;
9925 bool indirect_base0
= false, indirect_base1
= false;
9927 /* Get base and offset for the access. Strip ADDR_EXPR for
9928 get_inner_reference, but put it back by stripping INDIRECT_REF
9929 off the base object if possible. indirect_baseN will be true
9930 if baseN is not an address but refers to the object itself. */
9932 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9935 = get_inner_reference (TREE_OPERAND (arg0
, 0),
9936 &bitsize
, &bitpos0
, &offset0
, &mode
,
9937 &unsignedp
, &reversep
, &volatilep
);
9938 if (TREE_CODE (base0
) == INDIRECT_REF
)
9939 base0
= TREE_OPERAND (base0
, 0);
9941 indirect_base0
= true;
9943 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9945 base0
= TREE_OPERAND (arg0
, 0);
9946 STRIP_SIGN_NOPS (base0
);
9947 if (TREE_CODE (base0
) == ADDR_EXPR
)
9950 = get_inner_reference (TREE_OPERAND (base0
, 0),
9951 &bitsize
, &bitpos0
, &offset0
, &mode
,
9952 &unsignedp
, &reversep
, &volatilep
);
9953 if (TREE_CODE (base0
) == INDIRECT_REF
)
9954 base0
= TREE_OPERAND (base0
, 0);
9956 indirect_base0
= true;
9958 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
9959 offset0
= TREE_OPERAND (arg0
, 1);
9961 offset0
= size_binop (PLUS_EXPR
, offset0
,
9962 TREE_OPERAND (arg0
, 1));
9963 if (poly_int_tree_p (offset0
))
9965 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset0
),
9966 TYPE_PRECISION (sizetype
));
9967 tem
<<= LOG2_BITS_PER_UNIT
;
9969 if (tem
.to_shwi (&bitpos0
))
9970 offset0
= NULL_TREE
;
9975 if (TREE_CODE (arg1
) == ADDR_EXPR
)
9978 = get_inner_reference (TREE_OPERAND (arg1
, 0),
9979 &bitsize
, &bitpos1
, &offset1
, &mode
,
9980 &unsignedp
, &reversep
, &volatilep
);
9981 if (TREE_CODE (base1
) == INDIRECT_REF
)
9982 base1
= TREE_OPERAND (base1
, 0);
9984 indirect_base1
= true;
9986 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9988 base1
= TREE_OPERAND (arg1
, 0);
9989 STRIP_SIGN_NOPS (base1
);
9990 if (TREE_CODE (base1
) == ADDR_EXPR
)
9993 = get_inner_reference (TREE_OPERAND (base1
, 0),
9994 &bitsize
, &bitpos1
, &offset1
, &mode
,
9995 &unsignedp
, &reversep
, &volatilep
);
9996 if (TREE_CODE (base1
) == INDIRECT_REF
)
9997 base1
= TREE_OPERAND (base1
, 0);
9999 indirect_base1
= true;
10001 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
10002 offset1
= TREE_OPERAND (arg1
, 1);
10004 offset1
= size_binop (PLUS_EXPR
, offset1
,
10005 TREE_OPERAND (arg1
, 1));
10006 if (poly_int_tree_p (offset1
))
10008 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset1
),
10009 TYPE_PRECISION (sizetype
));
10010 tem
<<= LOG2_BITS_PER_UNIT
;
10012 if (tem
.to_shwi (&bitpos1
))
10013 offset1
= NULL_TREE
;
10017 /* If we have equivalent bases we might be able to simplify. */
10018 if (indirect_base0
== indirect_base1
10019 && operand_equal_p (base0
, base1
,
10020 indirect_base0
? OEP_ADDRESS_OF
: 0))
10022 /* We can fold this expression to a constant if the non-constant
10023 offset parts are equal. */
10024 if ((offset0
== offset1
10025 || (offset0
&& offset1
10026 && operand_equal_p (offset0
, offset1
, 0)))
10029 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
10030 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10033 && maybe_ne (bitpos0
, bitpos1
)
10034 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
10035 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
10036 fold_overflow_warning (("assuming pointer wraparound does not "
10037 "occur when comparing P +- C1 with "
10039 WARN_STRICT_OVERFLOW_CONDITIONAL
);
10044 if (known_eq (bitpos0
, bitpos1
))
10045 return constant_boolean_node (true, type
);
10046 if (known_ne (bitpos0
, bitpos1
))
10047 return constant_boolean_node (false, type
);
10050 if (known_ne (bitpos0
, bitpos1
))
10051 return constant_boolean_node (true, type
);
10052 if (known_eq (bitpos0
, bitpos1
))
10053 return constant_boolean_node (false, type
);
10056 if (known_lt (bitpos0
, bitpos1
))
10057 return constant_boolean_node (true, type
);
10058 if (known_ge (bitpos0
, bitpos1
))
10059 return constant_boolean_node (false, type
);
10062 if (known_le (bitpos0
, bitpos1
))
10063 return constant_boolean_node (true, type
);
10064 if (known_gt (bitpos0
, bitpos1
))
10065 return constant_boolean_node (false, type
);
10068 if (known_ge (bitpos0
, bitpos1
))
10069 return constant_boolean_node (true, type
);
10070 if (known_lt (bitpos0
, bitpos1
))
10071 return constant_boolean_node (false, type
);
10074 if (known_gt (bitpos0
, bitpos1
))
10075 return constant_boolean_node (true, type
);
10076 if (known_le (bitpos0
, bitpos1
))
10077 return constant_boolean_node (false, type
);
10082 /* We can simplify the comparison to a comparison of the variable
10083 offset parts if the constant offset parts are equal.
10084 Be careful to use signed sizetype here because otherwise we
10085 mess with array offsets in the wrong way. This is possible
10086 because pointer arithmetic is restricted to retain within an
10087 object and overflow on pointer differences is undefined as of
10088 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10089 else if (known_eq (bitpos0
, bitpos1
)
10092 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
10093 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10095 /* By converting to signed sizetype we cover middle-end pointer
10096 arithmetic which operates on unsigned pointer types of size
10097 type size and ARRAY_REF offsets which are properly sign or
10098 zero extended from their type in case it is narrower than
10100 if (offset0
== NULL_TREE
)
10101 offset0
= build_int_cst (ssizetype
, 0);
10103 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
10104 if (offset1
== NULL_TREE
)
10105 offset1
= build_int_cst (ssizetype
, 0);
10107 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
10110 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
10111 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
10112 fold_overflow_warning (("assuming pointer wraparound does not "
10113 "occur when comparing P +- C1 with "
10115 WARN_STRICT_OVERFLOW_COMPARISON
);
10117 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
10120 /* For equal offsets we can simplify to a comparison of the
10122 else if (known_eq (bitpos0
, bitpos1
)
10124 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
10126 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
10127 && ((offset0
== offset1
)
10128 || (offset0
&& offset1
10129 && operand_equal_p (offset0
, offset1
, 0))))
10131 if (indirect_base0
)
10132 base0
= build_fold_addr_expr_loc (loc
, base0
);
10133 if (indirect_base1
)
10134 base1
= build_fold_addr_expr_loc (loc
, base1
);
10135 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
10137 /* Comparison between an ordinary (non-weak) symbol and a null
10138 pointer can be eliminated since such symbols must have a non
10139 null address. In C, relational expressions between pointers
10140 to objects and null pointers are undefined. The results
10141 below follow the C++ rules with the additional property that
10142 every object pointer compares greater than a null pointer.
10144 else if (((DECL_P (base0
)
10145 && maybe_nonzero_address (base0
) > 0
10146 /* Avoid folding references to struct members at offset 0 to
10147 prevent tests like '&ptr->firstmember == 0' from getting
10148 eliminated. When ptr is null, although the -> expression
10149 is strictly speaking invalid, GCC retains it as a matter
10150 of QoI. See PR c/44555. */
10151 && (offset0
== NULL_TREE
&& known_ne (bitpos0
, 0)))
10152 || CONSTANT_CLASS_P (base0
))
10154 /* The caller guarantees that when one of the arguments is
10155 constant (i.e., null in this case) it is second. */
10156 && integer_zerop (arg1
))
10163 return constant_boolean_node (false, type
);
10167 return constant_boolean_node (true, type
);
10169 gcc_unreachable ();
10174 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10175 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10176 the resulting offset is smaller in absolute value than the
10177 original one and has the same sign. */
10178 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10179 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
10180 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
10181 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10182 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
10183 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
10184 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10185 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
10187 tree const1
= TREE_OPERAND (arg0
, 1);
10188 tree const2
= TREE_OPERAND (arg1
, 1);
10189 tree variable1
= TREE_OPERAND (arg0
, 0);
10190 tree variable2
= TREE_OPERAND (arg1
, 0);
10192 const char * const warnmsg
= G_("assuming signed overflow does not "
10193 "occur when combining constants around "
10196 /* Put the constant on the side where it doesn't overflow and is
10197 of lower absolute value and of same sign than before. */
10198 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10199 ? MINUS_EXPR
: PLUS_EXPR
,
10201 if (!TREE_OVERFLOW (cst
)
10202 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
10203 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
10205 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
10206 return fold_build2_loc (loc
, code
, type
,
10208 fold_build2_loc (loc
, TREE_CODE (arg1
),
10213 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10214 ? MINUS_EXPR
: PLUS_EXPR
,
10216 if (!TREE_OVERFLOW (cst
)
10217 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
10218 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
10220 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
10221 return fold_build2_loc (loc
, code
, type
,
10222 fold_build2_loc (loc
, TREE_CODE (arg0
),
10229 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
10233 /* If we are comparing an expression that just has comparisons
10234 of two integer values, arithmetic expressions of those comparisons,
10235 and constants, we can simplify it. There are only three cases
10236 to check: the two values can either be equal, the first can be
10237 greater, or the second can be greater. Fold the expression for
10238 those three values. Since each value must be 0 or 1, we have
10239 eight possibilities, each of which corresponds to the constant 0
10240 or 1 or one of the six possible comparisons.
10242 This handles common cases like (a > b) == 0 but also handles
10243 expressions like ((x > y) - (y > x)) > 0, which supposedly
10244 occur in macroized code. */
10246 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
10248 tree cval1
= 0, cval2
= 0;
10250 if (twoval_comparison_p (arg0
, &cval1
, &cval2
)
10251 /* Don't handle degenerate cases here; they should already
10252 have been handled anyway. */
10253 && cval1
!= 0 && cval2
!= 0
10254 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
10255 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
10256 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
10257 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
10258 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
10259 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
10260 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
10262 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
10263 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
10265 /* We can't just pass T to eval_subst in case cval1 or cval2
10266 was the same as ARG1. */
10269 = fold_build2_loc (loc
, code
, type
,
10270 eval_subst (loc
, arg0
, cval1
, maxval
,
10274 = fold_build2_loc (loc
, code
, type
,
10275 eval_subst (loc
, arg0
, cval1
, maxval
,
10279 = fold_build2_loc (loc
, code
, type
,
10280 eval_subst (loc
, arg0
, cval1
, minval
,
10284 /* All three of these results should be 0 or 1. Confirm they are.
10285 Then use those values to select the proper code to use. */
10287 if (TREE_CODE (high_result
) == INTEGER_CST
10288 && TREE_CODE (equal_result
) == INTEGER_CST
10289 && TREE_CODE (low_result
) == INTEGER_CST
)
10291 /* Make a 3-bit mask with the high-order bit being the
10292 value for `>', the next for '=', and the low for '<'. */
10293 switch ((integer_onep (high_result
) * 4)
10294 + (integer_onep (equal_result
) * 2)
10295 + integer_onep (low_result
))
10298 /* Always false. */
10299 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10320 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
10323 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
10332 /* Subroutine of fold_binary. Optimize complex multiplications of the
10333 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10334 argument EXPR represents the expression "z" of type TYPE. */
10337 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
10339 tree itype
= TREE_TYPE (type
);
10340 tree rpart
, ipart
, tem
;
10342 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
10344 rpart
= TREE_OPERAND (expr
, 0);
10345 ipart
= TREE_OPERAND (expr
, 1);
10347 else if (TREE_CODE (expr
) == COMPLEX_CST
)
10349 rpart
= TREE_REALPART (expr
);
10350 ipart
= TREE_IMAGPART (expr
);
10354 expr
= save_expr (expr
);
10355 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
10356 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
10359 rpart
= save_expr (rpart
);
10360 ipart
= save_expr (ipart
);
10361 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
10362 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
10363 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
10364 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
10365 build_zero_cst (itype
));
10369 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10370 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10371 true if successful. */
10374 vec_cst_ctor_to_array (tree arg
, unsigned int nelts
, tree
*elts
)
10376 unsigned HOST_WIDE_INT i
, nunits
;
10378 if (TREE_CODE (arg
) == VECTOR_CST
10379 && VECTOR_CST_NELTS (arg
).is_constant (&nunits
))
10381 for (i
= 0; i
< nunits
; ++i
)
10382 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
10384 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
10386 constructor_elt
*elt
;
10388 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
10389 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
10392 elts
[i
] = elt
->value
;
10396 for (; i
< nelts
; i
++)
10398 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
10402 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10403 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10404 NULL_TREE otherwise. */
10407 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const vec_perm_indices
&sel
)
10410 unsigned HOST_WIDE_INT nelts
;
10411 bool need_ctor
= false;
10413 if (!sel
.length ().is_constant (&nelts
))
10415 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type
), nelts
)
10416 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)), nelts
)
10417 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)), nelts
));
10418 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
10419 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
10422 tree
*in_elts
= XALLOCAVEC (tree
, nelts
* 2);
10423 if (!vec_cst_ctor_to_array (arg0
, nelts
, in_elts
)
10424 || !vec_cst_ctor_to_array (arg1
, nelts
, in_elts
+ nelts
))
10427 tree_vector_builder
out_elts (type
, nelts
, 1);
10428 for (i
= 0; i
< nelts
; i
++)
10430 HOST_WIDE_INT index
;
10431 if (!sel
[i
].is_constant (&index
))
10433 if (!CONSTANT_CLASS_P (in_elts
[index
]))
10435 out_elts
.quick_push (unshare_expr (in_elts
[index
]));
10440 vec
<constructor_elt
, va_gc
> *v
;
10441 vec_alloc (v
, nelts
);
10442 for (i
= 0; i
< nelts
; i
++)
10443 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, out_elts
[i
]);
10444 return build_constructor (type
, v
);
10447 return out_elts
.build ();
10450 /* Try to fold a pointer difference of type TYPE two address expressions of
10451 array references AREF0 and AREF1 using location LOC. Return a
10452 simplified expression for the difference or NULL_TREE. */
10455 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
10456 tree aref0
, tree aref1
,
10457 bool use_pointer_diff
)
10459 tree base0
= TREE_OPERAND (aref0
, 0);
10460 tree base1
= TREE_OPERAND (aref1
, 0);
10461 tree base_offset
= build_int_cst (type
, 0);
10463 /* If the bases are array references as well, recurse. If the bases
10464 are pointer indirections compute the difference of the pointers.
10465 If the bases are equal, we are set. */
10466 if ((TREE_CODE (base0
) == ARRAY_REF
10467 && TREE_CODE (base1
) == ARRAY_REF
10469 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
,
10470 use_pointer_diff
)))
10471 || (INDIRECT_REF_P (base0
)
10472 && INDIRECT_REF_P (base1
)
10475 ? fold_binary_loc (loc
, POINTER_DIFF_EXPR
, type
,
10476 TREE_OPERAND (base0
, 0),
10477 TREE_OPERAND (base1
, 0))
10478 : fold_binary_loc (loc
, MINUS_EXPR
, type
,
10479 fold_convert (type
,
10480 TREE_OPERAND (base0
, 0)),
10481 fold_convert (type
,
10482 TREE_OPERAND (base1
, 0)))))
10483 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
10485 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
10486 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
10487 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
10488 tree diff
= fold_build2_loc (loc
, MINUS_EXPR
, type
, op0
, op1
);
10489 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10491 fold_build2_loc (loc
, MULT_EXPR
, type
,
10497 /* If the real or vector real constant CST of type TYPE has an exact
10498 inverse, return it, else return NULL. */
10501 exact_inverse (tree type
, tree cst
)
10507 switch (TREE_CODE (cst
))
10510 r
= TREE_REAL_CST (cst
);
10512 if (exact_real_inverse (TYPE_MODE (type
), &r
))
10513 return build_real (type
, r
);
10519 unit_type
= TREE_TYPE (type
);
10520 mode
= TYPE_MODE (unit_type
);
10522 tree_vector_builder elts
;
10523 if (!elts
.new_unary_operation (type
, cst
, false))
10525 unsigned int count
= elts
.encoded_nelts ();
10526 for (unsigned int i
= 0; i
< count
; ++i
)
10528 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
10529 if (!exact_real_inverse (mode
, &r
))
10531 elts
.quick_push (build_real (unit_type
, r
));
10534 return elts
.build ();
10542 /* Mask out the tz least significant bits of X of type TYPE where
10543 tz is the number of trailing zeroes in Y. */
10545 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
10547 int tz
= wi::ctz (y
);
10549 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
10553 /* Return true when T is an address and is known to be nonzero.
10554 For floating point we further ensure that T is not denormal.
10555 Similar logic is present in nonzero_address in rtlanal.h.
10557 If the return value is based on the assumption that signed overflow
10558 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10559 change *STRICT_OVERFLOW_P. */
10562 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
10564 tree type
= TREE_TYPE (t
);
10565 enum tree_code code
;
10567 /* Doing something useful for floating point would need more work. */
10568 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
10571 code
= TREE_CODE (t
);
10572 switch (TREE_CODE_CLASS (code
))
10575 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10576 strict_overflow_p
);
10578 case tcc_comparison
:
10579 return tree_binary_nonzero_warnv_p (code
, type
,
10580 TREE_OPERAND (t
, 0),
10581 TREE_OPERAND (t
, 1),
10582 strict_overflow_p
);
10584 case tcc_declaration
:
10585 case tcc_reference
:
10586 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
10594 case TRUTH_NOT_EXPR
:
10595 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10596 strict_overflow_p
);
10598 case TRUTH_AND_EXPR
:
10599 case TRUTH_OR_EXPR
:
10600 case TRUTH_XOR_EXPR
:
10601 return tree_binary_nonzero_warnv_p (code
, type
,
10602 TREE_OPERAND (t
, 0),
10603 TREE_OPERAND (t
, 1),
10604 strict_overflow_p
);
10611 case WITH_SIZE_EXPR
:
10613 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
10615 case COMPOUND_EXPR
:
10618 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
10619 strict_overflow_p
);
10622 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
10623 strict_overflow_p
);
10627 tree fndecl
= get_callee_fndecl (t
);
10628 if (!fndecl
) return false;
10629 if (flag_delete_null_pointer_checks
&& !flag_check_new
10630 && DECL_IS_OPERATOR_NEW_P (fndecl
)
10631 && !TREE_NOTHROW (fndecl
))
10633 if (flag_delete_null_pointer_checks
10634 && lookup_attribute ("returns_nonnull",
10635 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
10637 return alloca_call_p (t
);
10646 /* Return true when T is an address and is known to be nonzero.
10647 Handle warnings about undefined signed overflow. */
10650 tree_expr_nonzero_p (tree t
)
10652 bool ret
, strict_overflow_p
;
10654 strict_overflow_p
= false;
10655 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
10656 if (strict_overflow_p
)
10657 fold_overflow_warning (("assuming signed overflow does not occur when "
10658 "determining that expression is always "
10660 WARN_STRICT_OVERFLOW_MISC
);
10664 /* Return true if T is known not to be equal to an integer W. */
10667 expr_not_equal_to (tree t
, const wide_int
&w
)
10670 switch (TREE_CODE (t
))
10673 return wi::to_wide (t
) != w
;
10676 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
10678 get_range_info (t
, vr
);
10679 if (!vr
.undefined_p ()
10680 && !vr
.contains_p (wide_int_to_tree (TREE_TYPE (t
), w
)))
10682 /* If T has some known zero bits and W has any of those bits set,
10683 then T is known not to be equal to W. */
10684 if (wi::ne_p (wi::zext (wi::bit_and_not (w
, get_nonzero_bits (t
)),
10685 TYPE_PRECISION (TREE_TYPE (t
))), 0))
10694 /* Fold a binary expression of code CODE and type TYPE with operands
10695 OP0 and OP1. LOC is the location of the resulting expression.
10696 Return the folded expression if folding is successful. Otherwise,
10697 return NULL_TREE. */
10700 fold_binary_loc (location_t loc
, enum tree_code code
, tree type
,
10701 tree op0
, tree op1
)
10703 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
10704 tree arg0
, arg1
, tem
;
10705 tree t1
= NULL_TREE
;
10706 bool strict_overflow_p
;
10709 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
10710 && TREE_CODE_LENGTH (code
) == 2
10711 && op0
!= NULL_TREE
10712 && op1
!= NULL_TREE
);
10717 /* Strip any conversions that don't change the mode. This is
10718 safe for every expression, except for a comparison expression
10719 because its signedness is derived from its operands. So, in
10720 the latter case, only strip conversions that don't change the
10721 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10724 Note that this is done as an internal manipulation within the
10725 constant folder, in order to find the simplest representation
10726 of the arguments so that their form can be studied. In any
10727 cases, the appropriate type conversions should be put back in
10728 the tree that will get out of the constant folder. */
10730 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
10732 STRIP_SIGN_NOPS (arg0
);
10733 STRIP_SIGN_NOPS (arg1
);
10741 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10742 constant but we can't do arithmetic on them. */
10743 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
10745 tem
= const_binop (code
, type
, arg0
, arg1
);
10746 if (tem
!= NULL_TREE
)
10748 if (TREE_TYPE (tem
) != type
)
10749 tem
= fold_convert_loc (loc
, type
, tem
);
10754 /* If this is a commutative operation, and ARG0 is a constant, move it
10755 to ARG1 to reduce the number of tests below. */
10756 if (commutative_tree_code (code
)
10757 && tree_swap_operands_p (arg0
, arg1
))
10758 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
10760 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10761 to ARG1 to reduce the number of tests below. */
10762 if (kind
== tcc_comparison
10763 && tree_swap_operands_p (arg0
, arg1
))
10764 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
10766 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
10770 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10772 First check for cases where an arithmetic operation is applied to a
10773 compound, conditional, or comparison operation. Push the arithmetic
10774 operation inside the compound or conditional to see if any folding
10775 can then be done. Convert comparison to conditional for this purpose.
10776 The also optimizes non-constant cases that used to be done in
10779 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10780 one of the operands is a comparison and the other is a comparison, a
10781 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10782 code below would make the expression more complex. Change it to a
10783 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10784 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10786 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
10787 || code
== EQ_EXPR
|| code
== NE_EXPR
)
10788 && !VECTOR_TYPE_P (TREE_TYPE (arg0
))
10789 && ((truth_value_p (TREE_CODE (arg0
))
10790 && (truth_value_p (TREE_CODE (arg1
))
10791 || (TREE_CODE (arg1
) == BIT_AND_EXPR
10792 && integer_onep (TREE_OPERAND (arg1
, 1)))))
10793 || (truth_value_p (TREE_CODE (arg1
))
10794 && (truth_value_p (TREE_CODE (arg0
))
10795 || (TREE_CODE (arg0
) == BIT_AND_EXPR
10796 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
10798 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
10799 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
10802 fold_convert_loc (loc
, boolean_type_node
, arg0
),
10803 fold_convert_loc (loc
, boolean_type_node
, arg1
));
10805 if (code
== EQ_EXPR
)
10806 tem
= invert_truthvalue_loc (loc
, tem
);
10808 return fold_convert_loc (loc
, type
, tem
);
10811 if (TREE_CODE_CLASS (code
) == tcc_binary
10812 || TREE_CODE_CLASS (code
) == tcc_comparison
)
10814 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
10816 tem
= fold_build2_loc (loc
, code
, type
,
10817 fold_convert_loc (loc
, TREE_TYPE (op0
),
10818 TREE_OPERAND (arg0
, 1)), op1
);
10819 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10822 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
10824 tem
= fold_build2_loc (loc
, code
, type
, op0
,
10825 fold_convert_loc (loc
, TREE_TYPE (op1
),
10826 TREE_OPERAND (arg1
, 1)));
10827 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
10831 if (TREE_CODE (arg0
) == COND_EXPR
10832 || TREE_CODE (arg0
) == VEC_COND_EXPR
10833 || COMPARISON_CLASS_P (arg0
))
10835 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10837 /*cond_first_p=*/1);
10838 if (tem
!= NULL_TREE
)
10842 if (TREE_CODE (arg1
) == COND_EXPR
10843 || TREE_CODE (arg1
) == VEC_COND_EXPR
10844 || COMPARISON_CLASS_P (arg1
))
10846 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10848 /*cond_first_p=*/0);
10849 if (tem
!= NULL_TREE
)
10857 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10858 if (TREE_CODE (arg0
) == ADDR_EXPR
10859 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10861 tree iref
= TREE_OPERAND (arg0
, 0);
10862 return fold_build2 (MEM_REF
, type
,
10863 TREE_OPERAND (iref
, 0),
10864 int_const_binop (PLUS_EXPR
, arg1
,
10865 TREE_OPERAND (iref
, 1)));
10868 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10869 if (TREE_CODE (arg0
) == ADDR_EXPR
10870 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10873 poly_int64 coffset
;
10874 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10878 return fold_build2 (MEM_REF
, type
,
10879 build1 (ADDR_EXPR
, TREE_TYPE (arg0
), base
),
10880 int_const_binop (PLUS_EXPR
, arg1
,
10881 size_int (coffset
)));
10886 case POINTER_PLUS_EXPR
:
10887 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10888 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10889 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10890 return fold_convert_loc (loc
, type
,
10891 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10892 fold_convert_loc (loc
, sizetype
,
10894 fold_convert_loc (loc
, sizetype
,
10900 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10902 /* X + (X / CST) * -CST is X % CST. */
10903 if (TREE_CODE (arg1
) == MULT_EXPR
10904 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10905 && operand_equal_p (arg0
,
10906 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10908 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10909 tree cst1
= TREE_OPERAND (arg1
, 1);
10910 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10912 if (sum
&& integer_zerop (sum
))
10913 return fold_convert_loc (loc
, type
,
10914 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10915 TREE_TYPE (arg0
), arg0
,
10920 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10921 one. Make sure the type is not saturating and has the signedness of
10922 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10923 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10924 if ((TREE_CODE (arg0
) == MULT_EXPR
10925 || TREE_CODE (arg1
) == MULT_EXPR
)
10926 && !TYPE_SATURATING (type
)
10927 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10928 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10929 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10931 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10936 if (! FLOAT_TYPE_P (type
))
10938 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10939 (plus (plus (mult) (mult)) (foo)) so that we can
10940 take advantage of the factoring cases below. */
10941 if (ANY_INTEGRAL_TYPE_P (type
)
10942 && TYPE_OVERFLOW_WRAPS (type
)
10943 && (((TREE_CODE (arg0
) == PLUS_EXPR
10944 || TREE_CODE (arg0
) == MINUS_EXPR
)
10945 && TREE_CODE (arg1
) == MULT_EXPR
)
10946 || ((TREE_CODE (arg1
) == PLUS_EXPR
10947 || TREE_CODE (arg1
) == MINUS_EXPR
)
10948 && TREE_CODE (arg0
) == MULT_EXPR
)))
10950 tree parg0
, parg1
, parg
, marg
;
10951 enum tree_code pcode
;
10953 if (TREE_CODE (arg1
) == MULT_EXPR
)
10954 parg
= arg0
, marg
= arg1
;
10956 parg
= arg1
, marg
= arg0
;
10957 pcode
= TREE_CODE (parg
);
10958 parg0
= TREE_OPERAND (parg
, 0);
10959 parg1
= TREE_OPERAND (parg
, 1);
10960 STRIP_NOPS (parg0
);
10961 STRIP_NOPS (parg1
);
10963 if (TREE_CODE (parg0
) == MULT_EXPR
10964 && TREE_CODE (parg1
) != MULT_EXPR
)
10965 return fold_build2_loc (loc
, pcode
, type
,
10966 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10967 fold_convert_loc (loc
, type
,
10969 fold_convert_loc (loc
, type
,
10971 fold_convert_loc (loc
, type
, parg1
));
10972 if (TREE_CODE (parg0
) != MULT_EXPR
10973 && TREE_CODE (parg1
) == MULT_EXPR
)
10975 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10976 fold_convert_loc (loc
, type
, parg0
),
10977 fold_build2_loc (loc
, pcode
, type
,
10978 fold_convert_loc (loc
, type
, marg
),
10979 fold_convert_loc (loc
, type
,
10985 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10986 to __complex__ ( x, y ). This is not the same for SNaNs or
10987 if signed zeros are involved. */
10988 if (!HONOR_SNANS (element_mode (arg0
))
10989 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10990 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10992 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10993 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10994 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10995 bool arg0rz
= false, arg0iz
= false;
10996 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10997 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10999 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
11000 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
11001 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
11003 tree rp
= arg1r
? arg1r
11004 : build1 (REALPART_EXPR
, rtype
, arg1
);
11005 tree ip
= arg0i
? arg0i
11006 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
11007 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11009 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
11011 tree rp
= arg0r
? arg0r
11012 : build1 (REALPART_EXPR
, rtype
, arg0
);
11013 tree ip
= arg1i
? arg1i
11014 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
11015 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11020 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11021 We associate floats only if the user has specified
11022 -fassociative-math. */
11023 if (flag_associative_math
11024 && TREE_CODE (arg1
) == PLUS_EXPR
11025 && TREE_CODE (arg0
) != MULT_EXPR
)
11027 tree tree10
= TREE_OPERAND (arg1
, 0);
11028 tree tree11
= TREE_OPERAND (arg1
, 1);
11029 if (TREE_CODE (tree11
) == MULT_EXPR
11030 && TREE_CODE (tree10
) == MULT_EXPR
)
11033 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
11034 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
11037 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11038 We associate floats only if the user has specified
11039 -fassociative-math. */
11040 if (flag_associative_math
11041 && TREE_CODE (arg0
) == PLUS_EXPR
11042 && TREE_CODE (arg1
) != MULT_EXPR
)
11044 tree tree00
= TREE_OPERAND (arg0
, 0);
11045 tree tree01
= TREE_OPERAND (arg0
, 1);
11046 if (TREE_CODE (tree01
) == MULT_EXPR
11047 && TREE_CODE (tree00
) == MULT_EXPR
)
11050 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
11051 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
11057 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11058 is a rotate of A by C1 bits. */
11059 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11060 is a rotate of A by B bits.
11061 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11062 though in this case CODE must be | and not + or ^, otherwise
11063 it doesn't return A when B is 0. */
11065 enum tree_code code0
, code1
;
11067 code0
= TREE_CODE (arg0
);
11068 code1
= TREE_CODE (arg1
);
11069 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
11070 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
11071 && operand_equal_p (TREE_OPERAND (arg0
, 0),
11072 TREE_OPERAND (arg1
, 0), 0)
11073 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
11074 TYPE_UNSIGNED (rtype
))
11075 /* Only create rotates in complete modes. Other cases are not
11076 expanded properly. */
11077 && (element_precision (rtype
)
11078 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
11080 tree tree01
, tree11
;
11081 tree orig_tree01
, orig_tree11
;
11082 enum tree_code code01
, code11
;
11084 tree01
= orig_tree01
= TREE_OPERAND (arg0
, 1);
11085 tree11
= orig_tree11
= TREE_OPERAND (arg1
, 1);
11086 STRIP_NOPS (tree01
);
11087 STRIP_NOPS (tree11
);
11088 code01
= TREE_CODE (tree01
);
11089 code11
= TREE_CODE (tree11
);
11090 if (code11
!= MINUS_EXPR
11091 && (code01
== MINUS_EXPR
|| code01
== BIT_AND_EXPR
))
11093 std::swap (code0
, code1
);
11094 std::swap (code01
, code11
);
11095 std::swap (tree01
, tree11
);
11096 std::swap (orig_tree01
, orig_tree11
);
11098 if (code01
== INTEGER_CST
11099 && code11
== INTEGER_CST
11100 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
11101 == element_precision (rtype
)))
11103 tem
= build2_loc (loc
, LROTATE_EXPR
,
11104 rtype
, TREE_OPERAND (arg0
, 0),
11105 code0
== LSHIFT_EXPR
11106 ? orig_tree01
: orig_tree11
);
11107 return fold_convert_loc (loc
, type
, tem
);
11109 else if (code11
== MINUS_EXPR
)
11111 tree tree110
, tree111
;
11112 tree110
= TREE_OPERAND (tree11
, 0);
11113 tree111
= TREE_OPERAND (tree11
, 1);
11114 STRIP_NOPS (tree110
);
11115 STRIP_NOPS (tree111
);
11116 if (TREE_CODE (tree110
) == INTEGER_CST
11117 && compare_tree_int (tree110
,
11118 element_precision (rtype
)) == 0
11119 && operand_equal_p (tree01
, tree111
, 0))
11121 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
11122 ? LROTATE_EXPR
: RROTATE_EXPR
),
11123 rtype
, TREE_OPERAND (arg0
, 0),
11125 return fold_convert_loc (loc
, type
, tem
);
11128 else if (code
== BIT_IOR_EXPR
11129 && code11
== BIT_AND_EXPR
11130 && pow2p_hwi (element_precision (rtype
)))
11132 tree tree110
, tree111
;
11133 tree110
= TREE_OPERAND (tree11
, 0);
11134 tree111
= TREE_OPERAND (tree11
, 1);
11135 STRIP_NOPS (tree110
);
11136 STRIP_NOPS (tree111
);
11137 if (TREE_CODE (tree110
) == NEGATE_EXPR
11138 && TREE_CODE (tree111
) == INTEGER_CST
11139 && compare_tree_int (tree111
,
11140 element_precision (rtype
) - 1) == 0
11141 && operand_equal_p (tree01
, TREE_OPERAND (tree110
, 0), 0))
11143 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
11144 ? LROTATE_EXPR
: RROTATE_EXPR
),
11145 rtype
, TREE_OPERAND (arg0
, 0),
11147 return fold_convert_loc (loc
, type
, tem
);
11154 /* In most languages, can't associate operations on floats through
11155 parentheses. Rather than remember where the parentheses were, we
11156 don't associate floats at all, unless the user has specified
11157 -fassociative-math.
11158 And, we need to make sure type is not saturating. */
11160 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
11161 && !TYPE_SATURATING (type
))
11163 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
11164 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
11168 /* Split both trees into variables, constants, and literals. Then
11169 associate each group together, the constants with literals,
11170 then the result with variables. This increases the chances of
11171 literals being recombined later and of generating relocatable
11172 expressions for the sum of a constant and literal. */
11173 var0
= split_tree (arg0
, type
, code
,
11174 &minus_var0
, &con0
, &minus_con0
,
11175 &lit0
, &minus_lit0
, 0);
11176 var1
= split_tree (arg1
, type
, code
,
11177 &minus_var1
, &con1
, &minus_con1
,
11178 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
11180 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11181 if (code
== MINUS_EXPR
)
11184 /* With undefined overflow prefer doing association in a type
11185 which wraps on overflow, if that is one of the operand types. */
11186 if ((POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
11187 && !TYPE_OVERFLOW_WRAPS (type
))
11189 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11190 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11191 atype
= TREE_TYPE (arg0
);
11192 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
11193 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
11194 atype
= TREE_TYPE (arg1
);
11195 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
11198 /* With undefined overflow we can only associate constants with one
11199 variable, and constants whose association doesn't overflow. */
11200 if ((POINTER_TYPE_P (atype
) || INTEGRAL_TYPE_P (atype
))
11201 && !TYPE_OVERFLOW_WRAPS (atype
))
11203 if ((var0
&& var1
) || (minus_var0
&& minus_var1
))
11205 /* ??? If split_tree would handle NEGATE_EXPR we could
11206 simply reject these cases and the allowed cases would
11207 be the var0/minus_var1 ones. */
11208 tree tmp0
= var0
? var0
: minus_var0
;
11209 tree tmp1
= var1
? var1
: minus_var1
;
11210 bool one_neg
= false;
11212 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
11214 tmp0
= TREE_OPERAND (tmp0
, 0);
11215 one_neg
= !one_neg
;
11217 if (CONVERT_EXPR_P (tmp0
)
11218 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
11219 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
11220 <= TYPE_PRECISION (atype
)))
11221 tmp0
= TREE_OPERAND (tmp0
, 0);
11222 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
11224 tmp1
= TREE_OPERAND (tmp1
, 0);
11225 one_neg
= !one_neg
;
11227 if (CONVERT_EXPR_P (tmp1
)
11228 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
11229 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
11230 <= TYPE_PRECISION (atype
)))
11231 tmp1
= TREE_OPERAND (tmp1
, 0);
11232 /* The only case we can still associate with two variables
11233 is if they cancel out. */
11235 || !operand_equal_p (tmp0
, tmp1
, 0))
11238 else if ((var0
&& minus_var1
11239 && ! operand_equal_p (var0
, minus_var1
, 0))
11240 || (minus_var0
&& var1
11241 && ! operand_equal_p (minus_var0
, var1
, 0)))
11245 /* Only do something if we found more than two objects. Otherwise,
11246 nothing has changed and we risk infinite recursion. */
11248 && ((var0
!= 0) + (var1
!= 0)
11249 + (minus_var0
!= 0) + (minus_var1
!= 0)
11250 + (con0
!= 0) + (con1
!= 0)
11251 + (minus_con0
!= 0) + (minus_con1
!= 0)
11252 + (lit0
!= 0) + (lit1
!= 0)
11253 + (minus_lit0
!= 0) + (minus_lit1
!= 0)) > 2)
11255 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
11256 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
11258 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
11259 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
11261 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
11262 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
11265 if (minus_var0
&& var0
)
11267 var0
= associate_trees (loc
, var0
, minus_var0
,
11268 MINUS_EXPR
, atype
);
11271 if (minus_con0
&& con0
)
11273 con0
= associate_trees (loc
, con0
, minus_con0
,
11274 MINUS_EXPR
, atype
);
11278 /* Preserve the MINUS_EXPR if the negative part of the literal is
11279 greater than the positive part. Otherwise, the multiplicative
11280 folding code (i.e extract_muldiv) may be fooled in case
11281 unsigned constants are subtracted, like in the following
11282 example: ((X*2 + 4) - 8U)/2. */
11283 if (minus_lit0
&& lit0
)
11285 if (TREE_CODE (lit0
) == INTEGER_CST
11286 && TREE_CODE (minus_lit0
) == INTEGER_CST
11287 && tree_int_cst_lt (lit0
, minus_lit0
)
11288 /* But avoid ending up with only negated parts. */
11291 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
11292 MINUS_EXPR
, atype
);
11297 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
11298 MINUS_EXPR
, atype
);
11303 /* Don't introduce overflows through reassociation. */
11304 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
11305 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
11308 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11309 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
11311 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
11315 /* Eliminate minus_con0. */
11319 con0
= associate_trees (loc
, con0
, minus_con0
,
11320 MINUS_EXPR
, atype
);
11322 var0
= associate_trees (loc
, var0
, minus_con0
,
11323 MINUS_EXPR
, atype
);
11325 gcc_unreachable ();
11329 /* Eliminate minus_var0. */
11333 con0
= associate_trees (loc
, con0
, minus_var0
,
11334 MINUS_EXPR
, atype
);
11336 gcc_unreachable ();
11341 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
11348 case POINTER_DIFF_EXPR
:
11350 /* Fold &a[i] - &a[j] to i-j. */
11351 if (TREE_CODE (arg0
) == ADDR_EXPR
11352 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
11353 && TREE_CODE (arg1
) == ADDR_EXPR
11354 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
11356 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
11357 TREE_OPERAND (arg0
, 0),
11358 TREE_OPERAND (arg1
, 0),
11360 == POINTER_DIFF_EXPR
);
11365 /* Further transformations are not for pointers. */
11366 if (code
== POINTER_DIFF_EXPR
)
11369 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11370 if (TREE_CODE (arg0
) == NEGATE_EXPR
11371 && negate_expr_p (op1
)
11372 /* If arg0 is e.g. unsigned int and type is int, then this could
11373 introduce UB, because if A is INT_MIN at runtime, the original
11374 expression can be well defined while the latter is not.
11376 && !(ANY_INTEGRAL_TYPE_P (type
)
11377 && TYPE_OVERFLOW_UNDEFINED (type
)
11378 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11379 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
11380 return fold_build2_loc (loc
, MINUS_EXPR
, type
, negate_expr (op1
),
11381 fold_convert_loc (loc
, type
,
11382 TREE_OPERAND (arg0
, 0)));
11384 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11385 __complex__ ( x, -y ). This is not the same for SNaNs or if
11386 signed zeros are involved. */
11387 if (!HONOR_SNANS (element_mode (arg0
))
11388 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
11389 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
11391 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11392 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
11393 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
11394 bool arg0rz
= false, arg0iz
= false;
11395 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
11396 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
11398 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
11399 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
11400 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
11402 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
11404 : build1 (REALPART_EXPR
, rtype
, arg1
));
11405 tree ip
= arg0i
? arg0i
11406 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
11407 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11409 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
11411 tree rp
= arg0r
? arg0r
11412 : build1 (REALPART_EXPR
, rtype
, arg0
);
11413 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
11415 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
11416 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
11421 /* A - B -> A + (-B) if B is easily negatable. */
11422 if (negate_expr_p (op1
)
11423 && ! TYPE_OVERFLOW_SANITIZED (type
)
11424 && ((FLOAT_TYPE_P (type
)
11425 /* Avoid this transformation if B is a positive REAL_CST. */
11426 && (TREE_CODE (op1
) != REAL_CST
11427 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
11428 || INTEGRAL_TYPE_P (type
)))
11429 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
11430 fold_convert_loc (loc
, type
, arg0
),
11431 negate_expr (op1
));
11433 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11434 one. Make sure the type is not saturating and has the signedness of
11435 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11436 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11437 if ((TREE_CODE (arg0
) == MULT_EXPR
11438 || TREE_CODE (arg1
) == MULT_EXPR
)
11439 && !TYPE_SATURATING (type
)
11440 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
11441 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
11442 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
11444 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
11452 if (! FLOAT_TYPE_P (type
))
11454 /* Transform x * -C into -x * C if x is easily negatable. */
11455 if (TREE_CODE (op1
) == INTEGER_CST
11456 && tree_int_cst_sgn (op1
) == -1
11457 && negate_expr_p (op0
)
11458 && negate_expr_p (op1
)
11459 && (tem
= negate_expr (op1
)) != op1
11460 && ! TREE_OVERFLOW (tem
))
11461 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11462 fold_convert_loc (loc
, type
,
11463 negate_expr (op0
)), tem
);
11465 strict_overflow_p
= false;
11466 if (TREE_CODE (arg1
) == INTEGER_CST
11467 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11468 &strict_overflow_p
)) != 0)
11470 if (strict_overflow_p
)
11471 fold_overflow_warning (("assuming signed overflow does not "
11472 "occur when simplifying "
11474 WARN_STRICT_OVERFLOW_MISC
);
11475 return fold_convert_loc (loc
, type
, tem
);
11478 /* Optimize z * conj(z) for integer complex numbers. */
11479 if (TREE_CODE (arg0
) == CONJ_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11481 return fold_mult_zconjz (loc
, type
, arg1
);
11482 if (TREE_CODE (arg1
) == CONJ_EXPR
11483 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11484 return fold_mult_zconjz (loc
, type
, arg0
);
11488 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11489 This is not the same for NaNs or if signed zeros are
11491 if (!HONOR_NANS (arg0
)
11492 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
11493 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11494 && TREE_CODE (arg1
) == COMPLEX_CST
11495 && real_zerop (TREE_REALPART (arg1
)))
11497 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
11498 if (real_onep (TREE_IMAGPART (arg1
)))
11500 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11501 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
11503 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
11504 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
11506 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11507 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
11508 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
11512 /* Optimize z * conj(z) for floating point complex numbers.
11513 Guarded by flag_unsafe_math_optimizations as non-finite
11514 imaginary components don't produce scalar results. */
11515 if (flag_unsafe_math_optimizations
11516 && TREE_CODE (arg0
) == CONJ_EXPR
11517 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11518 return fold_mult_zconjz (loc
, type
, arg1
);
11519 if (flag_unsafe_math_optimizations
11520 && TREE_CODE (arg1
) == CONJ_EXPR
11521 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11522 return fold_mult_zconjz (loc
, type
, arg0
);
11527 /* Canonicalize (X & C1) | C2. */
11528 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11529 && TREE_CODE (arg1
) == INTEGER_CST
11530 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11532 int width
= TYPE_PRECISION (type
), w
;
11533 wide_int c1
= wi::to_wide (TREE_OPERAND (arg0
, 1));
11534 wide_int c2
= wi::to_wide (arg1
);
11536 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11537 if ((c1
& c2
) == c1
)
11538 return omit_one_operand_loc (loc
, type
, arg1
,
11539 TREE_OPERAND (arg0
, 0));
11541 wide_int msk
= wi::mask (width
, false,
11542 TYPE_PRECISION (TREE_TYPE (arg1
)));
11544 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11545 if (wi::bit_and_not (msk
, c1
| c2
) == 0)
11547 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11548 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11551 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11552 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11553 mode which allows further optimizations. */
11556 wide_int c3
= wi::bit_and_not (c1
, c2
);
11557 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11559 wide_int mask
= wi::mask (w
, false,
11560 TYPE_PRECISION (type
));
11561 if (((c1
| c2
) & mask
) == mask
11562 && wi::bit_and_not (c1
, mask
) == 0)
11571 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11572 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
11573 wide_int_to_tree (type
, c3
));
11574 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11578 /* See if this can be simplified into a rotate first. If that
11579 is unsuccessful continue in the association code. */
11583 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11584 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11585 && INTEGRAL_TYPE_P (type
)
11586 && integer_onep (TREE_OPERAND (arg0
, 1))
11587 && integer_onep (arg1
))
11588 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11589 build_zero_cst (TREE_TYPE (arg0
)));
11591 /* See if this can be simplified into a rotate first. If that
11592 is unsuccessful continue in the association code. */
11596 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11597 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11598 && INTEGRAL_TYPE_P (type
)
11599 && integer_onep (TREE_OPERAND (arg0
, 1))
11600 && integer_onep (arg1
))
11603 tem
= TREE_OPERAND (arg0
, 0);
11604 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11605 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11607 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11608 build_zero_cst (TREE_TYPE (tem
)));
11610 /* Fold ~X & 1 as (X & 1) == 0. */
11611 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11612 && INTEGRAL_TYPE_P (type
)
11613 && integer_onep (arg1
))
11616 tem
= TREE_OPERAND (arg0
, 0);
11617 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11618 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11620 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11621 build_zero_cst (TREE_TYPE (tem
)));
11623 /* Fold !X & 1 as X == 0. */
11624 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11625 && integer_onep (arg1
))
11627 tem
= TREE_OPERAND (arg0
, 0);
11628 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11629 build_zero_cst (TREE_TYPE (tem
)));
11632 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11633 multiple of 1 << CST. */
11634 if (TREE_CODE (arg1
) == INTEGER_CST
)
11636 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
11637 wide_int ncst1
= -cst1
;
11638 if ((cst1
& ncst1
) == ncst1
11639 && multiple_of_p (type
, arg0
,
11640 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11641 return fold_convert_loc (loc
, type
, arg0
);
11644 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11646 if (TREE_CODE (arg1
) == INTEGER_CST
11647 && TREE_CODE (arg0
) == MULT_EXPR
11648 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11650 wi::tree_to_wide_ref warg1
= wi::to_wide (arg1
);
11652 = mask_with_tz (type
, warg1
, wi::to_wide (TREE_OPERAND (arg0
, 1)));
11655 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11657 else if (masked
!= warg1
)
11659 /* Avoid the transform if arg1 is a mask of some
11660 mode which allows further optimizations. */
11661 int pop
= wi::popcount (warg1
);
11662 if (!(pop
>= BITS_PER_UNIT
11664 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11665 return fold_build2_loc (loc
, code
, type
, op0
,
11666 wide_int_to_tree (type
, masked
));
11670 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11671 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11672 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11674 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11676 wide_int mask
= wide_int::from (wi::to_wide (arg1
), prec
, UNSIGNED
);
11679 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11685 /* Don't touch a floating-point divide by zero unless the mode
11686 of the constant can represent infinity. */
11687 if (TREE_CODE (arg1
) == REAL_CST
11688 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11689 && real_zerop (arg1
))
11692 /* (-A) / (-B) -> A / B */
11693 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11694 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11695 TREE_OPERAND (arg0
, 0),
11696 negate_expr (arg1
));
11697 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11698 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11699 negate_expr (arg0
),
11700 TREE_OPERAND (arg1
, 0));
11703 case TRUNC_DIV_EXPR
:
11706 case FLOOR_DIV_EXPR
:
11707 /* Simplify A / (B << N) where A and B are positive and B is
11708 a power of 2, to A >> (N + log2(B)). */
11709 strict_overflow_p
= false;
11710 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11711 && (TYPE_UNSIGNED (type
)
11712 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11714 tree sval
= TREE_OPERAND (arg1
, 0);
11715 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11717 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11718 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11719 wi::exact_log2 (wi::to_wide (sval
)));
11721 if (strict_overflow_p
)
11722 fold_overflow_warning (("assuming signed overflow does not "
11723 "occur when simplifying A / (B << N)"),
11724 WARN_STRICT_OVERFLOW_MISC
);
11726 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11728 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11729 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11735 case ROUND_DIV_EXPR
:
11736 case CEIL_DIV_EXPR
:
11737 case EXACT_DIV_EXPR
:
11738 if (integer_zerop (arg1
))
11741 /* Convert -A / -B to A / B when the type is signed and overflow is
11743 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11744 && TREE_CODE (op0
) == NEGATE_EXPR
11745 && negate_expr_p (op1
))
11747 if (ANY_INTEGRAL_TYPE_P (type
))
11748 fold_overflow_warning (("assuming signed overflow does not occur "
11749 "when distributing negation across "
11751 WARN_STRICT_OVERFLOW_MISC
);
11752 return fold_build2_loc (loc
, code
, type
,
11753 fold_convert_loc (loc
, type
,
11754 TREE_OPERAND (arg0
, 0)),
11755 negate_expr (op1
));
11757 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11758 && TREE_CODE (arg1
) == NEGATE_EXPR
11759 && negate_expr_p (op0
))
11761 if (ANY_INTEGRAL_TYPE_P (type
))
11762 fold_overflow_warning (("assuming signed overflow does not occur "
11763 "when distributing negation across "
11765 WARN_STRICT_OVERFLOW_MISC
);
11766 return fold_build2_loc (loc
, code
, type
,
11768 fold_convert_loc (loc
, type
,
11769 TREE_OPERAND (arg1
, 0)));
11772 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11773 operation, EXACT_DIV_EXPR.
11775 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11776 At one time others generated faster code, it's not clear if they do
11777 after the last round to changes to the DIV code in expmed.c. */
11778 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11779 && multiple_of_p (type
, arg0
, arg1
))
11780 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
11781 fold_convert (type
, arg0
),
11782 fold_convert (type
, arg1
));
11784 strict_overflow_p
= false;
11785 if (TREE_CODE (arg1
) == INTEGER_CST
11786 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11787 &strict_overflow_p
)) != 0)
11789 if (strict_overflow_p
)
11790 fold_overflow_warning (("assuming signed overflow does not occur "
11791 "when simplifying division"),
11792 WARN_STRICT_OVERFLOW_MISC
);
11793 return fold_convert_loc (loc
, type
, tem
);
11798 case CEIL_MOD_EXPR
:
11799 case FLOOR_MOD_EXPR
:
11800 case ROUND_MOD_EXPR
:
11801 case TRUNC_MOD_EXPR
:
11802 strict_overflow_p
= false;
11803 if (TREE_CODE (arg1
) == INTEGER_CST
11804 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11805 &strict_overflow_p
)) != 0)
11807 if (strict_overflow_p
)
11808 fold_overflow_warning (("assuming signed overflow does not occur "
11809 "when simplifying modulus"),
11810 WARN_STRICT_OVERFLOW_MISC
);
11811 return fold_convert_loc (loc
, type
, tem
);
11820 /* Since negative shift count is not well-defined,
11821 don't try to compute it in the compiler. */
11822 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11825 prec
= element_precision (type
);
11827 /* If we have a rotate of a bit operation with the rotate count and
11828 the second operand of the bit operation both constant,
11829 permute the two operations. */
11830 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11831 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11832 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11833 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11834 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11836 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11837 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11838 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11839 fold_build2_loc (loc
, code
, type
,
11841 fold_build2_loc (loc
, code
, type
,
11845 /* Two consecutive rotates adding up to the some integer
11846 multiple of the precision of the type can be ignored. */
11847 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11848 && TREE_CODE (arg0
) == RROTATE_EXPR
11849 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11850 && wi::umod_trunc (wi::to_wide (arg1
)
11851 + wi::to_wide (TREE_OPERAND (arg0
, 1)),
11853 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11861 case TRUTH_ANDIF_EXPR
:
11862 /* Note that the operands of this must be ints
11863 and their values must be 0 or 1.
11864 ("true" is a fixed value perhaps depending on the language.) */
11865 /* If first arg is constant zero, return it. */
11866 if (integer_zerop (arg0
))
11867 return fold_convert_loc (loc
, type
, arg0
);
11869 case TRUTH_AND_EXPR
:
11870 /* If either arg is constant true, drop it. */
11871 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11872 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11873 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11874 /* Preserve sequence points. */
11875 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11876 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11877 /* If second arg is constant zero, result is zero, but first arg
11878 must be evaluated. */
11879 if (integer_zerop (arg1
))
11880 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11881 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11882 case will be handled here. */
11883 if (integer_zerop (arg0
))
11884 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11886 /* !X && X is always false. */
11887 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11888 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11889 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11890 /* X && !X is always false. */
11891 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11892 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11893 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11895 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11896 means A >= Y && A != MAX, but in this case we know that
11899 if (!TREE_SIDE_EFFECTS (arg0
)
11900 && !TREE_SIDE_EFFECTS (arg1
))
11902 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
11903 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
11904 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
11906 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
11907 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
11908 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
11911 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11917 case TRUTH_ORIF_EXPR
:
11918 /* Note that the operands of this must be ints
11919 and their values must be 0 or true.
11920 ("true" is a fixed value perhaps depending on the language.) */
11921 /* If first arg is constant true, return it. */
11922 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11923 return fold_convert_loc (loc
, type
, arg0
);
11925 case TRUTH_OR_EXPR
:
11926 /* If either arg is constant zero, drop it. */
11927 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
11928 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11929 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
11930 /* Preserve sequence points. */
11931 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11932 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11933 /* If second arg is constant true, result is true, but we must
11934 evaluate first arg. */
11935 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
11936 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11937 /* Likewise for first arg, but note this only occurs here for
11939 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11940 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11942 /* !X || X is always true. */
11943 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11944 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11945 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11946 /* X || !X is always true. */
11947 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11948 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11949 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11951 /* (X && !Y) || (!X && Y) is X ^ Y */
11952 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
11953 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
11955 tree a0
, a1
, l0
, l1
, n0
, n1
;
11957 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11958 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11960 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11961 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11963 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
11964 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
11966 if ((operand_equal_p (n0
, a0
, 0)
11967 && operand_equal_p (n1
, a1
, 0))
11968 || (operand_equal_p (n0
, a1
, 0)
11969 && operand_equal_p (n1
, a0
, 0)))
11970 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
11973 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11979 case TRUTH_XOR_EXPR
:
11980 /* If the second arg is constant zero, drop it. */
11981 if (integer_zerop (arg1
))
11982 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11983 /* If the second arg is constant true, this is a logical inversion. */
11984 if (integer_onep (arg1
))
11986 tem
= invert_truthvalue_loc (loc
, arg0
);
11987 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
11989 /* Identical arguments cancel to zero. */
11990 if (operand_equal_p (arg0
, arg1
, 0))
11991 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11993 /* !X ^ X is always true. */
11994 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11995 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11996 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11998 /* X ^ !X is always true. */
11999 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12000 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12001 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12010 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12011 if (tem
!= NULL_TREE
)
12014 /* bool_var != 1 becomes !bool_var. */
12015 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12016 && code
== NE_EXPR
)
12017 return fold_convert_loc (loc
, type
,
12018 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12019 TREE_TYPE (arg0
), arg0
));
12021 /* bool_var == 0 becomes !bool_var. */
12022 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12023 && code
== EQ_EXPR
)
12024 return fold_convert_loc (loc
, type
,
12025 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12026 TREE_TYPE (arg0
), arg0
));
12028 /* !exp != 0 becomes !exp */
12029 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12030 && code
== NE_EXPR
)
12031 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12033 /* If this is an EQ or NE comparison with zero and ARG0 is
12034 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12035 two operations, but the latter can be done in one less insn
12036 on machines that have only two-operand insns or on which a
12037 constant cannot be the first operand. */
12038 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12039 && integer_zerop (arg1
))
12041 tree arg00
= TREE_OPERAND (arg0
, 0);
12042 tree arg01
= TREE_OPERAND (arg0
, 1);
12043 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12044 && integer_onep (TREE_OPERAND (arg00
, 0)))
12046 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12047 arg01
, TREE_OPERAND (arg00
, 1));
12048 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12049 build_int_cst (TREE_TYPE (arg0
), 1));
12050 return fold_build2_loc (loc
, code
, type
,
12051 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12054 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12055 && integer_onep (TREE_OPERAND (arg01
, 0)))
12057 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12058 arg00
, TREE_OPERAND (arg01
, 1));
12059 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12060 build_int_cst (TREE_TYPE (arg0
), 1));
12061 return fold_build2_loc (loc
, code
, type
,
12062 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12067 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12068 C1 is a valid shift constant, and C2 is a power of two, i.e.
12070 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12071 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12072 && integer_zerop (arg1
))
12074 tree arg00
= TREE_OPERAND (arg0
, 0);
12075 STRIP_NOPS (arg00
);
12076 if (TREE_CODE (arg00
) == RSHIFT_EXPR
12077 && TREE_CODE (TREE_OPERAND (arg00
, 1)) == INTEGER_CST
)
12079 tree itype
= TREE_TYPE (arg00
);
12080 tree arg001
= TREE_OPERAND (arg00
, 1);
12081 prec
= TYPE_PRECISION (itype
);
12083 /* Check for a valid shift count. */
12084 if (wi::ltu_p (wi::to_wide (arg001
), prec
))
12086 tree arg01
= TREE_OPERAND (arg0
, 1);
12087 tree arg000
= TREE_OPERAND (arg00
, 0);
12088 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12089 /* If (C2 << C1) doesn't overflow, then
12090 ((X >> C1) & C2) != 0 can be rewritten as
12091 (X & (C2 << C1)) != 0. */
12092 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12094 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
,
12096 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12098 return fold_build2_loc (loc
, code
, type
, tem
,
12099 fold_convert_loc (loc
, itype
, arg1
));
12101 /* Otherwise, for signed (arithmetic) shifts,
12102 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12103 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12104 else if (!TYPE_UNSIGNED (itype
))
12105 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
12108 build_int_cst (itype
, 0));
12109 /* Otherwise, of unsigned (logical) shifts,
12110 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12111 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12113 return omit_one_operand_loc (loc
, type
,
12114 code
== EQ_EXPR
? integer_one_node
12115 : integer_zero_node
,
12121 /* If this is a comparison of a field, we may be able to simplify it. */
12122 if ((TREE_CODE (arg0
) == COMPONENT_REF
12123 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12124 /* Handle the constant case even without -O
12125 to make sure the warnings are given. */
12126 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12128 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12133 /* Optimize comparisons of strlen vs zero to a compare of the
12134 first character of the string vs zero. To wit,
12135 strlen(ptr) == 0 => *ptr == 0
12136 strlen(ptr) != 0 => *ptr != 0
12137 Other cases should reduce to one of these two (or a constant)
12138 due to the return value of strlen being unsigned. */
12139 if (TREE_CODE (arg0
) == CALL_EXPR
&& integer_zerop (arg1
))
12141 tree fndecl
= get_callee_fndecl (arg0
);
12144 && fndecl_built_in_p (fndecl
, BUILT_IN_STRLEN
)
12145 && call_expr_nargs (arg0
) == 1
12146 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0)))
12150 = build_pointer_type (build_qualified_type (char_type_node
,
12152 tree ptr
= fold_convert_loc (loc
, ptrtype
,
12153 CALL_EXPR_ARG (arg0
, 0));
12154 tree iref
= build_fold_indirect_ref_loc (loc
, ptr
);
12155 return fold_build2_loc (loc
, code
, type
, iref
,
12156 build_int_cst (TREE_TYPE (iref
), 0));
12160 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12161 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12162 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12163 && integer_zerop (arg1
)
12164 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12166 tree arg00
= TREE_OPERAND (arg0
, 0);
12167 tree arg01
= TREE_OPERAND (arg0
, 1);
12168 tree itype
= TREE_TYPE (arg00
);
12169 if (wi::to_wide (arg01
) == element_precision (itype
) - 1)
12171 if (TYPE_UNSIGNED (itype
))
12173 itype
= signed_type_for (itype
);
12174 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12176 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12177 type
, arg00
, build_zero_cst (itype
));
12181 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12182 (X & C) == 0 when C is a single bit. */
12183 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12184 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12185 && integer_zerop (arg1
)
12186 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12188 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12189 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12190 TREE_OPERAND (arg0
, 1));
12191 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12193 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12197 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12198 constant C is a power of two, i.e. a single bit. */
12199 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12200 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12201 && integer_zerop (arg1
)
12202 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12203 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12204 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12206 tree arg00
= TREE_OPERAND (arg0
, 0);
12207 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12208 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12211 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12212 when is C is a power of two, i.e. a single bit. */
12213 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12214 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12215 && integer_zerop (arg1
)
12216 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12217 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12218 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12220 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12221 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12222 arg000
, TREE_OPERAND (arg0
, 1));
12223 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12224 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12227 if (integer_zerop (arg1
)
12228 && tree_expr_nonzero_p (arg0
))
12230 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12231 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12234 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12235 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12237 tree arg00
= TREE_OPERAND (arg0
, 0);
12238 tree arg01
= TREE_OPERAND (arg0
, 1);
12239 tree arg10
= TREE_OPERAND (arg1
, 0);
12240 tree arg11
= TREE_OPERAND (arg1
, 1);
12241 tree itype
= TREE_TYPE (arg0
);
12243 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12244 operand_equal_p guarantees no side-effects so we don't need
12245 to use omit_one_operand on Z. */
12246 if (operand_equal_p (arg01
, arg11
, 0))
12247 return fold_build2_loc (loc
, code
, type
, arg00
,
12248 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12250 if (operand_equal_p (arg01
, arg10
, 0))
12251 return fold_build2_loc (loc
, code
, type
, arg00
,
12252 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12254 if (operand_equal_p (arg00
, arg11
, 0))
12255 return fold_build2_loc (loc
, code
, type
, arg01
,
12256 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12258 if (operand_equal_p (arg00
, arg10
, 0))
12259 return fold_build2_loc (loc
, code
, type
, arg01
,
12260 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12263 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12264 if (TREE_CODE (arg01
) == INTEGER_CST
12265 && TREE_CODE (arg11
) == INTEGER_CST
)
12267 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12268 fold_convert_loc (loc
, itype
, arg11
));
12269 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12270 return fold_build2_loc (loc
, code
, type
, tem
,
12271 fold_convert_loc (loc
, itype
, arg10
));
12275 /* Attempt to simplify equality/inequality comparisons of complex
12276 values. Only lower the comparison if the result is known or
12277 can be simplified to a single scalar comparison. */
12278 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12279 || TREE_CODE (arg0
) == COMPLEX_CST
)
12280 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12281 || TREE_CODE (arg1
) == COMPLEX_CST
))
12283 tree real0
, imag0
, real1
, imag1
;
12286 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12288 real0
= TREE_OPERAND (arg0
, 0);
12289 imag0
= TREE_OPERAND (arg0
, 1);
12293 real0
= TREE_REALPART (arg0
);
12294 imag0
= TREE_IMAGPART (arg0
);
12297 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12299 real1
= TREE_OPERAND (arg1
, 0);
12300 imag1
= TREE_OPERAND (arg1
, 1);
12304 real1
= TREE_REALPART (arg1
);
12305 imag1
= TREE_IMAGPART (arg1
);
12308 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12309 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12311 if (integer_zerop (rcond
))
12313 if (code
== EQ_EXPR
)
12314 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12316 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12320 if (code
== NE_EXPR
)
12321 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12323 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12327 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12328 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12330 if (integer_zerop (icond
))
12332 if (code
== EQ_EXPR
)
12333 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12335 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12339 if (code
== NE_EXPR
)
12340 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12342 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12353 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12354 if (tem
!= NULL_TREE
)
12357 /* Transform comparisons of the form X +- C CMP X. */
12358 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12359 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12360 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12361 && !HONOR_SNANS (arg0
))
12363 tree arg01
= TREE_OPERAND (arg0
, 1);
12364 enum tree_code code0
= TREE_CODE (arg0
);
12365 int is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12367 /* (X - c) > X becomes false. */
12368 if (code
== GT_EXPR
12369 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12370 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12371 return constant_boolean_node (0, type
);
12373 /* Likewise (X + c) < X becomes false. */
12374 if (code
== LT_EXPR
12375 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12376 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12377 return constant_boolean_node (0, type
);
12379 /* Convert (X - c) <= X to true. */
12380 if (!HONOR_NANS (arg1
)
12382 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12383 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12384 return constant_boolean_node (1, type
);
12386 /* Convert (X + c) >= X to true. */
12387 if (!HONOR_NANS (arg1
)
12389 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12390 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12391 return constant_boolean_node (1, type
);
12394 /* If we are comparing an ABS_EXPR with a constant, we can
12395 convert all the cases into explicit comparisons, but they may
12396 well not be faster than doing the ABS and one comparison.
12397 But ABS (X) <= C is a range comparison, which becomes a subtraction
12398 and a comparison, and is probably faster. */
12399 if (code
== LE_EXPR
12400 && TREE_CODE (arg1
) == INTEGER_CST
12401 && TREE_CODE (arg0
) == ABS_EXPR
12402 && ! TREE_SIDE_EFFECTS (arg0
)
12403 && (tem
= negate_expr (arg1
)) != 0
12404 && TREE_CODE (tem
) == INTEGER_CST
12405 && !TREE_OVERFLOW (tem
))
12406 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
12407 build2 (GE_EXPR
, type
,
12408 TREE_OPERAND (arg0
, 0), tem
),
12409 build2 (LE_EXPR
, type
,
12410 TREE_OPERAND (arg0
, 0), arg1
));
12412 /* Convert ABS_EXPR<x> >= 0 to true. */
12413 strict_overflow_p
= false;
12414 if (code
== GE_EXPR
12415 && (integer_zerop (arg1
)
12416 || (! HONOR_NANS (arg0
)
12417 && real_zerop (arg1
)))
12418 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12420 if (strict_overflow_p
)
12421 fold_overflow_warning (("assuming signed overflow does not occur "
12422 "when simplifying comparison of "
12423 "absolute value and zero"),
12424 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12425 return omit_one_operand_loc (loc
, type
,
12426 constant_boolean_node (true, type
),
12430 /* Convert ABS_EXPR<x> < 0 to false. */
12431 strict_overflow_p
= false;
12432 if (code
== LT_EXPR
12433 && (integer_zerop (arg1
) || real_zerop (arg1
))
12434 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12436 if (strict_overflow_p
)
12437 fold_overflow_warning (("assuming signed overflow does not occur "
12438 "when simplifying comparison of "
12439 "absolute value and zero"),
12440 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12441 return omit_one_operand_loc (loc
, type
,
12442 constant_boolean_node (false, type
),
12446 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12447 and similarly for >= into !=. */
12448 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12449 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12450 && TREE_CODE (arg1
) == LSHIFT_EXPR
12451 && integer_onep (TREE_OPERAND (arg1
, 0)))
12452 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12453 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12454 TREE_OPERAND (arg1
, 1)),
12455 build_zero_cst (TREE_TYPE (arg0
)));
12457 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12458 otherwise Y might be >= # of bits in X's type and thus e.g.
12459 (unsigned char) (1 << Y) for Y 15 might be 0.
12460 If the cast is widening, then 1 << Y should have unsigned type,
12461 otherwise if Y is number of bits in the signed shift type minus 1,
12462 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12463 31 might be 0xffffffff80000000. */
12464 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12465 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12466 && CONVERT_EXPR_P (arg1
)
12467 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
12468 && (element_precision (TREE_TYPE (arg1
))
12469 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
12470 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
12471 || (element_precision (TREE_TYPE (arg1
))
12472 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
12473 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
12475 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12476 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
12477 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12478 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
12479 build_zero_cst (TREE_TYPE (arg0
)));
12484 case UNORDERED_EXPR
:
12492 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12494 tree targ0
= strip_float_extensions (arg0
);
12495 tree targ1
= strip_float_extensions (arg1
);
12496 tree newtype
= TREE_TYPE (targ0
);
12498 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
12499 newtype
= TREE_TYPE (targ1
);
12501 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
12502 return fold_build2_loc (loc
, code
, type
,
12503 fold_convert_loc (loc
, newtype
, targ0
),
12504 fold_convert_loc (loc
, newtype
, targ1
));
12509 case COMPOUND_EXPR
:
12510 /* When pedantic, a compound expression can be neither an lvalue
12511 nor an integer constant expression. */
12512 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
12514 /* Don't let (0, 0) be null pointer constant. */
12515 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
12516 : fold_convert_loc (loc
, type
, arg1
);
12517 return pedantic_non_lvalue_loc (loc
, tem
);
12520 /* An ASSERT_EXPR should never be passed to fold_binary. */
12521 gcc_unreachable ();
12525 } /* switch (code) */
12528 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12529 ((A & N) + B) & M -> (A + B) & M
12530 Similarly if (N & M) == 0,
12531 ((A | N) + B) & M -> (A + B) & M
12532 and for - instead of + (or unary - instead of +)
12533 and/or ^ instead of |.
12534 If B is constant and (B & M) == 0, fold into A & M.
12536 This function is a helper for match.pd patterns. Return non-NULL
12537 type in which the simplified operation should be performed only
12538 if any optimization is possible.
12540 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12541 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12542 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12545 fold_bit_and_mask (tree type
, tree arg1
, enum tree_code code
,
12546 tree arg00
, enum tree_code code00
, tree arg000
, tree arg001
,
12547 tree arg01
, enum tree_code code01
, tree arg010
, tree arg011
,
12550 gcc_assert (TREE_CODE (arg1
) == INTEGER_CST
);
12551 gcc_assert (code
== PLUS_EXPR
|| code
== MINUS_EXPR
|| code
== NEGATE_EXPR
);
12552 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
12554 || (cst1
& (cst1
+ 1)) != 0
12555 || !INTEGRAL_TYPE_P (type
)
12556 || (!TYPE_OVERFLOW_WRAPS (type
)
12557 && TREE_CODE (type
) != INTEGER_TYPE
)
12558 || (wi::max_value (type
) & cst1
) != cst1
)
12561 enum tree_code codes
[2] = { code00
, code01
};
12562 tree arg0xx
[4] = { arg000
, arg001
, arg010
, arg011
};
12566 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12567 arg1 (M) is == (1LL << cst) - 1.
12568 Store C into PMOP[0] and D into PMOP[1]. */
12571 which
= code
!= NEGATE_EXPR
;
12573 for (; which
>= 0; which
--)
12574 switch (codes
[which
])
12579 gcc_assert (TREE_CODE (arg0xx
[2 * which
+ 1]) == INTEGER_CST
);
12580 cst0
= wi::to_wide (arg0xx
[2 * which
+ 1]) & cst1
;
12581 if (codes
[which
] == BIT_AND_EXPR
)
12586 else if (cst0
!= 0)
12588 /* If C or D is of the form (A & N) where
12589 (N & M) == M, or of the form (A | N) or
12590 (A ^ N) where (N & M) == 0, replace it with A. */
12591 pmop
[which
] = arg0xx
[2 * which
];
12594 if (TREE_CODE (pmop
[which
]) != INTEGER_CST
)
12596 /* If C or D is a N where (N & M) == 0, it can be
12597 omitted (replaced with 0). */
12598 if ((code
== PLUS_EXPR
12599 || (code
== MINUS_EXPR
&& which
== 0))
12600 && (cst1
& wi::to_wide (pmop
[which
])) == 0)
12601 pmop
[which
] = build_int_cst (type
, 0);
12602 /* Similarly, with C - N where (-N & M) == 0. */
12603 if (code
== MINUS_EXPR
12605 && (cst1
& -wi::to_wide (pmop
[which
])) == 0)
12606 pmop
[which
] = build_int_cst (type
, 0);
12609 gcc_unreachable ();
12612 /* Only build anything new if we optimized one or both arguments above. */
12613 if (pmop
[0] == arg00
&& pmop
[1] == arg01
)
12616 if (TYPE_OVERFLOW_WRAPS (type
))
12619 return unsigned_type_for (type
);
12622 /* Used by contains_label_[p1]. */
12624 struct contains_label_data
12626 hash_set
<tree
> *pset
;
12627 bool inside_switch_p
;
12630 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12631 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12632 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12635 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data
)
12637 contains_label_data
*d
= (contains_label_data
*) data
;
12638 switch (TREE_CODE (*tp
))
12643 case CASE_LABEL_EXPR
:
12644 if (!d
->inside_switch_p
)
12649 if (!d
->inside_switch_p
)
12651 if (walk_tree (&SWITCH_COND (*tp
), contains_label_1
, data
, d
->pset
))
12653 d
->inside_switch_p
= true;
12654 if (walk_tree (&SWITCH_BODY (*tp
), contains_label_1
, data
, d
->pset
))
12656 d
->inside_switch_p
= false;
12657 *walk_subtrees
= 0;
12662 *walk_subtrees
= 0;
12670 /* Return whether the sub-tree ST contains a label which is accessible from
12671 outside the sub-tree. */
12674 contains_label_p (tree st
)
12676 hash_set
<tree
> pset
;
12677 contains_label_data data
= { &pset
, false };
12678 return walk_tree (&st
, contains_label_1
, &data
, &pset
) != NULL_TREE
;
12681 /* Fold a ternary expression of code CODE and type TYPE with operands
12682 OP0, OP1, and OP2. Return the folded expression if folding is
12683 successful. Otherwise, return NULL_TREE. */
12686 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
12687 tree op0
, tree op1
, tree op2
)
12690 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
12691 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12693 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
12694 && TREE_CODE_LENGTH (code
) == 3);
12696 /* If this is a commutative operation, and OP0 is a constant, move it
12697 to OP1 to reduce the number of tests below. */
12698 if (commutative_ternary_tree_code (code
)
12699 && tree_swap_operands_p (op0
, op1
))
12700 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
12702 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
12706 /* Strip any conversions that don't change the mode. This is safe
12707 for every expression, except for a comparison expression because
12708 its signedness is derived from its operands. So, in the latter
12709 case, only strip conversions that don't change the signedness.
12711 Note that this is done as an internal manipulation within the
12712 constant folder, in order to find the simplest representation of
12713 the arguments so that their form can be studied. In any cases,
12714 the appropriate type conversions should be put back in the tree
12715 that will get out of the constant folder. */
12736 case COMPONENT_REF
:
12737 if (TREE_CODE (arg0
) == CONSTRUCTOR
12738 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12740 unsigned HOST_WIDE_INT idx
;
12742 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12749 case VEC_COND_EXPR
:
12750 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12751 so all simple results must be passed through pedantic_non_lvalue. */
12752 if (TREE_CODE (arg0
) == INTEGER_CST
)
12754 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12755 tem
= integer_zerop (arg0
) ? op2
: op1
;
12756 /* Only optimize constant conditions when the selected branch
12757 has the same type as the COND_EXPR. This avoids optimizing
12758 away "c ? x : throw", where the throw has a void type.
12759 Avoid throwing away that operand which contains label. */
12760 if ((!TREE_SIDE_EFFECTS (unused_op
)
12761 || !contains_label_p (unused_op
))
12762 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12763 || VOID_TYPE_P (type
)))
12764 return pedantic_non_lvalue_loc (loc
, tem
);
12767 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12769 unsigned HOST_WIDE_INT nelts
;
12770 if ((TREE_CODE (arg1
) == VECTOR_CST
12771 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12772 && (TREE_CODE (arg2
) == VECTOR_CST
12773 || TREE_CODE (arg2
) == CONSTRUCTOR
)
12774 && TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
12776 vec_perm_builder
sel (nelts
, nelts
, 1);
12777 for (unsigned int i
= 0; i
< nelts
; i
++)
12779 tree val
= VECTOR_CST_ELT (arg0
, i
);
12780 if (integer_all_onesp (val
))
12781 sel
.quick_push (i
);
12782 else if (integer_zerop (val
))
12783 sel
.quick_push (nelts
+ i
);
12784 else /* Currently unreachable. */
12787 vec_perm_indices
indices (sel
, 2, nelts
);
12788 tree t
= fold_vec_perm (type
, arg1
, arg2
, indices
);
12789 if (t
!= NULL_TREE
)
12794 /* If we have A op B ? A : C, we may be able to convert this to a
12795 simpler expression, depending on the operation and the values
12796 of B and C. Signed zeros prevent all of these transformations,
12797 for reasons given above each one.
12799 Also try swapping the arguments and inverting the conditional. */
12800 if (COMPARISON_CLASS_P (arg0
)
12801 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op1
)
12802 && !HONOR_SIGNED_ZEROS (element_mode (op1
)))
12804 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
12809 if (COMPARISON_CLASS_P (arg0
)
12810 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op2
)
12811 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
12813 location_t loc0
= expr_location_or (arg0
, loc
);
12814 tem
= fold_invert_truthvalue (loc0
, arg0
);
12815 if (tem
&& COMPARISON_CLASS_P (tem
))
12817 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
12823 /* If the second operand is simpler than the third, swap them
12824 since that produces better jump optimization results. */
12825 if (truth_value_p (TREE_CODE (arg0
))
12826 && tree_swap_operands_p (op1
, op2
))
12828 location_t loc0
= expr_location_or (arg0
, loc
);
12829 /* See if this can be inverted. If it can't, possibly because
12830 it was a floating-point inequality comparison, don't do
12832 tem
= fold_invert_truthvalue (loc0
, arg0
);
12834 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12837 /* Convert A ? 1 : 0 to simply A. */
12838 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12839 : (integer_onep (op1
)
12840 && !VECTOR_TYPE_P (type
)))
12841 && integer_zerop (op2
)
12842 /* If we try to convert OP0 to our type, the
12843 call to fold will try to move the conversion inside
12844 a COND, which will recurse. In that case, the COND_EXPR
12845 is probably the best choice, so leave it alone. */
12846 && type
== TREE_TYPE (arg0
))
12847 return pedantic_non_lvalue_loc (loc
, arg0
);
12849 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12850 over COND_EXPR in cases such as floating point comparisons. */
12851 if (integer_zerop (op1
)
12852 && code
== COND_EXPR
12853 && integer_onep (op2
)
12854 && !VECTOR_TYPE_P (type
)
12855 && truth_value_p (TREE_CODE (arg0
)))
12856 return pedantic_non_lvalue_loc (loc
,
12857 fold_convert_loc (loc
, type
,
12858 invert_truthvalue_loc (loc
,
12861 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12862 if (TREE_CODE (arg0
) == LT_EXPR
12863 && integer_zerop (TREE_OPERAND (arg0
, 1))
12864 && integer_zerop (op2
)
12865 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
12867 /* sign_bit_p looks through both zero and sign extensions,
12868 but for this optimization only sign extensions are
12870 tree tem2
= TREE_OPERAND (arg0
, 0);
12871 while (tem
!= tem2
)
12873 if (TREE_CODE (tem2
) != NOP_EXPR
12874 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12879 tem2
= TREE_OPERAND (tem2
, 0);
12881 /* sign_bit_p only checks ARG1 bits within A's precision.
12882 If <sign bit of A> has wider type than A, bits outside
12883 of A's precision in <sign bit of A> need to be checked.
12884 If they are all 0, this optimization needs to be done
12885 in unsigned A's type, if they are all 1 in signed A's type,
12886 otherwise this can't be done. */
12888 && TYPE_PRECISION (TREE_TYPE (tem
))
12889 < TYPE_PRECISION (TREE_TYPE (arg1
))
12890 && TYPE_PRECISION (TREE_TYPE (tem
))
12891 < TYPE_PRECISION (type
))
12893 int inner_width
, outer_width
;
12896 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12897 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12898 if (outer_width
> TYPE_PRECISION (type
))
12899 outer_width
= TYPE_PRECISION (type
);
12901 wide_int mask
= wi::shifted_mask
12902 (inner_width
, outer_width
- inner_width
, false,
12903 TYPE_PRECISION (TREE_TYPE (arg1
)));
12905 wide_int common
= mask
& wi::to_wide (arg1
);
12906 if (common
== mask
)
12908 tem_type
= signed_type_for (TREE_TYPE (tem
));
12909 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12911 else if (common
== 0)
12913 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
12914 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12922 fold_convert_loc (loc
, type
,
12923 fold_build2_loc (loc
, BIT_AND_EXPR
,
12924 TREE_TYPE (tem
), tem
,
12925 fold_convert_loc (loc
,
12930 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12931 already handled above. */
12932 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12933 && integer_onep (TREE_OPERAND (arg0
, 1))
12934 && integer_zerop (op2
)
12935 && integer_pow2p (arg1
))
12937 tree tem
= TREE_OPERAND (arg0
, 0);
12939 if (TREE_CODE (tem
) == RSHIFT_EXPR
12940 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
12941 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
12942 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
12943 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12944 fold_convert_loc (loc
, type
,
12945 TREE_OPERAND (tem
, 0)),
12949 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12950 is probably obsolete because the first operand should be a
12951 truth value (that's why we have the two cases above), but let's
12952 leave it in until we can confirm this for all front-ends. */
12953 if (integer_zerop (op2
)
12954 && TREE_CODE (arg0
) == NE_EXPR
12955 && integer_zerop (TREE_OPERAND (arg0
, 1))
12956 && integer_pow2p (arg1
)
12957 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12958 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12959 arg1
, OEP_ONLY_CONST
)
12960 /* operand_equal_p compares just value, not precision, so e.g.
12961 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12962 second operand 32-bit -128, which is not a power of two (or vice
12964 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)))
12965 return pedantic_non_lvalue_loc (loc
,
12966 fold_convert_loc (loc
, type
,
12967 TREE_OPERAND (arg0
,
12970 /* Disable the transformations below for vectors, since
12971 fold_binary_op_with_conditional_arg may undo them immediately,
12972 yielding an infinite loop. */
12973 if (code
== VEC_COND_EXPR
)
12976 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12977 if (integer_zerop (op2
)
12978 && truth_value_p (TREE_CODE (arg0
))
12979 && truth_value_p (TREE_CODE (arg1
))
12980 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12981 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
12982 : TRUTH_ANDIF_EXPR
,
12983 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
12985 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12986 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
12987 && truth_value_p (TREE_CODE (arg0
))
12988 && truth_value_p (TREE_CODE (arg1
))
12989 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12991 location_t loc0
= expr_location_or (arg0
, loc
);
12992 /* Only perform transformation if ARG0 is easily inverted. */
12993 tem
= fold_invert_truthvalue (loc0
, arg0
);
12995 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12998 type
, fold_convert_loc (loc
, type
, tem
),
13002 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13003 if (integer_zerop (arg1
)
13004 && truth_value_p (TREE_CODE (arg0
))
13005 && truth_value_p (TREE_CODE (op2
))
13006 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13008 location_t loc0
= expr_location_or (arg0
, loc
);
13009 /* Only perform transformation if ARG0 is easily inverted. */
13010 tem
= fold_invert_truthvalue (loc0
, arg0
);
13012 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13013 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13014 type
, fold_convert_loc (loc
, type
, tem
),
13018 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13019 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13020 && truth_value_p (TREE_CODE (arg0
))
13021 && truth_value_p (TREE_CODE (op2
))
13022 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13023 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13024 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13025 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13030 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13031 of fold_ternary on them. */
13032 gcc_unreachable ();
13034 case BIT_FIELD_REF
:
13035 if (TREE_CODE (arg0
) == VECTOR_CST
13036 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13037 || (VECTOR_TYPE_P (type
)
13038 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
))))
13039 && tree_fits_uhwi_p (op1
)
13040 && tree_fits_uhwi_p (op2
))
13042 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13043 unsigned HOST_WIDE_INT width
13044 = (TREE_CODE (eltype
) == BOOLEAN_TYPE
13045 ? TYPE_PRECISION (eltype
) : tree_to_uhwi (TYPE_SIZE (eltype
)));
13046 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13047 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13050 && (idx
% width
) == 0
13051 && (n
% width
) == 0
13052 && known_le ((idx
+ n
) / width
,
13053 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))))
13058 if (TREE_CODE (arg0
) == VECTOR_CST
)
13062 tem
= VECTOR_CST_ELT (arg0
, idx
);
13063 if (VECTOR_TYPE_P (type
))
13064 tem
= fold_build1 (VIEW_CONVERT_EXPR
, type
, tem
);
13068 tree_vector_builder
vals (type
, n
, 1);
13069 for (unsigned i
= 0; i
< n
; ++i
)
13070 vals
.quick_push (VECTOR_CST_ELT (arg0
, idx
+ i
));
13071 return vals
.build ();
13076 /* On constants we can use native encode/interpret to constant
13077 fold (nearly) all BIT_FIELD_REFs. */
13078 if (CONSTANT_CLASS_P (arg0
)
13079 && can_native_interpret_type_p (type
)
13080 && BITS_PER_UNIT
== 8
13081 && tree_fits_uhwi_p (op1
)
13082 && tree_fits_uhwi_p (op2
))
13084 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13085 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13086 /* Limit us to a reasonable amount of work. To relax the
13087 other limitations we need bit-shifting of the buffer
13088 and rounding up the size. */
13089 if (bitpos
% BITS_PER_UNIT
== 0
13090 && bitsize
% BITS_PER_UNIT
== 0
13091 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
13093 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
13094 unsigned HOST_WIDE_INT len
13095 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
13096 bitpos
/ BITS_PER_UNIT
);
13098 && len
* BITS_PER_UNIT
>= bitsize
)
13100 tree v
= native_interpret_expr (type
, b
,
13101 bitsize
/ BITS_PER_UNIT
);
13110 case VEC_PERM_EXPR
:
13111 /* Perform constant folding of BIT_INSERT_EXPR. */
13112 if (TREE_CODE (arg2
) == VECTOR_CST
13113 && TREE_CODE (op0
) == VECTOR_CST
13114 && TREE_CODE (op1
) == VECTOR_CST
)
13116 /* Build a vector of integers from the tree mask. */
13117 vec_perm_builder builder
;
13118 if (!tree_to_vec_perm_builder (&builder
, arg2
))
13121 /* Create a vec_perm_indices for the integer vector. */
13122 poly_uint64 nelts
= TYPE_VECTOR_SUBPARTS (type
);
13123 bool single_arg
= (op0
== op1
);
13124 vec_perm_indices
sel (builder
, single_arg
? 1 : 2, nelts
);
13125 return fold_vec_perm (type
, op0
, op1
, sel
);
13129 case BIT_INSERT_EXPR
:
13130 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13131 if (TREE_CODE (arg0
) == INTEGER_CST
13132 && TREE_CODE (arg1
) == INTEGER_CST
)
13134 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13135 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
13136 wide_int tem
= (wi::to_wide (arg0
)
13137 & wi::shifted_mask (bitpos
, bitsize
, true,
13138 TYPE_PRECISION (type
)));
13140 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
13142 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
13144 else if (TREE_CODE (arg0
) == VECTOR_CST
13145 && CONSTANT_CLASS_P (arg1
)
13146 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
13149 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13150 unsigned HOST_WIDE_INT elsize
13151 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
13152 if (bitpos
% elsize
== 0)
13154 unsigned k
= bitpos
/ elsize
;
13155 unsigned HOST_WIDE_INT nelts
;
13156 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
13158 else if (VECTOR_CST_NELTS (arg0
).is_constant (&nelts
))
13160 tree_vector_builder
elts (type
, nelts
, 1);
13161 elts
.quick_grow (nelts
);
13162 for (unsigned HOST_WIDE_INT i
= 0; i
< nelts
; ++i
)
13163 elts
[i
] = (i
== k
? arg1
: VECTOR_CST_ELT (arg0
, i
));
13164 return elts
.build ();
13172 } /* switch (code) */
13175 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13176 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13177 constructor element index of the value returned. If the element is
13178 not found NULL_TREE is returned and *CTOR_IDX is updated to
13179 the index of the element after the ACCESS_INDEX position (which
13180 may be outside of the CTOR array). */
13183 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
,
13184 unsigned *ctor_idx
)
13186 tree index_type
= NULL_TREE
;
13187 signop index_sgn
= UNSIGNED
;
13188 offset_int low_bound
= 0;
13190 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
13192 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
13193 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
13195 /* Static constructors for variably sized objects makes no sense. */
13196 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
13197 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
13198 /* ??? When it is obvious that the range is signed, treat it so. */
13199 if (TYPE_UNSIGNED (index_type
)
13200 && TYPE_MAX_VALUE (domain_type
)
13201 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
),
13202 TYPE_MIN_VALUE (domain_type
)))
13204 index_sgn
= SIGNED
;
13206 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type
)),
13211 index_sgn
= TYPE_SIGN (index_type
);
13212 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
13218 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
13221 offset_int index
= low_bound
;
13223 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
13225 offset_int max_index
= index
;
13228 bool first_p
= true;
13230 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
13232 /* Array constructor might explicitly set index, or specify a range,
13233 or leave index NULL meaning that it is next index after previous
13237 if (TREE_CODE (cfield
) == INTEGER_CST
)
13239 = offset_int::from (wi::to_wide (cfield
), index_sgn
);
13242 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
13243 index
= offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 0)),
13246 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 1)),
13248 gcc_checking_assert (wi::le_p (index
, max_index
, index_sgn
));
13253 index
= max_index
+ 1;
13255 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
13256 gcc_checking_assert (wi::gt_p (index
, max_index
, index_sgn
));
13262 /* Do we have match? */
13263 if (wi::cmp (access_index
, index
, index_sgn
) >= 0)
13265 if (wi::cmp (access_index
, max_index
, index_sgn
) <= 0)
13272 else if (in_gimple_form
)
13273 /* We're past the element we search for. Note during parsing
13274 the elements might not be sorted.
13275 ??? We should use a binary search and a flag on the
13276 CONSTRUCTOR as to whether elements are sorted in declaration
13285 /* Perform constant folding and related simplification of EXPR.
13286 The related simplifications include x*1 => x, x*0 => 0, etc.,
13287 and application of the associative law.
13288 NOP_EXPR conversions may be removed freely (as long as we
13289 are careful not to change the type of the overall expression).
13290 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13291 but we can constant-fold them if they have constant operands. */
13293 #ifdef ENABLE_FOLD_CHECKING
13294 # define fold(x) fold_1 (x)
13295 static tree
fold_1 (tree
);
13301 const tree t
= expr
;
13302 enum tree_code code
= TREE_CODE (t
);
13303 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13305 location_t loc
= EXPR_LOCATION (expr
);
13307 /* Return right away if a constant. */
13308 if (kind
== tcc_constant
)
13311 /* CALL_EXPR-like objects with variable numbers of operands are
13312 treated specially. */
13313 if (kind
== tcc_vl_exp
)
13315 if (code
== CALL_EXPR
)
13317 tem
= fold_call_expr (loc
, expr
, false);
13318 return tem
? tem
: expr
;
13323 if (IS_EXPR_CODE_CLASS (kind
))
13325 tree type
= TREE_TYPE (t
);
13326 tree op0
, op1
, op2
;
13328 switch (TREE_CODE_LENGTH (code
))
13331 op0
= TREE_OPERAND (t
, 0);
13332 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13333 return tem
? tem
: expr
;
13335 op0
= TREE_OPERAND (t
, 0);
13336 op1
= TREE_OPERAND (t
, 1);
13337 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13338 return tem
? tem
: expr
;
13340 op0
= TREE_OPERAND (t
, 0);
13341 op1
= TREE_OPERAND (t
, 1);
13342 op2
= TREE_OPERAND (t
, 2);
13343 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13344 return tem
? tem
: expr
;
13354 tree op0
= TREE_OPERAND (t
, 0);
13355 tree op1
= TREE_OPERAND (t
, 1);
13357 if (TREE_CODE (op1
) == INTEGER_CST
13358 && TREE_CODE (op0
) == CONSTRUCTOR
13359 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13361 tree val
= get_array_ctor_element_at_index (op0
,
13362 wi::to_offset (op1
));
13370 /* Return a VECTOR_CST if possible. */
13373 tree type
= TREE_TYPE (t
);
13374 if (TREE_CODE (type
) != VECTOR_TYPE
)
13379 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
13380 if (! CONSTANT_CLASS_P (val
))
13383 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
13387 return fold (DECL_INITIAL (t
));
13391 } /* switch (code) */
13394 #ifdef ENABLE_FOLD_CHECKING
13397 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13398 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
13399 static void fold_check_failed (const_tree
, const_tree
);
13400 void print_fold_checksum (const_tree
);
13402 /* When --enable-checking=fold, compute a digest of expr before
13403 and after actual fold call to see if fold did not accidentally
13404 change original expr. */
13410 struct md5_ctx ctx
;
13411 unsigned char checksum_before
[16], checksum_after
[16];
13412 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13414 md5_init_ctx (&ctx
);
13415 fold_checksum_tree (expr
, &ctx
, &ht
);
13416 md5_finish_ctx (&ctx
, checksum_before
);
13419 ret
= fold_1 (expr
);
13421 md5_init_ctx (&ctx
);
13422 fold_checksum_tree (expr
, &ctx
, &ht
);
13423 md5_finish_ctx (&ctx
, checksum_after
);
13425 if (memcmp (checksum_before
, checksum_after
, 16))
13426 fold_check_failed (expr
, ret
);
13432 print_fold_checksum (const_tree expr
)
13434 struct md5_ctx ctx
;
13435 unsigned char checksum
[16], cnt
;
13436 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13438 md5_init_ctx (&ctx
);
13439 fold_checksum_tree (expr
, &ctx
, &ht
);
13440 md5_finish_ctx (&ctx
, checksum
);
13441 for (cnt
= 0; cnt
< 16; ++cnt
)
13442 fprintf (stderr
, "%02x", checksum
[cnt
]);
13443 putc ('\n', stderr
);
/* Abort compilation via internal_error because a tree was modified in
   place by folding: EXPR is the original tree, RET the folded result.
   Called when the before/after MD5 digests of EXPR differ.  */
13447 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13449 internal_error ("fold check: original tree changed by fold");
/* Accumulate an MD5 digest of tree EXPR into *CTX, using *HT to record
   nodes already visited so shared subtrees are hashed only once.
   Fields that fold is explicitly permitted to mutate (assembler name /
   symtab node, lazily-filled type caches, the no-warning bit) are
   cleared on a stack copy of the node before hashing, so setting them
   does not count as "fold changed the tree".
   NOTE(review): this copy of the file has lost lines to extraction
   damage (braces, case labels, the NULL / already-visited early
   exits); compare against upstream fold-const.c before editing.  */
13453 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
13454 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
13456 const tree_node
**slot
;
13457 enum tree_code code
;
13458 union tree_node
*buf
;
/* Register EXPR in the visited set; presumably the (elided) code
   returns early when the slot was already occupied -- TODO confirm.  */
13464 slot
= ht
->find_slot (expr
, INSERT
);
13468 code
= TREE_CODE (expr
);
/* Declarations: hash a scrubbed copy so that setting the assembler
   name, symtab node or no-warning bit is not flagged as a change.  */
13469 if (TREE_CODE_CLASS (code
) == tcc_declaration
13470 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13472 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13473 size_t sz
= tree_size (expr
);
13474 buf
= XALLOCAVAR (union tree_node
, sz
);
13475 memcpy ((char *) buf
, expr
, sz
);
13476 SET_DECL_ASSEMBLER_NAME ((tree
) buf
, NULL
);
13477 buf
->decl_with_vis
.symtab_node
= NULL
;
13478 buf
->base
.nowarning_flag
= 0;
/* Types: the pointer-to / reference-to / cached-values / variant /
   alias-set fields are filled in lazily, so scrub them before
   hashing.  */
13481 else if (TREE_CODE_CLASS (code
) == tcc_type
13482 && (TYPE_POINTER_TO (expr
)
13483 || TYPE_REFERENCE_TO (expr
)
13484 || TYPE_CACHED_VALUES_P (expr
)
13485 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13486 || TYPE_NEXT_VARIANT (expr
)
13487 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
13489 /* Allow these fields to be modified. */
13491 size_t sz
= tree_size (expr
);
13492 buf
= XALLOCAVAR (union tree_node
, sz
);
13493 memcpy ((char *) buf
, expr
, sz
);
13494 expr
= tmp
= (tree
) buf
;
13495 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13496 TYPE_POINTER_TO (tmp
) = NULL
;
13497 TYPE_REFERENCE_TO (tmp
) = NULL
;
13498 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13499 TYPE_ALIAS_SET (tmp
) = -1;
13500 if (TYPE_CACHED_VALUES_P (tmp
))
13502 TYPE_CACHED_VALUES_P (tmp
) = 0;
13503 TYPE_CACHED_VALUES (tmp
) = NULL
;
13506 else if (TREE_NO_WARNING (expr
) && (DECL_P (expr
) || EXPR_P (expr
)))
13508 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13509 and change builtins.c etc. instead - see PR89543. */
13510 size_t sz
= tree_size (expr
);
13511 buf
= XALLOCAVAR (union tree_node
, sz
);
13512 memcpy ((char *) buf
, expr
, sz
);
13513 buf
->base
.nowarning_flag
= 0;
/* Hash the (possibly scrubbed) node body, then recurse into the
   structurally reachable sub-trees.  */
13516 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13517 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13518 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
/* TREE_CHAIN is only meaningful for codes with TS_COMMON that are not
   types, decls, TREE_LIST or SSA_NAME.  */
13519 if (TREE_CODE_CLASS (code
) != tcc_type
13520 && TREE_CODE_CLASS (code
) != tcc_declaration
13521 && code
!= TREE_LIST
13522 && code
!= SSA_NAME
13523 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13524 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13525 switch (TREE_CODE_CLASS (code
))
13531 md5_process_bytes (TREE_STRING_POINTER (expr
),
13532 TREE_STRING_LENGTH (expr
), ctx
);
13535 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13536 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
/* VECTOR_CST: hash only the encoded elements of the representation.  */
13539 len
= vector_cst_encoded_nelts (expr
);
13540 for (i
= 0; i
< len
; ++i
)
13541 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr
, i
), ctx
, ht
);
13547 case tcc_exceptional
:
/* TREE_LIST: hash purpose/value, then iterate down the chain via the
   recursive_label (label itself elided in this copy).  */
13551 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13552 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13553 expr
= TREE_CHAIN (expr
);
13554 goto recursive_label
;
13557 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13558 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
/* All expression-like classes: hash every operand.  */
13564 case tcc_expression
:
13565 case tcc_reference
:
13566 case tcc_comparison
:
13569 case tcc_statement
:
13571 len
= TREE_OPERAND_LENGTH (expr
);
13572 for (i
= 0; i
< len
; ++i
)
13573 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13575 case tcc_declaration
:
13576 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13577 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13578 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13580 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13581 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13582 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13583 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13584 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13587 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13589 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13591 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13592 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13594 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
/* tcc_type (case label elided): hash the type's salient fields.  */
13598 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13599 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13600 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13601 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13602 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13603 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13604 if (INTEGRAL_TYPE_P (expr
)
13605 || SCALAR_FLOAT_TYPE_P (expr
))
13607 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13608 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13610 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13611 if (TREE_CODE (expr
) == RECORD_TYPE
13612 || TREE_CODE (expr
) == UNION_TYPE
13613 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
13614 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13615 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
13622 /* Helper function for outputting the checksum of a tree T.  When
13623 debugging with gdb, you can "define mynext" to be "next" followed
13624 by "call debug_fold_checksum (op0)", then just trace down till the
outputs differ.  */
13627 DEBUG_FUNCTION
void
13628 debug_fold_checksum (const_tree t
)
13631 unsigned char checksum
[16];
13632 struct md5_ctx ctx
;
13633 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13635 md5_init_ctx (&ctx
);
13636 fold_checksum_tree (t
, &ctx
, &ht
);
13637 md5_finish_ctx (&ctx
, checksum
);
13640 for (i
= 0; i
< 16; i
++)
13641 fprintf (stderr
, "%d ", checksum
[i
]);
13643 fprintf (stderr
, "\n");
13648 /* Fold a unary tree expression with code CODE of type TYPE with an
13649 operand OP0. LOC is the location of the resulting expression.
13650 Return a folded expression if successful. Otherwise, return a tree
13651 expression with code CODE of type TYPE with an operand OP0. */
13654 fold_build1_loc (location_t loc
,
13655 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13658 #ifdef ENABLE_FOLD_CHECKING
13659 unsigned char checksum_before
[16], checksum_after
[16];
13660 struct md5_ctx ctx
;
13661 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13663 md5_init_ctx (&ctx
);
13664 fold_checksum_tree (op0
, &ctx
, &ht
);
13665 md5_finish_ctx (&ctx
, checksum_before
);
13669 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13671 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13673 #ifdef ENABLE_FOLD_CHECKING
13674 md5_init_ctx (&ctx
);
13675 fold_checksum_tree (op0
, &ctx
, &ht
);
13676 md5_finish_ctx (&ctx
, checksum_after
);
13678 if (memcmp (checksum_before
, checksum_after
, 16))
13679 fold_check_failed (op0
, tem
);
13684 /* Fold a binary tree expression with code CODE of type TYPE with
13685 operands OP0 and OP1. LOC is the location of the resulting
13686 expression. Return a folded expression if successful. Otherwise,
13687 return a tree expression with code CODE of type TYPE with operands
13691 fold_build2_loc (location_t loc
,
13692 enum tree_code code
, tree type
, tree op0
, tree op1
13696 #ifdef ENABLE_FOLD_CHECKING
13697 unsigned char checksum_before_op0
[16],
13698 checksum_before_op1
[16],
13699 checksum_after_op0
[16],
13700 checksum_after_op1
[16];
13701 struct md5_ctx ctx
;
13702 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13704 md5_init_ctx (&ctx
);
13705 fold_checksum_tree (op0
, &ctx
, &ht
);
13706 md5_finish_ctx (&ctx
, checksum_before_op0
);
13709 md5_init_ctx (&ctx
);
13710 fold_checksum_tree (op1
, &ctx
, &ht
);
13711 md5_finish_ctx (&ctx
, checksum_before_op1
);
13715 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13717 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13719 #ifdef ENABLE_FOLD_CHECKING
13720 md5_init_ctx (&ctx
);
13721 fold_checksum_tree (op0
, &ctx
, &ht
);
13722 md5_finish_ctx (&ctx
, checksum_after_op0
);
13725 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13726 fold_check_failed (op0
, tem
);
13728 md5_init_ctx (&ctx
);
13729 fold_checksum_tree (op1
, &ctx
, &ht
);
13730 md5_finish_ctx (&ctx
, checksum_after_op1
);
13732 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13733 fold_check_failed (op1
, tem
);
13738 /* Fold a ternary tree expression with code CODE of type TYPE with
13739 operands OP0, OP1, and OP2. Return a folded expression if
13740 successful. Otherwise, return a tree expression with code CODE of
13741 type TYPE with operands OP0, OP1, and OP2. */
13744 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
13745 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13748 #ifdef ENABLE_FOLD_CHECKING
13749 unsigned char checksum_before_op0
[16],
13750 checksum_before_op1
[16],
13751 checksum_before_op2
[16],
13752 checksum_after_op0
[16],
13753 checksum_after_op1
[16],
13754 checksum_after_op2
[16];
13755 struct md5_ctx ctx
;
13756 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13758 md5_init_ctx (&ctx
);
13759 fold_checksum_tree (op0
, &ctx
, &ht
);
13760 md5_finish_ctx (&ctx
, checksum_before_op0
);
13763 md5_init_ctx (&ctx
);
13764 fold_checksum_tree (op1
, &ctx
, &ht
);
13765 md5_finish_ctx (&ctx
, checksum_before_op1
);
13768 md5_init_ctx (&ctx
);
13769 fold_checksum_tree (op2
, &ctx
, &ht
);
13770 md5_finish_ctx (&ctx
, checksum_before_op2
);
13774 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13775 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13777 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13779 #ifdef ENABLE_FOLD_CHECKING
13780 md5_init_ctx (&ctx
);
13781 fold_checksum_tree (op0
, &ctx
, &ht
);
13782 md5_finish_ctx (&ctx
, checksum_after_op0
);
13785 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13786 fold_check_failed (op0
, tem
);
13788 md5_init_ctx (&ctx
);
13789 fold_checksum_tree (op1
, &ctx
, &ht
);
13790 md5_finish_ctx (&ctx
, checksum_after_op1
);
13793 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13794 fold_check_failed (op1
, tem
);
13796 md5_init_ctx (&ctx
);
13797 fold_checksum_tree (op2
, &ctx
, &ht
);
13798 md5_finish_ctx (&ctx
, checksum_after_op2
);
13800 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13801 fold_check_failed (op2
, tem
);
13806 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13807 arguments in ARGARRAY, and a null static chain.
13808 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13809 of type TYPE from the given operands as constructed by build_call_array. */
13812 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13813 int nargs
, tree
*argarray
)
13816 #ifdef ENABLE_FOLD_CHECKING
13817 unsigned char checksum_before_fn
[16],
13818 checksum_before_arglist
[16],
13819 checksum_after_fn
[16],
13820 checksum_after_arglist
[16];
13821 struct md5_ctx ctx
;
13822 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13825 md5_init_ctx (&ctx
);
13826 fold_checksum_tree (fn
, &ctx
, &ht
);
13827 md5_finish_ctx (&ctx
, checksum_before_fn
);
13830 md5_init_ctx (&ctx
);
13831 for (i
= 0; i
< nargs
; i
++)
13832 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13833 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13837 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13839 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13841 #ifdef ENABLE_FOLD_CHECKING
13842 md5_init_ctx (&ctx
);
13843 fold_checksum_tree (fn
, &ctx
, &ht
);
13844 md5_finish_ctx (&ctx
, checksum_after_fn
);
13847 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13848 fold_check_failed (fn
, tem
);
13850 md5_init_ctx (&ctx
);
13851 for (i
= 0; i
< nargs
; i
++)
13852 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13853 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13855 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13856 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save the trap/rounding-related flags and clear them all for the
   duration of initializer folding.  The visible text saved and
   restored flag_trapv but never cleared it, contradicting the comment
   above ("ignore potential run-time traps"); clear it here like the
   other flags.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore the flags saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
13885 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13886 tree type
, tree op
)
13891 result
= fold_build1_loc (loc
, code
, type
, op
);
13898 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
13899 tree type
, tree op0
, tree op1
)
13904 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
13911 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
13912 int nargs
, tree
*argarray
)
13917 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13923 #undef START_FOLD_INIT
13924 #undef END_FOLD_INIT
13926 /* Determine if first argument is a multiple of second argument.  Return 0 if
13927 it is not, or we cannot easily determine it to be.
13929 An example of the sort of thing we care about (at this point; this routine
13930 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13931 fold cases do now) is discovering that
13933 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13939 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13941 This code also handles discovering that
13943 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13945 is a multiple of 8 so we don't have to worry about dealing with a
13946 possible remainder.
13948 Note that we *look* inside a SAVE_EXPR only to determine how it was
13949 calculated; it is not safe for fold to do much of anything else with the
13950 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13951 at run time. For example, the latter example above *cannot* be implemented
13952 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13953 evaluation time of the original SAVE_EXPR is not necessarily the same at
13954 the time the new expression is evaluated. The only optimization of this
13955 sort that would be valid is changing
13957 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13961 SAVE_EXPR (I) * SAVE_EXPR (J)
13963 (where the same SAVE_EXPR (J) is used in the original and the
13964 transformed version). */
/* Return nonzero when TOP is known to be a constant multiple of
   BOTTOM in TYPE (which must be an INTEGER_TYPE), recursing through
   the visible operator forms below; 0 means "not provably a
   multiple".  Conservative by design.
   NOTE(review): case labels, braces and several lines are missing
   from this copy of the function (extraction damage); compare with
   upstream fold-const.c before editing.  */
13967 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
13972 if (operand_equal_p (top
, bottom
, 0))
13975 if (TREE_CODE (type
) != INTEGER_TYPE
)
13978 switch (TREE_CODE (top
))
13981 /* Bitwise and provides a power of two multiple. If the mask is
13982 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13983 if (!integer_pow2p (bottom
))
13985 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13986 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
/* MULT_EXPR (label elided): canonicalize the constant operand into
   op2, then try to divide BOTTOM by it.  */
13989 if (TREE_CODE (bottom
) == INTEGER_CST
)
13991 op1
= TREE_OPERAND (top
, 0);
13992 op2
= TREE_OPERAND (top
, 1);
13993 if (TREE_CODE (op1
) == INTEGER_CST
)
13994 std::swap (op1
, op2
);
13995 if (TREE_CODE (op2
) == INTEGER_CST
)
13997 if (multiple_of_p (type
, op2
, bottom
))
13999 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14000 if (multiple_of_p (type
, bottom
, op2
))
14002 widest_int w
= wi::sdiv_trunc (wi::to_widest (bottom
),
14003 wi::to_widest (op2
))
14004 if (wi::fits_to_tree_p (w
, TREE_TYPE (bottom
)))
14006 op2
= wide_int_to_tree (TREE_TYPE (bottom
), w
);
14007 return multiple_of_p (type
, op1
, op2
);
14010 return multiple_of_p (type
, op1
, bottom
);
14013 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14014 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
14017 /* It is impossible to prove if op0 - op1 is multiple of bottom
14018 precisely, so be conservative here checking if both op0 and op1
14019 are multiple of bottom. Note we check the second operand first
14020 since it's usually simpler. */
14021 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14022 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
14025 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
14026 as op0 - 3 if the expression has unsigned type. For example,
14027 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
14028 op1
= TREE_OPERAND (top
, 1);
14029 if (TYPE_UNSIGNED (type
)
14030 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
14031 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
14032 return (multiple_of_p (type
, op1
, bottom
)
14033 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
/* LSHIFT_EXPR (label elided): rewrite 1 << N as an explicit constant
   and test that instead.  */
14036 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14038 op1
= TREE_OPERAND (top
, 1);
14039 /* const_binop may not detect overflow correctly,
14040 so check for it explicitly here. */
14041 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
14043 && (t1
= fold_convert (type
,
14044 const_binop (LSHIFT_EXPR
, size_one_node
,
14046 && !TREE_OVERFLOW (t1
))
14047 return multiple_of_p (type
, t1
, bottom
);
14052 /* Can't handle conversions from non-integral or wider integral type. */
14053 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14054 || (TYPE_PRECISION (type
)
14055 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14061 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
/* COND_EXPR (label elided): both arms must be multiples.  */
14064 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14065 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
/* INTEGER_CST (label elided): defer to wide-int divisibility, but
   refuse negative operands in unsigned types.  */
14068 if (TREE_CODE (bottom
) != INTEGER_CST
14069 || integer_zerop (bottom
)
14070 || (TYPE_UNSIGNED (type
)
14071 && (tree_int_cst_sgn (top
) < 0
14072 || tree_int_cst_sgn (bottom
) < 0)))
14074 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
/* SSA_NAME (label elided): peek at the defining statement.  */
14078 if (TREE_CODE (bottom
) == INTEGER_CST
14079 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
14080 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
14082 enum tree_code code
= gimple_assign_rhs_code (stmt
);
14084 /* Check for special cases to see if top is defined as multiple
14087 top = (X & ~(bottom - 1) ; bottom is power of 2
14093 if (code
== BIT_AND_EXPR
14094 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
14095 && TREE_CODE (op2
) == INTEGER_CST
14096 && integer_pow2p (bottom
)
14097 && wi::multiple_of_p (wi::to_widest (op2
),
14098 wi::to_widest (bottom
), UNSIGNED
))
/* Recognize top = X - X % bottom.  */
14101 op1
= gimple_assign_rhs1 (stmt
);
14102 if (code
== MINUS_EXPR
14103 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
14104 && TREE_CODE (op2
) == SSA_NAME
14105 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
14106 && gimple_code (stmt
) == GIMPLE_ASSIGN
14107 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
14108 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
14109 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
/* Poly-int constants: use the poly-wide-int divisibility test.  */
14116 if (POLY_INT_CST_P (top
) && poly_int_tree_p (bottom
))
14117 return multiple_p (wi::to_poly_widest (top
),
14118 wi::to_poly_widest (bottom
));
14124 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14125 This function returns true for integer expressions, and returns
14126 false if uncertain. */
/* NOTE(review): the switch case labels (REAL_CST, COMPLEX_CST,
   operand/COND/CALL forms) were lost in this extraction; the returns
   below are keyed by the code tested -- restore from upstream.  */
14129 tree_expr_finite_p (const_tree x
)
/* Modes that honor neither NaNs nor infinities are trivially finite.  */
14131 machine_mode mode
= element_mode (x
);
14132 if (!HONOR_NANS (mode
) && !HONOR_INFINITIES (mode
))
14134 switch (TREE_CODE (x
))
14137 return real_isfinite (TREE_REAL_CST_PTR (x
));
/* Complex constants: both parts must be finite.  */
14139 return tree_expr_finite_p (TREE_REALPART (x
))
14140 && tree_expr_finite_p (TREE_IMAGPART (x
));
14145 case NON_LVALUE_EXPR
:
14148 return tree_expr_finite_p (TREE_OPERAND (x
, 0));
14151 return tree_expr_finite_p (TREE_OPERAND (x
, 0))
14152 && tree_expr_finite_p (TREE_OPERAND (x
, 1));
/* COND_EXPR-style: both selectable arms must be finite.  */
14154 return tree_expr_finite_p (TREE_OPERAND (x
, 1))
14155 && tree_expr_finite_p (TREE_OPERAND (x
, 2));
/* Calls: dispatch on the combined built-in function.  */
14157 switch (get_call_combined_fn (x
))
14160 return tree_expr_finite_p (CALL_EXPR_ARG (x
, 0));
14163 return tree_expr_finite_p (CALL_EXPR_ARG (x
, 0))
14164 && tree_expr_finite_p (CALL_EXPR_ARG (x
, 1));
14174 /* Return true if expression X evaluates to an infinity.
14175 This function returns false for integer expressions. */
/* NOTE(review): switch case labels were lost in this extraction;
   restore from upstream fold-const.c.  */
14178 tree_expr_infinite_p (const_tree x
)
14180 if (!HONOR_INFINITIES (x
))
14182 switch (TREE_CODE (x
))
14185 return real_isinf (TREE_REAL_CST_PTR (x
));
14188 case NON_LVALUE_EXPR
:
14190 return tree_expr_infinite_p (TREE_OPERAND (x
, 0));
/* Conditional: infinite only if both arms are.  */
14192 return tree_expr_infinite_p (TREE_OPERAND (x
, 1))
14193 && tree_expr_infinite_p (TREE_OPERAND (x
, 2));
14199 /* Return true if expression X could evaluate to an infinity.
14200 This function returns false for integer expressions, and returns
14201 true if uncertain. */
/* NOTE(review): switch case labels were lost in this extraction;
   restore from upstream fold-const.c.  */
14204 tree_expr_maybe_infinite_p (const_tree x
)
14206 if (!HONOR_INFINITIES (x
))
14208 switch (TREE_CODE (x
))
14211 return real_isinf (TREE_REAL_CST_PTR (x
));
14216 return tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 0));
/* Conditional: maybe infinite if either arm may be.  */
14218 return tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 1))
14219 || tree_expr_maybe_infinite_p (TREE_OPERAND (x
, 2));
14225 /* Return true if expression X evaluates to a signaling NaN.
14226 This function returns false for integer expressions. */
/* NOTE(review): switch case labels were lost in this extraction;
   restore from upstream fold-const.c.  */
14229 tree_expr_signaling_nan_p (const_tree x
)
14231 if (!HONOR_SNANS (x
))
14233 switch (TREE_CODE (x
))
14236 return real_issignaling_nan (TREE_REAL_CST_PTR (x
));
14237 case NON_LVALUE_EXPR
:
14239 return tree_expr_signaling_nan_p (TREE_OPERAND (x
, 0));
/* Conditional: sNaN only if both arms are.  */
14241 return tree_expr_signaling_nan_p (TREE_OPERAND (x
, 1))
14242 && tree_expr_signaling_nan_p (TREE_OPERAND (x
, 2));
14248 /* Return true if expression X could evaluate to a signaling NaN.
14249 This function returns false for integer expressions, and returns
14250 true if uncertain. */
/* NOTE(review): switch case labels were lost in this extraction;
   restore from upstream fold-const.c.  */
14253 tree_expr_maybe_signaling_nan_p (const_tree x
)
14255 if (!HONOR_SNANS (x
))
14257 switch (TREE_CODE (x
))
14260 return real_issignaling_nan (TREE_REAL_CST_PTR (x
));
14266 case NON_LVALUE_EXPR
:
14268 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 0));
/* Binary arithmetic: either operand may contribute an sNaN.  */
14271 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 0))
14272 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 1));
/* Conditional: either arm may contribute an sNaN.  */
14274 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 1))
14275 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x
, 2));
/* Calls: dispatch on the combined built-in function.  */
14277 switch (get_call_combined_fn (x
))
14280 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 0));
14283 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 0))
14284 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x
, 1));
14293 /* Return true if expression X evaluates to a NaN.
14294 This function returns false for integer expressions. */
/* NOTE(review): switch case labels were lost in this extraction;
   restore from upstream fold-const.c.  */
14297 tree_expr_nan_p (const_tree x
)
14299 if (!HONOR_NANS (x
))
14301 switch (TREE_CODE (x
))
14304 return real_isnan (TREE_REAL_CST_PTR (x
));
14305 case NON_LVALUE_EXPR
:
14307 return tree_expr_nan_p (TREE_OPERAND (x
, 0));
/* Conditional: NaN only if both arms are.  */
14309 return tree_expr_nan_p (TREE_OPERAND (x
, 1))
14310 && tree_expr_nan_p (TREE_OPERAND (x
, 2));
14316 /* Return true if expression X could evaluate to a NaN.
14317 This function returns false for integer expressions, and returns
14318 true if uncertain. */
/* NOTE(review): switch case labels were lost in this extraction;
   restore from upstream fold-const.c.  */
14321 tree_expr_maybe_nan_p (const_tree x
)
14323 if (!HONOR_NANS (x
))
14325 switch (TREE_CODE (x
))
14328 return real_isnan (TREE_REAL_CST_PTR (x
));
/* Addition/subtraction-style codes: inf - inf etc. produce NaN, so a
   NaN is possible unless both operands are provably finite.  */
14334 return !tree_expr_finite_p (TREE_OPERAND (x
, 0))
14335 || !tree_expr_finite_p (TREE_OPERAND (x
, 1));
14339 case NON_LVALUE_EXPR
:
14341 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 0));
/* Other binary operations: either operand may contribute a NaN.  */
14344 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 0))
14345 || tree_expr_maybe_nan_p (TREE_OPERAND (x
, 1));
/* Conditional: either arm may contribute a NaN.  */
14347 return tree_expr_maybe_nan_p (TREE_OPERAND (x
, 1))
14348 || tree_expr_maybe_nan_p (TREE_OPERAND (x
, 2));
/* Calls: dispatch on the combined built-in function.  */
14350 switch (get_call_combined_fn (x
))
14353 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 0));
14356 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 0))
14357 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x
, 1));
/* Poison direct recursive calls: within the nonnegative-query code any
   recursion must go through RECURSE, which increments DEPTH and
   threads STRICT_OVERFLOW_P through.  The parenthesized callee in
   RECURSE suppresses expansion of the poisoning function-like macro.  */
14366 #define tree_expr_nonnegative_warnv_p(X, Y) \
14367 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14369 #define RECURSE(X) \
14370 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14372 /* Return true if CODE or TYPE is known to be non-negative. */
/* NOTE(review): the return statements/braces of this function were
   lost in this extraction; restore from upstream fold-const.c.  */
14375 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14377 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14378 && truth_value_p (code
))
14379 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14380 have a signed:1 type (where the value is -1 and 0). */
14385 /* Return true if (CODE OP0) is known to be non-negative. If the return
14386 value is based on the assumption that signed overflow is undefined,
14387 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14388 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): switch statement and several case labels (ABS_EXPR,
   CASE_CONVERT, ...) were lost in this extraction; restore from
   upstream fold-const.c.  */
14391 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14392 bool *strict_overflow_p
, int depth
)
/* Unsigned results are trivially non-negative.  */
14394 if (TYPE_UNSIGNED (type
))
/* ABS_EXPR region: */
14400 /* We can't return 1 if flag_wrapv is set because
14401 ABS_EXPR<INT_MIN> = INT_MIN. */
14402 if (!ANY_INTEGRAL_TYPE_P (type
))
14404 if (TYPE_OVERFLOW_UNDEFINED (type
))
/* The "ABS is nonnegative" conclusion relies on undefined signed
   overflow, so record that assumption for the caller.  */
14406 *strict_overflow_p
= true;
14411 case NON_LVALUE_EXPR
:
14413 case FIX_TRUNC_EXPR
:
14414 return RECURSE (op0
);
/* Conversion region: compare inner and outer types.  */
14418 tree inner_type
= TREE_TYPE (op0
);
14419 tree outer_type
= type
;
14421 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14423 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14424 return RECURSE (op0
);
14425 if (INTEGRAL_TYPE_P (inner_type
))
/* int -> float: unsigned sources are always non-negative, signed
   ones only if the operand itself is.  */
14427 if (TYPE_UNSIGNED (inner_type
))
14429 return RECURSE (op0
);
14432 else if (INTEGRAL_TYPE_P (outer_type
))
14434 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14435 return RECURSE (op0
);
14436 if (INTEGRAL_TYPE_P (inner_type
))
/* int -> wider int: a zero-extension cannot go negative.  */
14437 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14438 && TYPE_UNSIGNED (inner_type
);
14444 return tree_simple_nonnegative_warnv_p (code
, type
);
14447 /* We don't know sign of `t', so be conservative and return false. */
14451 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14452 value is based on the assumption that signed overflow is undefined,
14453 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14454 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): switch statement, braces and several case labels
   (PLUS_EXPR, MULT_EXPR, MIN/MAX, ...) were lost in this extraction;
   restore from upstream fold-const.c.  */
14457 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14458 tree op1
, bool *strict_overflow_p
,
/* Unsigned results are trivially non-negative.  */
14461 if (TYPE_UNSIGNED (type
))
14466 case POINTER_PLUS_EXPR
:
/* PLUS region: floats are non-negative when both operands are.  */
14468 if (FLOAT_TYPE_P (type
))
14469 return RECURSE (op0
) && RECURSE (op1
);
14471 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14472 both unsigned and at least 2 bits shorter than the result. */
14473 if (TREE_CODE (type
) == INTEGER_TYPE
14474 && TREE_CODE (op0
) == NOP_EXPR
14475 && TREE_CODE (op1
) == NOP_EXPR
)
14477 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14478 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14479 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14480 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
/* +1 accounts for the carry out of the widest operand.  */
14482 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14483 TYPE_PRECISION (inner2
)) + 1;
14484 return prec
< TYPE_PRECISION (type
);
/* MULT region: */
14490 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14492 /* x * x is always non-negative for floating point x
14493 or without overflow. */
14494 if (operand_equal_p (op0
, op1
, 0)
14495 || (RECURSE (op0
) && RECURSE (op1
)))
14497 if (ANY_INTEGRAL_TYPE_P (type
)
14498 && TYPE_OVERFLOW_UNDEFINED (type
))
/* Conclusion relies on undefined signed overflow.  */
14499 *strict_overflow_p
= true;
14504 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14505 both unsigned and their total bits is shorter than the result. */
14506 if (TREE_CODE (type
) == INTEGER_TYPE
14507 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14508 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14510 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14511 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14513 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14514 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14517 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14518 bool unsigned1
= TYPE_UNSIGNED (inner1
);
/* A non-negative constant operand counts as "unsigned" here.  */
14520 if (TREE_CODE (op0
) == INTEGER_CST
)
14521 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14523 if (TREE_CODE (op1
) == INTEGER_CST
)
14524 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14526 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14527 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
/* For constants, the number of significant bits is used instead of
   the full type precision.  */
14529 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14530 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14531 : TYPE_PRECISION (inner0
);
14533 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14534 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14535 : TYPE_PRECISION (inner1
);
14537 return precision0
+ precision1
< TYPE_PRECISION (type
);
/* Bit-or style codes: non-negative if either operand is.  */
14543 return RECURSE (op0
) || RECURSE (op1
);
/* MIN/MAX region: */
14546 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14548 if (tree_expr_maybe_nan_p (op0
) || tree_expr_maybe_nan_p (op1
))
14549 return RECURSE (op0
) && RECURSE (op1
);
14550 return RECURSE (op0
) || RECURSE (op1
);
14556 case TRUNC_DIV_EXPR
:
14557 case CEIL_DIV_EXPR
:
14558 case FLOOR_DIV_EXPR
:
14559 case ROUND_DIV_EXPR
:
14560 return RECURSE (op0
) && RECURSE (op1
);
14562 case TRUNC_MOD_EXPR
:
/* Truncating modulo takes the sign of the dividend.  */
14563 return RECURSE (op0
);
14565 case FLOOR_MOD_EXPR
:
/* Flooring modulo takes the sign of the divisor.  */
14566 return RECURSE (op1
);
14568 case CEIL_MOD_EXPR
:
14569 case ROUND_MOD_EXPR
:
14571 return tree_simple_nonnegative_warnv_p (code
, type
);
14574 /* We don't know sign of `t', so be conservative and return false. */
14578 /* Return true if T is known to be non-negative. If the return
14579 value is based on the assumption that signed overflow is undefined,
14580 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14581 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
/* NOTE(review): switch case labels (INTEGER_CST, REAL_CST, FIXED_CST,
   COND_EXPR, SSA_NAME) were lost in this extraction; restore from
   upstream fold-const.c.  */
14584 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14586 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14589 switch (TREE_CODE (t
))
14592 return tree_int_cst_sgn (t
) >= 0;
14595 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14598 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
/* Conditional: both arms must be non-negative.  */
14601 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
14604 /* Limit the depth of recursion to avoid quadratic behavior.
14605 This is expected to catch almost all occurrences in practice.
14606 If this code misses important cases that unbounded recursion
14607 would not, passes that need this information could be revised
14608 to provide it through dataflow propagation. */
14609 return (!name_registered_for_update_p (t
)
14610 && depth
< param_max_ssa_name_query_depth
14611 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
14612 strict_overflow_p
, depth
));
14615 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
14619 /* Return true if T is known to be non-negative. If the return
14620 value is based on the assumption that signed overflow is undefined,
14621 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14622 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14625 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
14626 bool *strict_overflow_p
, int depth
)
14647 case CFN_BUILT_IN_BSWAP16
:
14648 case CFN_BUILT_IN_BSWAP32
:
14649 case CFN_BUILT_IN_BSWAP64
:
14650 case CFN_BUILT_IN_BSWAP128
:
14656 /* sqrt(-0.0) is -0.0. */
14657 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
14659 return RECURSE (arg0
);
14687 CASE_CFN_NEARBYINT
:
14688 CASE_CFN_NEARBYINT_FN
:
14693 CASE_CFN_ROUNDEVEN
:
14694 CASE_CFN_ROUNDEVEN_FN
:
14699 CASE_CFN_SIGNIFICAND
:
14704 /* True if the 1st argument is nonnegative. */
14705 return RECURSE (arg0
);
14709 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14710 things. In the presence of sNaNs, we're only guaranteed to be
14711 non-negative if both operands are non-negative. In the presence
14712 of qNaNs, we're non-negative if either operand is non-negative
14713 and can't be a qNaN, or if both operands are non-negative. */
14714 if (tree_expr_maybe_signaling_nan_p (arg0
) ||
14715 tree_expr_maybe_signaling_nan_p (arg1
))
14716 return RECURSE (arg0
) && RECURSE (arg1
);
14717 return RECURSE (arg0
) ? (!tree_expr_maybe_nan_p (arg0
)
14720 && !tree_expr_maybe_nan_p (arg1
));
14724 /* True if the 1st AND 2nd arguments are nonnegative. */
14725 return RECURSE (arg0
) && RECURSE (arg1
);
14728 CASE_CFN_COPYSIGN_FN
:
14729 /* True if the 2nd argument is nonnegative. */
14730 return RECURSE (arg1
);
14733 /* True if the 1st argument is nonnegative or the second
14734 argument is an even integer. */
14735 if (TREE_CODE (arg1
) == INTEGER_CST
14736 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14738 return RECURSE (arg0
);
14741 /* True if the 1st argument is nonnegative or the second
14742 argument is an even integer valued real. */
14743 if (TREE_CODE (arg1
) == REAL_CST
)
14748 c
= TREE_REAL_CST (arg1
);
14749 n
= real_to_integer (&c
);
14752 REAL_VALUE_TYPE cint
;
14753 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14754 if (real_identical (&c
, &cint
))
14758 return RECURSE (arg0
);
14763 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
14766 /* Return true if T is known to be non-negative. If the return
14767 value is based on the assumption that signed overflow is undefined,
14768 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14769 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14772 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14774 enum tree_code code
= TREE_CODE (t
);
14775 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14782 tree temp
= TARGET_EXPR_SLOT (t
);
14783 t
= TARGET_EXPR_INITIAL (t
);
14785 /* If the initializer is non-void, then it's a normal expression
14786 that will be assigned to the slot. */
14787 if (!VOID_TYPE_P (t
))
14788 return RECURSE (t
);
14790 /* Otherwise, the initializer sets the slot in some way. One common
14791 way is an assignment statement at the end of the initializer. */
14794 if (TREE_CODE (t
) == BIND_EXPR
)
14795 t
= expr_last (BIND_EXPR_BODY (t
));
14796 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14797 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14798 t
= expr_last (TREE_OPERAND (t
, 0));
14799 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14804 if (TREE_CODE (t
) == MODIFY_EXPR
14805 && TREE_OPERAND (t
, 0) == temp
)
14806 return RECURSE (TREE_OPERAND (t
, 1));
14813 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14814 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14816 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14817 get_call_combined_fn (t
),
14820 strict_overflow_p
, depth
);
14822 case COMPOUND_EXPR
:
14824 return RECURSE (TREE_OPERAND (t
, 1));
14827 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
14830 return RECURSE (TREE_OPERAND (t
, 0));
14833 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
14838 #undef tree_expr_nonnegative_warnv_p
14840 /* Return true if T is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14846 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14848 enum tree_code code
;
14849 if (t
== error_mark_node
)
14852 code
= TREE_CODE (t
);
14853 switch (TREE_CODE_CLASS (code
))
14856 case tcc_comparison
:
14857 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14859 TREE_OPERAND (t
, 0),
14860 TREE_OPERAND (t
, 1),
14861 strict_overflow_p
, depth
);
14864 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14866 TREE_OPERAND (t
, 0),
14867 strict_overflow_p
, depth
);
14870 case tcc_declaration
:
14871 case tcc_reference
:
14872 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14880 case TRUTH_AND_EXPR
:
14881 case TRUTH_OR_EXPR
:
14882 case TRUTH_XOR_EXPR
:
14883 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14885 TREE_OPERAND (t
, 0),
14886 TREE_OPERAND (t
, 1),
14887 strict_overflow_p
, depth
);
14888 case TRUTH_NOT_EXPR
:
14889 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14891 TREE_OPERAND (t
, 0),
14892 strict_overflow_p
, depth
);
14899 case WITH_SIZE_EXPR
:
14901 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14904 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14908 /* Return true if `t' is known to be non-negative. Handle warnings
14909 about undefined signed overflow. */
14912 tree_expr_nonnegative_p (tree t
)
14914 bool ret
, strict_overflow_p
;
14916 strict_overflow_p
= false;
14917 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
14918 if (strict_overflow_p
)
14919 fold_overflow_warning (("assuming signed overflow does not occur when "
14920 "determining that expression is always "
14922 WARN_STRICT_OVERFLOW_MISC
);
14927 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14928 For floating point we further ensure that T is not denormal.
14929 Similar logic is present in nonzero_address in rtlanal.h.
14931 If the return value is based on the assumption that signed overflow
14932 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14933 change *STRICT_OVERFLOW_P. */
14936 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
14937 bool *strict_overflow_p
)
14942 return tree_expr_nonzero_warnv_p (op0
,
14943 strict_overflow_p
);
14947 tree inner_type
= TREE_TYPE (op0
);
14948 tree outer_type
= type
;
14950 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
14951 && tree_expr_nonzero_warnv_p (op0
,
14952 strict_overflow_p
));
14956 case NON_LVALUE_EXPR
:
14957 return tree_expr_nonzero_warnv_p (op0
,
14958 strict_overflow_p
);
14967 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14968 For floating point we further ensure that T is not denormal.
14969 Similar logic is present in nonzero_address in rtlanal.h.
14971 If the return value is based on the assumption that signed overflow
14972 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14973 change *STRICT_OVERFLOW_P. */
14976 tree_binary_nonzero_warnv_p (enum tree_code code
,
14979 tree op1
, bool *strict_overflow_p
)
14981 bool sub_strict_overflow_p
;
14984 case POINTER_PLUS_EXPR
:
14986 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
14988 /* With the presence of negative values it is hard
14989 to say something. */
14990 sub_strict_overflow_p
= false;
14991 if (!tree_expr_nonnegative_warnv_p (op0
,
14992 &sub_strict_overflow_p
)
14993 || !tree_expr_nonnegative_warnv_p (op1
,
14994 &sub_strict_overflow_p
))
14996 /* One of operands must be positive and the other non-negative. */
14997 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14998 overflows, on a twos-complement machine the sum of two
14999 nonnegative numbers can never be zero. */
15000 return (tree_expr_nonzero_warnv_p (op0
,
15002 || tree_expr_nonzero_warnv_p (op1
,
15003 strict_overflow_p
));
15008 if (TYPE_OVERFLOW_UNDEFINED (type
))
15010 if (tree_expr_nonzero_warnv_p (op0
,
15012 && tree_expr_nonzero_warnv_p (op1
,
15013 strict_overflow_p
))
15015 *strict_overflow_p
= true;
15022 sub_strict_overflow_p
= false;
15023 if (tree_expr_nonzero_warnv_p (op0
,
15024 &sub_strict_overflow_p
)
15025 && tree_expr_nonzero_warnv_p (op1
,
15026 &sub_strict_overflow_p
))
15028 if (sub_strict_overflow_p
)
15029 *strict_overflow_p
= true;
15034 sub_strict_overflow_p
= false;
15035 if (tree_expr_nonzero_warnv_p (op0
,
15036 &sub_strict_overflow_p
))
15038 if (sub_strict_overflow_p
)
15039 *strict_overflow_p
= true;
15041 /* When both operands are nonzero, then MAX must be too. */
15042 if (tree_expr_nonzero_warnv_p (op1
,
15043 strict_overflow_p
))
15046 /* MAX where operand 0 is positive is positive. */
15047 return tree_expr_nonnegative_warnv_p (op0
,
15048 strict_overflow_p
);
15050 /* MAX where operand 1 is positive is positive. */
15051 else if (tree_expr_nonzero_warnv_p (op1
,
15052 &sub_strict_overflow_p
)
15053 && tree_expr_nonnegative_warnv_p (op1
,
15054 &sub_strict_overflow_p
))
15056 if (sub_strict_overflow_p
)
15057 *strict_overflow_p
= true;
15063 return (tree_expr_nonzero_warnv_p (op1
,
15065 || tree_expr_nonzero_warnv_p (op0
,
15066 strict_overflow_p
));
15075 /* Return true when T is an address and is known to be nonzero.
15076 For floating point we further ensure that T is not denormal.
15077 Similar logic is present in nonzero_address in rtlanal.h.
15079 If the return value is based on the assumption that signed overflow
15080 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15081 change *STRICT_OVERFLOW_P. */
15084 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15086 bool sub_strict_overflow_p
;
15087 switch (TREE_CODE (t
))
15090 return !integer_zerop (t
);
15094 tree base
= TREE_OPERAND (t
, 0);
15096 if (!DECL_P (base
))
15097 base
= get_base_address (base
);
15099 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
15100 base
= TARGET_EXPR_SLOT (base
);
15105 /* For objects in symbol table check if we know they are non-zero.
15106 Don't do anything for variables and functions before symtab is built;
15107 it is quite possible that they will be declared weak later. */
15108 int nonzero_addr
= maybe_nonzero_address (base
);
15109 if (nonzero_addr
>= 0)
15110 return nonzero_addr
;
15112 /* Constants are never weak. */
15113 if (CONSTANT_CLASS_P (base
))
15120 sub_strict_overflow_p
= false;
15121 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15122 &sub_strict_overflow_p
)
15123 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15124 &sub_strict_overflow_p
))
15126 if (sub_strict_overflow_p
)
15127 *strict_overflow_p
= true;
15133 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
15135 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
15143 #define integer_valued_real_p(X) \
15144 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15146 #define RECURSE(X) \
15147 ((integer_valued_real_p) (X, depth + 1))
15149 /* Return true if the floating point result of (CODE OP0) has an
15150 integer value. We also allow +Inf, -Inf and NaN to be considered
15151 integer values. Return false for signaling NaN.
15153 DEPTH is the current nesting depth of the query. */
15156 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
15164 return RECURSE (op0
);
15168 tree type
= TREE_TYPE (op0
);
15169 if (TREE_CODE (type
) == INTEGER_TYPE
)
15171 if (TREE_CODE (type
) == REAL_TYPE
)
15172 return RECURSE (op0
);
15182 /* Return true if the floating point result of (CODE OP0 OP1) has an
15183 integer value. We also allow +Inf, -Inf and NaN to be considered
15184 integer values. Return false for signaling NaN.
15186 DEPTH is the current nesting depth of the query. */
15189 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
15198 return RECURSE (op0
) && RECURSE (op1
);
15206 /* Return true if the floating point result of calling FNDECL with arguments
15207 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15208 considered integer values. Return false for signaling NaN. If FNDECL
15209 takes fewer than 2 arguments, the remaining ARGn are null.
15211 DEPTH is the current nesting depth of the query. */
15214 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
15222 CASE_CFN_NEARBYINT
:
15223 CASE_CFN_NEARBYINT_FN
:
15228 CASE_CFN_ROUNDEVEN
:
15229 CASE_CFN_ROUNDEVEN_FN
:
15238 return RECURSE (arg0
) && RECURSE (arg1
);
15246 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15247 has an integer value. We also allow +Inf, -Inf and NaN to be
15248 considered integer values. Return false for signaling NaN.
15250 DEPTH is the current nesting depth of the query. */
15253 integer_valued_real_single_p (tree t
, int depth
)
15255 switch (TREE_CODE (t
))
15258 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
15261 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
15264 /* Limit the depth of recursion to avoid quadratic behavior.
15265 This is expected to catch almost all occurrences in practice.
15266 If this code misses important cases that unbounded recursion
15267 would not, passes that need this information could be revised
15268 to provide it through dataflow propagation. */
15269 return (!name_registered_for_update_p (t
)
15270 && depth
< param_max_ssa_name_query_depth
15271 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
15280 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15281 has an integer value. We also allow +Inf, -Inf and NaN to be
15282 considered integer values. Return false for signaling NaN.
15284 DEPTH is the current nesting depth of the query. */
15287 integer_valued_real_invalid_p (tree t
, int depth
)
15289 switch (TREE_CODE (t
))
15291 case COMPOUND_EXPR
:
15294 return RECURSE (TREE_OPERAND (t
, 1));
15297 return RECURSE (TREE_OPERAND (t
, 0));
15306 #undef integer_valued_real_p
15308 /* Return true if the floating point expression T has an integer value.
15309 We also allow +Inf, -Inf and NaN to be considered integer values.
15310 Return false for signaling NaN.
15312 DEPTH is the current nesting depth of the query. */
15315 integer_valued_real_p (tree t
, int depth
)
15317 if (t
== error_mark_node
)
15320 STRIP_ANY_LOCATION_WRAPPER (t
);
15322 tree_code code
= TREE_CODE (t
);
15323 switch (TREE_CODE_CLASS (code
))
15326 case tcc_comparison
:
15327 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
15328 TREE_OPERAND (t
, 1), depth
);
15331 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
15334 case tcc_declaration
:
15335 case tcc_reference
:
15336 return integer_valued_real_single_p (t
, depth
);
15346 return integer_valued_real_single_p (t
, depth
);
15350 tree arg0
= (call_expr_nargs (t
) > 0
15351 ? CALL_EXPR_ARG (t
, 0)
15353 tree arg1
= (call_expr_nargs (t
) > 1
15354 ? CALL_EXPR_ARG (t
, 1)
15356 return integer_valued_real_call_p (get_call_combined_fn (t
),
15357 arg0
, arg1
, depth
);
15361 return integer_valued_real_invalid_p (t
, depth
);
15365 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15366 attempt to fold the expression to a constant without modifying TYPE,
15369 If the expression could be simplified to a constant, then return
15370 the constant. If the expression would not be simplified to a
15371 constant, then return NULL_TREE. */
15374 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15376 tree tem
= fold_binary (code
, type
, op0
, op1
);
15377 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15380 /* Given the components of a unary expression CODE, TYPE and OP0,
15381 attempt to fold the expression to a constant without modifying
15384 If the expression could be simplified to a constant, then return
15385 the constant. If the expression would not be simplified to a
15386 constant, then return NULL_TREE. */
15389 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15391 tree tem
= fold_unary (code
, type
, op0
);
15392 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15395 /* If EXP represents referencing an element in a constant string
15396 (either via pointer arithmetic or array indexing), return the
15397 tree representing the value accessed, otherwise return NULL. */
15400 fold_read_from_constant_string (tree exp
)
15402 if ((TREE_CODE (exp
) == INDIRECT_REF
15403 || TREE_CODE (exp
) == ARRAY_REF
)
15404 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15406 tree exp1
= TREE_OPERAND (exp
, 0);
15409 location_t loc
= EXPR_LOCATION (exp
);
15411 if (TREE_CODE (exp
) == INDIRECT_REF
)
15412 string
= string_constant (exp1
, &index
, NULL
, NULL
);
15415 tree low_bound
= array_ref_low_bound (exp
);
15416 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15418 /* Optimize the special-case of a zero lower bound.
15420 We convert the low_bound to sizetype to avoid some problems
15421 with constant folding. (E.g. suppose the lower bound is 1,
15422 and its mode is QI. Without the conversion,l (ARRAY
15423 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15424 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15425 if (! integer_zerop (low_bound
))
15426 index
= size_diffop_loc (loc
, index
,
15427 fold_convert_loc (loc
, sizetype
, low_bound
));
15432 scalar_int_mode char_mode
;
15434 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15435 && TREE_CODE (string
) == STRING_CST
15436 && TREE_CODE (index
) == INTEGER_CST
15437 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15438 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))),
15440 && GET_MODE_SIZE (char_mode
) == 1)
15441 return build_int_cst_type (TREE_TYPE (exp
),
15442 (TREE_STRING_POINTER (string
)
15443 [TREE_INT_CST_LOW (index
)]));
15448 /* Folds a read from vector element at IDX of vector ARG. */
15451 fold_read_from_vector (tree arg
, poly_uint64 idx
)
15453 unsigned HOST_WIDE_INT i
;
15454 if (known_lt (idx
, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)))
15455 && known_ge (idx
, 0u)
15456 && idx
.is_constant (&i
))
15458 if (TREE_CODE (arg
) == VECTOR_CST
)
15459 return VECTOR_CST_ELT (arg
, i
);
15460 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
15462 if (i
>= CONSTRUCTOR_NELTS (arg
))
15463 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg
)));
15464 return CONSTRUCTOR_ELT (arg
, i
)->value
;
15470 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15471 an integer constant, real, or fixed-point constant.
15473 TYPE is the type of the result. */
15476 fold_negate_const (tree arg0
, tree type
)
15478 tree t
= NULL_TREE
;
15480 switch (TREE_CODE (arg0
))
15483 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15488 FIXED_VALUE_TYPE f
;
15489 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15490 &(TREE_FIXED_CST (arg0
)), NULL
,
15491 TYPE_SATURATING (type
));
15492 t
= build_fixed (type
, f
);
15493 /* Propagate overflow flags. */
15494 if (overflow_p
| TREE_OVERFLOW (arg0
))
15495 TREE_OVERFLOW (t
) = 1;
15500 if (poly_int_tree_p (arg0
))
15502 wi::overflow_type overflow
;
15503 poly_wide_int res
= wi::neg (wi::to_poly_wide (arg0
), &overflow
);
15504 t
= force_fit_type (type
, res
, 1,
15505 (overflow
&& ! TYPE_UNSIGNED (type
))
15506 || TREE_OVERFLOW (arg0
));
15510 gcc_unreachable ();
15516 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15517 an integer constant or real constant.
15519 TYPE is the type of the result. */
15522 fold_abs_const (tree arg0
, tree type
)
15524 tree t
= NULL_TREE
;
15526 switch (TREE_CODE (arg0
))
15530 /* If the value is unsigned or non-negative, then the absolute value
15531 is the same as the ordinary value. */
15532 wide_int val
= wi::to_wide (arg0
);
15533 wi::overflow_type overflow
= wi::OVF_NONE
;
15534 if (!wi::neg_p (val
, TYPE_SIGN (TREE_TYPE (arg0
))))
15537 /* If the value is negative, then the absolute value is
15540 val
= wi::neg (val
, &overflow
);
15542 /* Force to the destination type, set TREE_OVERFLOW for signed
15544 t
= force_fit_type (type
, val
, 1, overflow
| TREE_OVERFLOW (arg0
));
15549 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15550 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15556 gcc_unreachable ();
15562 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15563 constant. TYPE is the type of the result. */
15566 fold_not_const (const_tree arg0
, tree type
)
15568 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15570 return force_fit_type (type
, ~wi::to_wide (arg0
), 0, TREE_OVERFLOW (arg0
));
15573 /* Given CODE, a relational operator, the target type, TYPE and two
15574 constant operands OP0 and OP1, return the result of the
15575 relational operation. If the result is not a compile time
15576 constant, then return NULL_TREE. */
15579 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15581 int result
, invert
;
15583 /* From here on, the only cases we handle are when the result is
15584 known to be a constant. */
15586 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15588 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15589 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15591 /* Handle the cases where either operand is a NaN. */
15592 if (real_isnan (c0
) || real_isnan (c1
))
15602 case UNORDERED_EXPR
:
15616 if (flag_trapping_math
)
15622 gcc_unreachable ();
15625 return constant_boolean_node (result
, type
);
15628 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15631 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15633 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15634 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15635 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15638 /* Handle equality/inequality of complex constants. */
15639 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15641 tree rcond
= fold_relational_const (code
, type
,
15642 TREE_REALPART (op0
),
15643 TREE_REALPART (op1
));
15644 tree icond
= fold_relational_const (code
, type
,
15645 TREE_IMAGPART (op0
),
15646 TREE_IMAGPART (op1
));
15647 if (code
== EQ_EXPR
)
15648 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15649 else if (code
== NE_EXPR
)
15650 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15655 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15657 if (!VECTOR_TYPE_P (type
))
15659 /* Have vector comparison with scalar boolean result. */
15660 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
15661 && known_eq (VECTOR_CST_NELTS (op0
),
15662 VECTOR_CST_NELTS (op1
)));
15663 unsigned HOST_WIDE_INT nunits
;
15664 if (!VECTOR_CST_NELTS (op0
).is_constant (&nunits
))
15666 for (unsigned i
= 0; i
< nunits
; i
++)
15668 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15669 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15670 tree tmp
= fold_relational_const (EQ_EXPR
, type
, elem0
, elem1
);
15671 if (tmp
== NULL_TREE
)
15673 if (integer_zerop (tmp
))
15674 return constant_boolean_node (code
== NE_EXPR
, type
);
15676 return constant_boolean_node (code
== EQ_EXPR
, type
);
15678 tree_vector_builder elts
;
15679 if (!elts
.new_binary_operation (type
, op0
, op1
, false))
15681 unsigned int count
= elts
.encoded_nelts ();
15682 for (unsigned i
= 0; i
< count
; i
++)
15684 tree elem_type
= TREE_TYPE (type
);
15685 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15686 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15688 tree tem
= fold_relational_const (code
, elem_type
,
15691 if (tem
== NULL_TREE
)
15694 elts
.quick_push (build_int_cst (elem_type
,
15695 integer_zerop (tem
) ? 0 : -1));
15698 return elts
.build ();
15701 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15703 To compute GT, swap the arguments and do LT.
15704 To compute GE, do LT and invert the result.
15705 To compute LE, swap the arguments, do LT and invert the result.
15706 To compute NE, do EQ and invert the result.
15708 Therefore, the code below must handle only EQ and LT. */
15710 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15712 std::swap (op0
, op1
);
15713 code
= swap_tree_comparison (code
);
15716 /* Note that it is safe to invert for real values here because we
15717 have already handled the one case that it matters. */
15720 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15723 code
= invert_tree_comparison (code
, false);
15726 /* Compute a result for LT or EQ if args permit;
15727 Otherwise return T. */
15728 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15730 if (code
== EQ_EXPR
)
15731 result
= tree_int_cst_equal (op0
, op1
);
15733 result
= tree_int_cst_lt (op0
, op1
);
15740 return constant_boolean_node (result
, type
);
15743 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15744 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15748 fold_build_cleanup_point_expr (tree type
, tree expr
)
15750 /* If the expression does not have side effects then we don't have to wrap
15751 it with a cleanup point expression. */
15752 if (!TREE_SIDE_EFFECTS (expr
))
15755 /* If the expression is a return, check to see if the expression inside the
15756 return has no side effects or the right hand side of the modify expression
15757 inside the return. If either don't have side effects set we don't need to
15758 wrap the expression in a cleanup point expression. Note we don't check the
15759 left hand side of the modify because it should always be a return decl. */
15760 if (TREE_CODE (expr
) == RETURN_EXPR
)
15762 tree op
= TREE_OPERAND (expr
, 0);
15763 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15765 op
= TREE_OPERAND (op
, 1);
15766 if (!TREE_SIDE_EFFECTS (op
))
15770 return build1_loc (EXPR_LOCATION (expr
), CLEANUP_POINT_EXPR
, type
, expr
);
15773 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15774 of an indirection through OP0, or NULL_TREE if no simplification is
15778 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15782 poly_uint64 const_op01
;
15785 subtype
= TREE_TYPE (sub
);
15786 if (!POINTER_TYPE_P (subtype
)
15787 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
15790 if (TREE_CODE (sub
) == ADDR_EXPR
)
15792 tree op
= TREE_OPERAND (sub
, 0);
15793 tree optype
= TREE_TYPE (op
);
15795 /* *&CONST_DECL -> to the value of the const decl. */
15796 if (TREE_CODE (op
) == CONST_DECL
)
15797 return DECL_INITIAL (op
);
15798 /* *&p => p; make sure to handle *&"str"[cst] here. */
15799 if (type
== optype
)
15801 tree fop
= fold_read_from_constant_string (op
);
15807 /* *(foo *)&fooarray => fooarray[0] */
15808 else if (TREE_CODE (optype
) == ARRAY_TYPE
15809 && type
== TREE_TYPE (optype
)
15810 && (!in_gimple_form
15811 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15813 tree type_domain
= TYPE_DOMAIN (optype
);
15814 tree min_val
= size_zero_node
;
15815 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15816 min_val
= TYPE_MIN_VALUE (type_domain
);
15818 && TREE_CODE (min_val
) != INTEGER_CST
)
15820 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15821 NULL_TREE
, NULL_TREE
);
15823 /* *(foo *)&complexfoo => __real__ complexfoo */
15824 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15825 && type
== TREE_TYPE (optype
))
15826 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15827 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15828 else if (VECTOR_TYPE_P (optype
)
15829 && type
== TREE_TYPE (optype
))
15831 tree part_width
= TYPE_SIZE (type
);
15832 tree index
= bitsize_int (0);
15833 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
,
15838 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15839 && poly_int_tree_p (TREE_OPERAND (sub
, 1), &const_op01
))
15841 tree op00
= TREE_OPERAND (sub
, 0);
15842 tree op01
= TREE_OPERAND (sub
, 1);
15845 if (TREE_CODE (op00
) == ADDR_EXPR
)
15848 op00
= TREE_OPERAND (op00
, 0);
15849 op00type
= TREE_TYPE (op00
);
15851 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15852 if (VECTOR_TYPE_P (op00type
)
15853 && type
== TREE_TYPE (op00type
)
15854 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15855 but we want to treat offsets with MSB set as negative.
15856 For the code below negative offsets are invalid and
15857 TYPE_SIZE of the element is something unsigned, so
15858 check whether op01 fits into poly_int64, which implies
15859 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15860 then just use poly_uint64 because we want to treat the
15861 value as unsigned. */
15862 && tree_fits_poly_int64_p (op01
))
15864 tree part_width
= TYPE_SIZE (type
);
15865 poly_uint64 max_offset
15866 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
15867 * TYPE_VECTOR_SUBPARTS (op00type
));
15868 if (known_lt (const_op01
, max_offset
))
15870 tree index
= bitsize_int (const_op01
* BITS_PER_UNIT
);
15871 return fold_build3_loc (loc
,
15872 BIT_FIELD_REF
, type
, op00
,
15873 part_width
, index
);
15876 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15877 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15878 && type
== TREE_TYPE (op00type
))
15880 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type
)),
15882 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15884 /* ((foo *)&fooarray)[1] => fooarray[1] */
15885 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15886 && type
== TREE_TYPE (op00type
))
15888 tree type_domain
= TYPE_DOMAIN (op00type
);
15889 tree min_val
= size_zero_node
;
15890 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15891 min_val
= TYPE_MIN_VALUE (type_domain
);
15892 poly_uint64 type_size
, index
;
15893 if (poly_int_tree_p (min_val
)
15894 && poly_int_tree_p (TYPE_SIZE_UNIT (type
), &type_size
)
15895 && multiple_p (const_op01
, type_size
, &index
))
15897 poly_offset_int off
= index
+ wi::to_poly_offset (min_val
);
15898 op01
= wide_int_to_tree (sizetype
, off
);
15899 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15900 NULL_TREE
, NULL_TREE
);
15906 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15907 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15908 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15909 && (!in_gimple_form
15910 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15913 tree min_val
= size_zero_node
;
15914 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15915 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15916 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15917 min_val
= TYPE_MIN_VALUE (type_domain
);
15919 && TREE_CODE (min_val
) != INTEGER_CST
)
15921 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15928 /* Builds an expression for an indirection through T, simplifying some
15932 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15934 tree type
= TREE_TYPE (TREE_TYPE (t
));
15935 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15940 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15943 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15946 fold_indirect_ref_loc (location_t loc
, tree t
)
15948 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15956 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15957 whose result is ignored. The type of the returned tree need not be
15958 the same as the original expression. */
15961 fold_ignored_result (tree t
)
15963 if (!TREE_SIDE_EFFECTS (t
))
15964 return integer_zero_node
;
15967 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15970 t
= TREE_OPERAND (t
, 0);
15974 case tcc_comparison
:
15975 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15976 t
= TREE_OPERAND (t
, 0);
15977 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15978 t
= TREE_OPERAND (t
, 1);
15983 case tcc_expression
:
15984 switch (TREE_CODE (t
))
15986 case COMPOUND_EXPR
:
15987 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15989 t
= TREE_OPERAND (t
, 0);
15993 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15994 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15996 t
= TREE_OPERAND (t
, 0);
16009 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16012 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
16014 tree div
= NULL_TREE
;
16019 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16020 have to do anything. Only do this when we are not given a const,
16021 because in that case, this check is more expensive than just
16023 if (TREE_CODE (value
) != INTEGER_CST
)
16025 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16027 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16031 /* If divisor is a power of two, simplify this to bit manipulation. */
16032 if (pow2_or_zerop (divisor
))
16034 if (TREE_CODE (value
) == INTEGER_CST
)
16036 wide_int val
= wi::to_wide (value
);
16039 if ((val
& (divisor
- 1)) == 0)
16042 overflow_p
= TREE_OVERFLOW (value
);
16043 val
+= divisor
- 1;
16044 val
&= (int) -divisor
;
16048 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16054 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16055 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16056 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
16057 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16063 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16064 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16065 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16071 /* Likewise, but round down. */
16074 round_down_loc (location_t loc
, tree value
, int divisor
)
16076 tree div
= NULL_TREE
;
16078 gcc_assert (divisor
> 0);
16082 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16083 have to do anything. Only do this when we are not given a const,
16084 because in that case, this check is more expensive than just
16086 if (TREE_CODE (value
) != INTEGER_CST
)
16088 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16090 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16094 /* If divisor is a power of two, simplify this to bit manipulation. */
16095 if (pow2_or_zerop (divisor
))
16099 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16100 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16105 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16106 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16107 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16113 /* Returns the pointer to the base of the object addressed by EXP and
16114 extracts the information about the offset of the access, storing it
16115 to PBITPOS and POFFSET. */
16118 split_address_to_core_and_offset (tree exp
,
16119 poly_int64_pod
*pbitpos
, tree
*poffset
)
16123 int unsignedp
, reversep
, volatilep
;
16124 poly_int64 bitsize
;
16125 location_t loc
= EXPR_LOCATION (exp
);
16127 if (TREE_CODE (exp
) == ADDR_EXPR
)
16129 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16130 poffset
, &mode
, &unsignedp
, &reversep
,
16132 core
= build_fold_addr_expr_loc (loc
, core
);
16134 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
16136 core
= TREE_OPERAND (exp
, 0);
16139 *poffset
= TREE_OPERAND (exp
, 1);
16140 if (poly_int_tree_p (*poffset
))
16142 poly_offset_int tem
16143 = wi::sext (wi::to_poly_offset (*poffset
),
16144 TYPE_PRECISION (TREE_TYPE (*poffset
)));
16145 tem
<<= LOG2_BITS_PER_UNIT
;
16146 if (tem
.to_shwi (pbitpos
))
16147 *poffset
= NULL_TREE
;
16154 *poffset
= NULL_TREE
;
16160 /* Returns true if addresses of E1 and E2 differ by a constant, false
16161 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16164 ptr_difference_const (tree e1
, tree e2
, poly_int64_pod
*diff
)
16167 poly_int64 bitpos1
, bitpos2
;
16168 tree toffset1
, toffset2
, tdiff
, type
;
16170 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16171 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16173 poly_int64 bytepos1
, bytepos2
;
16174 if (!multiple_p (bitpos1
, BITS_PER_UNIT
, &bytepos1
)
16175 || !multiple_p (bitpos2
, BITS_PER_UNIT
, &bytepos2
)
16176 || !operand_equal_p (core1
, core2
, 0))
16179 if (toffset1
&& toffset2
)
16181 type
= TREE_TYPE (toffset1
);
16182 if (type
!= TREE_TYPE (toffset2
))
16183 toffset2
= fold_convert (type
, toffset2
);
16185 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16186 if (!cst_and_fits_in_hwi (tdiff
))
16189 *diff
= int_cst_value (tdiff
);
16191 else if (toffset1
|| toffset2
)
16193 /* If only one of the offsets is non-constant, the difference cannot
16200 *diff
+= bytepos1
- bytepos2
;
16204 /* Return OFF converted to a pointer offset type suitable as offset for
16205 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16207 convert_to_ptrofftype_loc (location_t loc
, tree off
)
16209 if (ptrofftype_p (TREE_TYPE (off
)))
16211 return fold_convert_loc (loc
, sizetype
, off
);
16214 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16216 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
16218 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16219 ptr
, convert_to_ptrofftype_loc (loc
, off
));
16222 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16224 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
16226 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16227 ptr
, size_int (off
));
16230 /* Return a pointer to a NUL-terminated string containing the sequence
16231 of bytes corresponding to the representation of the object referred to
16232 by SRC (or a subsequence of such bytes within it if SRC is a reference
16233 to an initialized constant array plus some constant offset).
16234 Set *STRSIZE the number of bytes in the constant sequence including
16235 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16236 where A is the array that stores the constant sequence that SRC points
16237 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16238 need not point to a string or even an array of characters but may point
16239 to an object of any type. */
16242 getbyterep (tree src
, unsigned HOST_WIDE_INT
*strsize
)
16244 /* The offset into the array A storing the string, and A's byte size. */
16252 src
= byte_representation (src
, &offset_node
, &mem_size
, NULL
);
16254 src
= string_constant (src
, &offset_node
, &mem_size
, NULL
);
16258 unsigned HOST_WIDE_INT offset
= 0;
16259 if (offset_node
!= NULL_TREE
)
16261 if (!tree_fits_uhwi_p (offset_node
))
16264 offset
= tree_to_uhwi (offset_node
);
16267 if (!tree_fits_uhwi_p (mem_size
))
16270 /* ARRAY_SIZE is the byte size of the array the constant sequence
16271 is stored in and equal to sizeof A. INIT_BYTES is the number
16272 of bytes in the constant sequence used to initialize the array,
16273 including any embedded NULs as well as the terminating NUL (for
16274 strings), but not including any trailing zeros/NULs past
16275 the terminating one appended implicitly to a string literal to
16276 zero out the remainder of the array it's stored in. For example,
16278 const char a[7] = "abc\0d";
16279 n = strlen (a + 1);
16280 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16281 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16282 is equal to strlen (A) + 1. */
16283 const unsigned HOST_WIDE_INT array_size
= tree_to_uhwi (mem_size
);
16284 unsigned HOST_WIDE_INT init_bytes
= TREE_STRING_LENGTH (src
);
16285 const char *string
= TREE_STRING_POINTER (src
);
16287 /* Ideally this would turn into a gcc_checking_assert over time. */
16288 if (init_bytes
> array_size
)
16289 init_bytes
= array_size
;
16291 if (init_bytes
== 0 || offset
>= array_size
)
16296 /* Compute and store the number of characters from the beginning
16297 of the substring at OFFSET to the end, including the terminating
16298 nul. Offsets past the initial length refer to null strings. */
16299 if (offset
< init_bytes
)
16300 *strsize
= init_bytes
- offset
;
16306 tree eltype
= TREE_TYPE (TREE_TYPE (src
));
16307 /* Support only properly NUL-terminated single byte strings. */
16308 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype
)) != 1)
16310 if (string
[init_bytes
- 1] != '\0')
16314 return offset
< init_bytes
? string
+ offset
: "";
16317 /* Return a pointer to a NUL-terminated string corresponding to
16318 the expression STR referencing a constant string, possibly
16319 involving a constant offset. Return null if STR either doesn't
16320 reference a constant string or if it involves a nonconstant
16324 c_getstr (tree str
)
16326 return getbyterep (str
, NULL
);
16329 /* Given a tree T, compute which bits in T may be nonzero. */
16332 tree_nonzero_bits (const_tree t
)
16334 switch (TREE_CODE (t
))
16337 return wi::to_wide (t
);
16339 return get_nonzero_bits (t
);
16340 case NON_LVALUE_EXPR
:
16342 return tree_nonzero_bits (TREE_OPERAND (t
, 0));
16344 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
16345 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
16348 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
16349 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
16351 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 1)),
16352 tree_nonzero_bits (TREE_OPERAND (t
, 2)));
16354 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
16355 TYPE_PRECISION (TREE_TYPE (t
)),
16356 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t
, 0))));
16358 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
16360 wide_int nzbits1
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
16361 wide_int nzbits2
= tree_nonzero_bits (TREE_OPERAND (t
, 1));
16362 if (wi::bit_and (nzbits1
, nzbits2
) == 0)
16363 return wi::bit_or (nzbits1
, nzbits2
);
16367 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
16369 tree type
= TREE_TYPE (t
);
16370 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
16371 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
16372 TYPE_PRECISION (type
));
16373 return wi::neg_p (arg1
)
16374 ? wi::rshift (nzbits
, -arg1
, TYPE_SIGN (type
))
16375 : wi::lshift (nzbits
, arg1
);
16379 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
16381 tree type
= TREE_TYPE (t
);
16382 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
16383 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
16384 TYPE_PRECISION (type
));
16385 return wi::neg_p (arg1
)
16386 ? wi::lshift (nzbits
, -arg1
)
16387 : wi::rshift (nzbits
, arg1
, TYPE_SIGN (type
));
16394 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t
)));
16399 namespace selftest
{
16401 /* Helper functions for writing tests of folding trees. */
16403 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16406 assert_binop_folds_to_const (tree lhs
, enum tree_code code
, tree rhs
,
16409 ASSERT_EQ (constant
, fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
));
16412 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
16413 wrapping WRAPPED_EXPR. */
16416 assert_binop_folds_to_nonlvalue (tree lhs
, enum tree_code code
, tree rhs
,
16419 tree result
= fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
);
16420 ASSERT_NE (wrapped_expr
, result
);
16421 ASSERT_EQ (NON_LVALUE_EXPR
, TREE_CODE (result
));
16422 ASSERT_EQ (wrapped_expr
, TREE_OPERAND (result
, 0));
16425 /* Verify that various arithmetic binary operations are folded
16429 test_arithmetic_folding ()
16431 tree type
= integer_type_node
;
16432 tree x
= create_tmp_var_raw (type
, "x");
16433 tree zero
= build_zero_cst (type
);
16434 tree one
= build_int_cst (type
, 1);
16437 /* 1 <-- (0 + 1) */
16438 assert_binop_folds_to_const (zero
, PLUS_EXPR
, one
,
16440 assert_binop_folds_to_const (one
, PLUS_EXPR
, zero
,
16443 /* (nonlvalue)x <-- (x + 0) */
16444 assert_binop_folds_to_nonlvalue (x
, PLUS_EXPR
, zero
,
16448 /* 0 <-- (x - x) */
16449 assert_binop_folds_to_const (x
, MINUS_EXPR
, x
,
16451 assert_binop_folds_to_nonlvalue (x
, MINUS_EXPR
, zero
,
16454 /* Multiplication. */
16455 /* 0 <-- (x * 0) */
16456 assert_binop_folds_to_const (x
, MULT_EXPR
, zero
,
16459 /* (nonlvalue)x <-- (x * 1) */
16460 assert_binop_folds_to_nonlvalue (x
, MULT_EXPR
, one
,
16464 /* Verify that various binary operations on vectors are folded
16468 test_vector_folding ()
16470 tree inner_type
= integer_type_node
;
16471 tree type
= build_vector_type (inner_type
, 4);
16472 tree zero
= build_zero_cst (type
);
16473 tree one
= build_one_cst (type
);
16474 tree index
= build_index_vector (type
, 0, 1);
16476 /* Verify equality tests that return a scalar boolean result. */
16477 tree res_type
= boolean_type_node
;
16478 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, one
)));
16479 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, zero
)));
16480 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, zero
, one
)));
16481 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, one
, one
)));
16482 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, index
, one
)));
16483 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
,
16485 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
,
16487 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
,
16491 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16494 test_vec_duplicate_folding ()
16496 scalar_int_mode int_mode
= SCALAR_INT_TYPE_MODE (ssizetype
);
16497 machine_mode vec_mode
= targetm
.vectorize
.preferred_simd_mode (int_mode
);
16498 /* This will be 1 if VEC_MODE isn't a vector mode. */
16499 poly_uint64 nunits
= GET_MODE_NUNITS (vec_mode
);
16501 tree type
= build_vector_type (ssizetype
, nunits
);
16502 tree dup5_expr
= fold_unary (VEC_DUPLICATE_EXPR
, type
, ssize_int (5));
16503 tree dup5_cst
= build_vector_from_val (type
, ssize_int (5));
16504 ASSERT_TRUE (operand_equal_p (dup5_expr
, dup5_cst
, 0));
16507 /* Run all of the selftests within this file. */
16510 fold_const_c_tests ()
16512 test_arithmetic_folding ();
16513 test_vector_folding ();
16514 test_vec_duplicate_folding ();
16517 } // namespace selftest
16519 #endif /* CHECKING_P */