gcc/fold-const.cc
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#define INCLUDE_ALGORITHM
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding inside a C++ manifestly-constant-evaluated
   context; zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
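/* Each of the four mutually exclusive comparison outcomes gets one bit:
   LT (1), EQ (2), GT (4) and UNORD (8).  A code is the OR of the outcomes
   for which the comparison is true, so e.g. COMPCODE_LE is LT|EQ == 3 and
   COMPCODE_NE is LT|GT|UNORD == 13.  ANDing or ORing two codes therefore
   combines the corresponding comparisons directly.  */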
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 is equivalent to X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 is equivalent to X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	  /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int min = widest_int::from (r.lower_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (min == wi::to_widest (exp1))
		code = MAX_EXPR;
	    }
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 is equivalent to X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 is equivalent to X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	  /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int max = widest_int::from (r.upper_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (max == wi::to_widest (exp1))
		code = MIN_EXPR;
	    }
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
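/* For example, "x <= 9 ? x : 10" is detected as MIN_EXPR: EXP1 (9) is
   EXP3 (10) minus one, so the comparison is the same as X < 10 and the
   whole conditional computes min (x, 10).  */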
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modifies X in place;
   if the location can and needs to be set, X is unshared first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
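/* For example, with ARG1 == 12 and ARG2 == 4 this returns the constant 3;
   with ARG2 == 5 the remainder is nonzero and NULL_TREE is returned.  */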
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
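/* A typical caller pairs these as follows (a sketch; the loop iteration
   estimators use this pattern so a warning is only emitted when the
   folded result is actually used):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ... decide whether RES is actually used ...
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);  */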
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
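/* For a 32-bit signed int, for example, this rejects only INT_MIN
   (bit pattern 0x80000000, the one value with just the sign bit set),
   since -INT_MIN is not representable in the same type.  */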
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its
	 operands does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but falls back to building a NEGATE_EXPR tree
   if T cannot be negated in a simpler way.  Also allows T to be
   NULL_TREE, in which case NULL_TREE is returned.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
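/* For example, negate_expr on "a + 5" yields "-5 - a" (via the
   -(A + B) -> (-B) - A rule above), while an expression with no cheap
   negation, such as "a * b" with unknown signed operands, simply
   becomes NEGATE_EXPR <a * b>.  */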
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal,
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
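/* For example, splitting "x - 4" with CODE == PLUS_EXPR returns the
   variable part "x" and stores the literal 4 in *MINUS_LITP (the
   subtracted literal goes to the minus slot), leaving the other parts
   null.  */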
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
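/* For example, an RROTATE_EXPR by a negative amount is evaluated as the
   opposite LROTATE_EXPR by the absolute amount, and any division or
   modulus with a zero divisor returns false instead of folding.  */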
/* Returns true if we can tell which of ARG1 and ARG2 is smaller or
   equal, storing the minimum value in RES.  */
bool
can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
{
  if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
    {
      res = wi::to_poly_wide (arg1);
      return true;
    }
  else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
    {
      res = wi::to_poly_wide (arg2);
      return true;
    }

  return false;
}
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    case MIN_EXPR:
      if (!can_min_p (arg1, arg2, res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
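/* For example, adding the int constants 2 and 3 yields the INTEGER_CST 5;
   adding INT_MAX and 1 in a signed type yields a constant with
   TREE_OVERFLOW set, so callers can tell the operation wrapped.  */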
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
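/* For example, (a + b) << c is (a << c) + (b << c) in a wrapping type,
   so LSHIFT_EXPR distributes over addition in operand 1; it does not in
   operand 2, since a << (b + c) is not (a << b) + (a << c).  */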
/* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
   is the other operand.  Try to use the value of OP to simplify the
   operation in one step, without having to process individual elements.  */
static tree
simplify_const_binop (tree_code code, tree op, tree other_op,
		      int index ATTRIBUTE_UNUSED)
{
  /* AND, IOR as well as XOR with a zerop can be simplified directly.  */
  if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
    {
      if (integer_zerop (other_op))
	{
	  if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
	    return op;
	  else if (code == BIT_AND_EXPR)
	    return other_op;
	}
    }

  return NULL_TREE;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     div = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  tree simplified;
  if ((simplified = simplify_const_binop (code, arg1, arg2, 0)))
    return simplified;

  if (commutative_tree_code (code)
      && (simplified = simplify_const_binop (code, arg2, arg1, 1)))
    return simplified;

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
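/* For example, const_binop (PLUS_EXPR, {1, 2, 3, 4}, {10, 20, 30, 40})
   folds element by element to {11, 22, 33, 44}; for stepped VECTOR_CST
   encodings only the encoded elements are combined, which is why the
   step checks above are needed.  */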
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
1890 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1891 Return zero if computing the constants is not possible. */
1893 tree
1894 const_unop (enum tree_code code, tree type, tree arg0)
1896 /* Don't perform the operation, other than NEGATE and ABS, if
1897 flag_signaling_nans is on and the operand is a signaling NaN. */
1898 if (TREE_CODE (arg0) == REAL_CST
1899 && HONOR_SNANS (arg0)
1900 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1901 && code != NEGATE_EXPR
1902 && code != ABS_EXPR
1903 && code != ABSU_EXPR)
1904 return NULL_TREE;
1906 switch (code)
1908 CASE_CONVERT:
1909 case FLOAT_EXPR:
1910 case FIX_TRUNC_EXPR:
1911 case FIXED_CONVERT_EXPR:
1912 return fold_convert_const (code, type, arg0);
1914 case ADDR_SPACE_CONVERT_EXPR:
1915 /* If the source address is 0, and the source address space
1916 cannot have a valid object at 0, fold to dest type null. */
1917 if (integer_zerop (arg0)
1918 && !(targetm.addr_space.zero_address_valid
1919 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1920 return fold_convert_const (code, type, arg0);
1921 break;
1923 case VIEW_CONVERT_EXPR:
1924 return fold_view_convert_expr (type, arg0);
1926 case NEGATE_EXPR:
1928 /* Can't call fold_negate_const directly here as that doesn't
1929 handle all cases and we might not be able to negate some
1930 constants. */
1931 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1932 if (tem && CONSTANT_CLASS_P (tem))
1933 return tem;
1934 break;
1937 case ABS_EXPR:
1938 case ABSU_EXPR:
1939 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1940 return fold_abs_const (arg0, type);
1941 break;
1943 case CONJ_EXPR:
1944 if (TREE_CODE (arg0) == COMPLEX_CST)
1946 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1947 TREE_TYPE (type));
1948 return build_complex (type, TREE_REALPART (arg0), ipart);
1950 break;
1952 case BIT_NOT_EXPR:
1953 if (TREE_CODE (arg0) == INTEGER_CST)
1954 return fold_not_const (arg0, type);
1955 else if (POLY_INT_CST_P (arg0))
1956 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1957 /* Perform BIT_NOT_EXPR on each element individually. */
1958 else if (TREE_CODE (arg0) == VECTOR_CST)
1960 tree elem;
1962 /* This can cope with stepped encodings because ~x == -1 - x. */
1963 tree_vector_builder elements;
1964 elements.new_unary_operation (type, arg0, true);
1965 unsigned int i, count = elements.encoded_nelts ();
1966 for (i = 0; i < count; ++i)
1968 elem = VECTOR_CST_ELT (arg0, i);
1969 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1970 if (elem == NULL_TREE)
1971 break;
1972 elements.quick_push (elem);
1974 if (i == count)
1975 return elements.build ();
1977 break;
1979 case TRUTH_NOT_EXPR:
1980 if (TREE_CODE (arg0) == INTEGER_CST)
1981 return constant_boolean_node (integer_zerop (arg0), type);
1982 break;
1984 case REALPART_EXPR:
1985 if (TREE_CODE (arg0) == COMPLEX_CST)
1986 return fold_convert (type, TREE_REALPART (arg0));
1987 break;
1989 case IMAGPART_EXPR:
1990 if (TREE_CODE (arg0) == COMPLEX_CST)
1991 return fold_convert (type, TREE_IMAGPART (arg0));
1992 break;
1994 case VEC_UNPACK_LO_EXPR:
1995 case VEC_UNPACK_HI_EXPR:
1996 case VEC_UNPACK_FLOAT_LO_EXPR:
1997 case VEC_UNPACK_FLOAT_HI_EXPR:
1998 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1999 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
2001 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
2002 enum tree_code subcode;
2004 if (TREE_CODE (arg0) != VECTOR_CST)
2005 return NULL_TREE;
2007 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
2008 return NULL_TREE;
2009 out_nelts = in_nelts / 2;
2010 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
2012 unsigned int offset = 0;
2013 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
2014 || code == VEC_UNPACK_FLOAT_LO_EXPR
2015 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
2016 offset = out_nelts;
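/* For example, VEC_UNPACK_LO_EXPR on a 4-element vector produces a
   2-element result; on a little-endian target the LO half is
   elements 0..1, so OFFSET stays 0, while the HI variants read
   starting at OFFSET 2. */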
2018 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
2019 subcode = NOP_EXPR;
2020 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
2021 || code == VEC_UNPACK_FLOAT_HI_EXPR)
2022 subcode = FLOAT_EXPR;
2023 else
2024 subcode = FIX_TRUNC_EXPR;
2026 tree_vector_builder elts (type, out_nelts, 1);
2027 for (i = 0; i < out_nelts; i++)
2029 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
2030 VECTOR_CST_ELT (arg0, i + offset));
2031 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
2032 return NULL_TREE;
2033 elts.quick_push (elt);
2036 return elts.build ();
2039 case VEC_DUPLICATE_EXPR:
2040 if (CONSTANT_CLASS_P (arg0))
2041 return build_vector_from_val (type, arg0);
2042 return NULL_TREE;
2044 default:
2045 break;
2048 return NULL_TREE;
2051 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2052 indicates which particular sizetype to create. */
2054 tree
2055 size_int_kind (poly_int64 number, enum size_type_kind kind)
2057 return build_int_cst (sizetype_tab[(int) kind], number);
2060 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
2061 is a tree code. The type of the result is taken from the operands.
2062 Both must be equivalent integer types, per int_binop_types_match_p.
2063 If the operands are constant, so is the result. */
2065 tree
2066 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2068 tree type = TREE_TYPE (arg0);
2070 if (arg0 == error_mark_node || arg1 == error_mark_node)
2071 return error_mark_node;
2073 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* Handle the special case of two poly_int constants faster. */
2077 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2079 /* And some specific cases even faster than that. */
2080 if (code == PLUS_EXPR)
2082 if (integer_zerop (arg0)
2083 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2084 return arg1;
2085 if (integer_zerop (arg1)
2086 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2087 return arg0;
2089 else if (code == MINUS_EXPR)
2091 if (integer_zerop (arg1)
2092 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2093 return arg0;
2095 else if (code == MULT_EXPR)
2097 if (integer_onep (arg0)
2098 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2099 return arg1;
2102 /* Handle the general case of two integer constants. For sizetype
2103 constant calculations we always want to know about overflow,
2104 even in the unsigned case. */
2105 tree res = int_const_binop (code, arg0, arg1, -1);
2106 if (res != NULL_TREE)
2107 return res;
2110 return fold_build2_loc (loc, code, type, arg0, arg1);
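/* For instance, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds directly to size_int (12) via int_const_binop, while
   non-constant operands fall through to fold_build2_loc above. */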
2113 /* Given two values, either both of sizetype or both of bitsizetype,
2114 compute the difference between the two values. Return the value
2115 in a signed type corresponding to the type of the operands. */
2117 tree
2118 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2120 tree type = TREE_TYPE (arg0);
2121 tree ctype;
2123 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2124 TREE_TYPE (arg1)));
2126 /* If the type is already signed, just do the simple thing. */
2127 if (!TYPE_UNSIGNED (type))
2128 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2130 if (type == sizetype)
2131 ctype = ssizetype;
2132 else if (type == bitsizetype)
2133 ctype = sbitsizetype;
2134 else
2135 ctype = signed_type_for (type);
2137 /* If either operand is not a constant, do the conversions to the signed
2138 type and subtract. The hardware will do the right thing with any
2139 overflow in the subtraction. */
2140 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2141 return size_binop_loc (loc, MINUS_EXPR,
2142 fold_convert_loc (loc, ctype, arg0),
2143 fold_convert_loc (loc, ctype, arg1));
2145 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2146 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2147 overflow) and negate (which can't either). Special-case a result
2148 of zero while we're here. */
2149 if (tree_int_cst_equal (arg0, arg1))
2150 return build_int_cst (ctype, 0);
2151 else if (tree_int_cst_lt (arg1, arg0))
2152 return fold_convert_loc (loc, ctype,
2153 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2154 else
2155 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2156 fold_convert_loc (loc, ctype,
2157 size_binop_loc (loc,
2158 MINUS_EXPR,
2159 arg1, arg0)));
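/* For example, with sizetype arguments 2 and 5 the result is the
   ssizetype constant -3: 5 - 2 is computed in the unsigned type,
   converted to ssizetype (which cannot overflow), and negated. */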
2162 /* A subroutine of fold_convert_const handling conversions of an
2163 INTEGER_CST to another integer type. */
2165 static tree
2166 fold_convert_const_int_from_int (tree type, const_tree arg1)
2168 /* Given an integer constant, make a new constant with the new type,
2169 appropriately sign-extended or truncated. Use a wide_int of the
2170 wider precision so that any extension is done according to ARG1's type. */
2171 tree arg1_type = TREE_TYPE (arg1);
2172 unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2173 return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2174 TYPE_SIGN (arg1_type)),
2175 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2176 TREE_OVERFLOW (arg1));
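/* For example, converting the signed char constant -1 to a 32-bit
   unsigned type sign-extends according to the source type first,
   yielding 0xffffffff in the new type. */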
2179 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2180 to an integer type. */
2182 static tree
2183 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2185 bool overflow = false;
2186 tree t;
2188 /* The following code implements the floating-point to integer
2189 conversion rules required by the Java Language Specification:
2190 IEEE NaNs are mapped to zero and values that overflow
2191 the target precision saturate, i.e. values greater than
2192 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2193 are mapped to INT_MIN. These semantics are allowed by the
2194 C and C++ standards that simply state that the behavior of
2195 FP-to-integer conversion is unspecified upon overflow. */
2197 wide_int val;
2198 REAL_VALUE_TYPE r;
2199 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2201 switch (code)
2203 case FIX_TRUNC_EXPR:
2204 real_trunc (&r, VOIDmode, &x);
2205 break;
2207 default:
2208 gcc_unreachable ();
2211 /* If R is NaN, return zero and show we have an overflow. */
2212 if (REAL_VALUE_ISNAN (r))
2214 overflow = true;
2215 val = wi::zero (TYPE_PRECISION (type));
2218 /* See if R is less than the lower bound or greater than the
2219 upper bound. */
2221 if (! overflow)
2223 tree lt = TYPE_MIN_VALUE (type);
2224 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2225 if (real_less (&r, &l))
2227 overflow = true;
2228 val = wi::to_wide (lt);
2232 if (! overflow)
2234 tree ut = TYPE_MAX_VALUE (type);
2235 if (ut)
2237 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2238 if (real_less (&u, &r))
2240 overflow = true;
2241 val = wi::to_wide (ut);
2246 if (! overflow)
2247 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2249 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2250 return t;
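/* So (int) 3.75 folds to 3 via real_trunc, a NaN folds to 0, and an
   out-of-range value such as 1.0e30 saturates to TYPE_MAX_VALUE; in
   the latter two cases TREE_OVERFLOW is set on the result. */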
2253 /* A subroutine of fold_convert_const handling conversions of a
2254 FIXED_CST to an integer type. */
2256 static tree
2257 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2259 tree t;
2260 double_int temp, temp_trunc;
2261 scalar_mode mode;
2263 /* Right-shift FIXED_CST into temp by fbit bits. */
2264 temp = TREE_FIXED_CST (arg1).data;
2265 mode = TREE_FIXED_CST (arg1).mode;
2266 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2268 temp = temp.rshift (GET_MODE_FBIT (mode),
2269 HOST_BITS_PER_DOUBLE_INT,
2270 SIGNED_FIXED_POINT_MODE_P (mode));
2272 /* Left-shift temp into temp_trunc by fbit bits. */
2273 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2274 HOST_BITS_PER_DOUBLE_INT,
2275 SIGNED_FIXED_POINT_MODE_P (mode));
2277 else
2279 temp = double_int_zero;
2280 temp_trunc = double_int_zero;
2283 /* If FIXED_CST is negative, we need to round the value toward 0:
2284 if the fractional bits are nonzero, add 1 to temp. */
2285 if (SIGNED_FIXED_POINT_MODE_P (mode)
2286 && temp_trunc.is_negative ()
2287 && TREE_FIXED_CST (arg1).data != temp_trunc)
2288 temp += double_int_one;
2290 /* Given a fixed-point constant, make new constant with new type,
2291 appropriately sign-extended or truncated. */
2292 t = force_fit_type (type, temp, -1,
2293 (temp.is_negative ()
2294 && (TYPE_UNSIGNED (type)
2295 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2296 | TREE_OVERFLOW (arg1));
2298 return t;
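/* E.g. the fixed-point value -1.5 converts to the integer -1: the
   arithmetic right shift alone would give -2, and the nonzero
   fractional bits trigger the +1 adjustment above. */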
2301 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2302 to another floating-point type. */
2304 static tree
2305 fold_convert_const_real_from_real (tree type, const_tree arg1)
2307 REAL_VALUE_TYPE value;
2308 tree t;
2310 /* If the underlying modes are the same, simply treat it as a
2311 copy and rebuild with the TREE_REAL_CST information and the
2312 given type. */
2313 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2315 t = build_real (type, TREE_REAL_CST (arg1));
2316 return t;
2319 /* Don't perform the operation if flag_signaling_nans is on
2320 and the operand is a signaling NaN. */
2321 if (HONOR_SNANS (arg1)
2322 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2323 return NULL_TREE;
2325 /* With flag_rounding_math we should respect the current rounding mode
2326 unless the conversion is exact. */
2327 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2328 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2329 return NULL_TREE;
2331 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2332 t = build_real (type, value);
2334 /* If converting an infinity or NAN to a representation that doesn't
2335 have one, set the overflow bit so that we can produce some kind of
2336 error message at the appropriate point if necessary. It's not the
2337 most user-friendly message, but it's better than nothing. */
2338 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2339 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2340 TREE_OVERFLOW (t) = 1;
2341 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2342 && !MODE_HAS_NANS (TYPE_MODE (type)))
2343 TREE_OVERFLOW (t) = 1;
2344 /* Regular overflow: the conversion produced an infinity in a mode
2345 that can't represent one. */
2346 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2347 && REAL_VALUE_ISINF (value)
2348 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2349 TREE_OVERFLOW (t) = 1;
2350 else
2351 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2352 return t;
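/* E.g. narrowing the double constant 1.0e200 to float rounds, under
   the default rounding mode, to +Inf without TREE_OVERFLOW, since
   SFmode has infinities; TREE_OVERFLOW is set above only when the
   destination mode lacks infinities or NaNs. */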
2355 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2356 to a floating-point type. */
2358 static tree
2359 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2361 REAL_VALUE_TYPE value;
2362 tree t;
2364 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2365 &TREE_FIXED_CST (arg1));
2366 t = build_real (type, value);
2368 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2369 return t;
2372 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2373 to another fixed-point type. */
2375 static tree
2376 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2378 FIXED_VALUE_TYPE value;
2379 tree t;
2380 bool overflow_p;
2382 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2383 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2384 t = build_fixed (type, value);
2386 /* Propagate overflow flags. */
2387 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 return t;
2392 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2393 to a fixed-point type. */
2395 static tree
2396 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2398 FIXED_VALUE_TYPE value;
2399 tree t;
2400 bool overflow_p;
2401 double_int di;
2403 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2405 di.low = TREE_INT_CST_ELT (arg1, 0);
2406 if (TREE_INT_CST_NUNITS (arg1) == 1)
2407 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2408 else
2409 di.high = TREE_INT_CST_ELT (arg1, 1);
2411 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2412 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2413 TYPE_SATURATING (type));
2414 t = build_fixed (type, value);
2416 /* Propagate overflow flags. */
2417 if (overflow_p | TREE_OVERFLOW (arg1))
2418 TREE_OVERFLOW (t) = 1;
2419 return t;
2422 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2423 to a fixed-point type. */
2425 static tree
2426 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2428 FIXED_VALUE_TYPE value;
2429 tree t;
2430 bool overflow_p;
2432 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2433 &TREE_REAL_CST (arg1),
2434 TYPE_SATURATING (type));
2435 t = build_fixed (type, value);
2437 /* Propagate overflow flags. */
2438 if (overflow_p | TREE_OVERFLOW (arg1))
2439 TREE_OVERFLOW (t) = 1;
2440 return t;
2443 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2444 type TYPE. If no simplification can be done return NULL_TREE. */
2446 static tree
2447 fold_convert_const (enum tree_code code, tree type, tree arg1)
2449 tree arg_type = TREE_TYPE (arg1);
2450 if (arg_type == type)
2451 return arg1;
2453 /* We can't widen types, since the runtime value could overflow the
2454 original type before being extended to the new type. */
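/* E.g. an 8-bit poly_int [250, 3] evaluated at runtime X == 2 wraps
   to 0 in the original type, whereas widening the coefficients first
   would yield 256, so only narrowing conversions are handled. */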
2455 if (POLY_INT_CST_P (arg1)
2456 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2457 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2458 return build_poly_int_cst (type,
2459 poly_wide_int::from (poly_int_cst_value (arg1),
2460 TYPE_PRECISION (type),
2461 TYPE_SIGN (arg_type)));
2463 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2464 || TREE_CODE (type) == OFFSET_TYPE)
2466 if (TREE_CODE (arg1) == INTEGER_CST)
2467 return fold_convert_const_int_from_int (type, arg1);
2468 else if (TREE_CODE (arg1) == REAL_CST)
2469 return fold_convert_const_int_from_real (code, type, arg1);
2470 else if (TREE_CODE (arg1) == FIXED_CST)
2471 return fold_convert_const_int_from_fixed (type, arg1);
2473 else if (SCALAR_FLOAT_TYPE_P (type))
2475 if (TREE_CODE (arg1) == INTEGER_CST)
2477 tree res = build_real_from_int_cst (type, arg1);
2478 /* Avoid the folding if flag_rounding_math is on and the
2479 conversion is not exact. */
2480 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2482 bool fail = false;
2483 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2484 TYPE_PRECISION (TREE_TYPE (arg1)));
2485 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2486 return NULL_TREE;
2488 return res;
2490 else if (TREE_CODE (arg1) == REAL_CST)
2491 return fold_convert_const_real_from_real (type, arg1);
2492 else if (TREE_CODE (arg1) == FIXED_CST)
2493 return fold_convert_const_real_from_fixed (type, arg1);
2495 else if (FIXED_POINT_TYPE_P (type))
2497 if (TREE_CODE (arg1) == FIXED_CST)
2498 return fold_convert_const_fixed_from_fixed (type, arg1);
2499 else if (TREE_CODE (arg1) == INTEGER_CST)
2500 return fold_convert_const_fixed_from_int (type, arg1);
2501 else if (TREE_CODE (arg1) == REAL_CST)
2502 return fold_convert_const_fixed_from_real (type, arg1);
2504 else if (VECTOR_TYPE_P (type))
2506 if (TREE_CODE (arg1) == VECTOR_CST
2507 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2509 tree elttype = TREE_TYPE (type);
2510 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2511 /* We can't handle steps directly when extending, since the
2512 values need to wrap at the original precision first. */
2513 bool step_ok_p
2514 = (INTEGRAL_TYPE_P (elttype)
2515 && INTEGRAL_TYPE_P (arg1_elttype)
2516 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2517 tree_vector_builder v;
2518 if (!v.new_unary_operation (type, arg1, step_ok_p))
2519 return NULL_TREE;
2520 unsigned int len = v.encoded_nelts ();
2521 for (unsigned int i = 0; i < len; ++i)
2523 tree elt = VECTOR_CST_ELT (arg1, i);
2524 tree cvt = fold_convert_const (code, elttype, elt);
2525 if (cvt == NULL_TREE)
2526 return NULL_TREE;
2527 v.quick_push (cvt);
2529 return v.build ();
2532 return NULL_TREE;
2535 /* Construct a vector of zero elements of vector type TYPE. */
2537 static tree
2538 build_zero_vector (tree type)
2540 tree t;
2542 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2543 return build_vector_from_val (type, t);
2546 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2548 bool
2549 fold_convertible_p (const_tree type, const_tree arg)
2551 const_tree orig = TREE_TYPE (arg);
2553 if (type == orig)
2554 return true;
2556 if (TREE_CODE (arg) == ERROR_MARK
2557 || TREE_CODE (type) == ERROR_MARK
2558 || TREE_CODE (orig) == ERROR_MARK)
2559 return false;
2561 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2562 return true;
2564 switch (TREE_CODE (type))
2566 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2567 case POINTER_TYPE: case REFERENCE_TYPE:
2568 case OFFSET_TYPE:
2569 return (INTEGRAL_TYPE_P (orig)
2570 || (POINTER_TYPE_P (orig)
2571 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2572 || TREE_CODE (orig) == OFFSET_TYPE);
2574 case REAL_TYPE:
2575 case FIXED_POINT_TYPE:
2576 case VOID_TYPE:
2577 return TREE_CODE (type) == TREE_CODE (orig);
2579 case VECTOR_TYPE:
2580 return (VECTOR_TYPE_P (orig)
2581 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2582 TYPE_VECTOR_SUBPARTS (orig))
2583 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2585 default:
2586 return false;
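/* For instance, fold_convertible_p holds for int -> long and for
   conversions between same-sized pointer types, but not for
   int -> float, which needs a FLOAT_EXPR rather than a NOP_EXPR. */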
2590 /* Convert expression ARG to type TYPE. Used by the middle-end for
2591 simple conversions in preference to calling the front-end's convert. */
2593 tree
2594 fold_convert_loc (location_t loc, tree type, tree arg)
2596 tree orig = TREE_TYPE (arg);
2597 tree tem;
2599 if (type == orig)
2600 return arg;
2602 if (TREE_CODE (arg) == ERROR_MARK
2603 || TREE_CODE (type) == ERROR_MARK
2604 || TREE_CODE (orig) == ERROR_MARK)
2605 return error_mark_node;
2607 switch (TREE_CODE (type))
2609 case POINTER_TYPE:
2610 case REFERENCE_TYPE:
2611 /* Handle conversions between pointers to different address spaces. */
2612 if (POINTER_TYPE_P (orig)
2613 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2614 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2615 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2616 /* fall through */
2618 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2619 case OFFSET_TYPE: case BITINT_TYPE:
2620 if (TREE_CODE (arg) == INTEGER_CST)
2622 tem = fold_convert_const (NOP_EXPR, type, arg);
2623 if (tem != NULL_TREE)
2624 return tem;
2626 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2627 || TREE_CODE (orig) == OFFSET_TYPE)
2628 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2629 if (TREE_CODE (orig) == COMPLEX_TYPE)
2630 return fold_convert_loc (loc, type,
2631 fold_build1_loc (loc, REALPART_EXPR,
2632 TREE_TYPE (orig), arg));
2633 gcc_assert (VECTOR_TYPE_P (orig)
2634 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2637 case REAL_TYPE:
2638 if (TREE_CODE (arg) == INTEGER_CST)
2640 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2641 if (tem != NULL_TREE)
2642 return tem;
2644 else if (TREE_CODE (arg) == REAL_CST)
2646 tem = fold_convert_const (NOP_EXPR, type, arg);
2647 if (tem != NULL_TREE)
2648 return tem;
2650 else if (TREE_CODE (arg) == FIXED_CST)
2652 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2653 if (tem != NULL_TREE)
2654 return tem;
2657 switch (TREE_CODE (orig))
2659 case INTEGER_TYPE: case BITINT_TYPE:
2660 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2661 case POINTER_TYPE: case REFERENCE_TYPE:
2662 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2664 case REAL_TYPE:
2665 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2667 case FIXED_POINT_TYPE:
2668 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2670 case COMPLEX_TYPE:
2671 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 return fold_convert_loc (loc, type, tem);
2674 default:
2675 gcc_unreachable ();
2678 case FIXED_POINT_TYPE:
2679 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2680 || TREE_CODE (arg) == REAL_CST)
2682 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2683 if (tem != NULL_TREE)
2684 goto fold_convert_exit;
2687 switch (TREE_CODE (orig))
2689 case FIXED_POINT_TYPE:
2690 case INTEGER_TYPE:
2691 case ENUMERAL_TYPE:
2692 case BOOLEAN_TYPE:
2693 case REAL_TYPE:
2694 case BITINT_TYPE:
2695 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2697 case COMPLEX_TYPE:
2698 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2699 return fold_convert_loc (loc, type, tem);
2701 default:
2702 gcc_unreachable ();
2705 case COMPLEX_TYPE:
2706 switch (TREE_CODE (orig))
2708 case INTEGER_TYPE: case BITINT_TYPE:
2709 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2710 case POINTER_TYPE: case REFERENCE_TYPE:
2711 case REAL_TYPE:
2712 case FIXED_POINT_TYPE:
2713 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2714 fold_convert_loc (loc, TREE_TYPE (type), arg),
2715 fold_convert_loc (loc, TREE_TYPE (type),
2716 integer_zero_node));
2717 case COMPLEX_TYPE:
2719 tree rpart, ipart;
2721 if (TREE_CODE (arg) == COMPLEX_EXPR)
2723 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2724 TREE_OPERAND (arg, 0));
2725 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2726 TREE_OPERAND (arg, 1));
2727 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2730 arg = save_expr (arg);
2731 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2732 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2733 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2734 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2735 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2738 default:
2739 gcc_unreachable ();
2742 case VECTOR_TYPE:
2743 if (integer_zerop (arg))
2744 return build_zero_vector (type);
2745 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2746 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2747 || VECTOR_TYPE_P (orig));
2748 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2750 case VOID_TYPE:
2751 tem = fold_ignored_result (arg);
2752 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2754 default:
2755 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2756 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2757 gcc_unreachable ();
2759 fold_convert_exit:
2760 tem = protected_set_expr_location_unshare (tem, loc);
2761 return tem;
2764 /* Return false if expr can be assumed not to be an lvalue, true
2765 otherwise. */
2767 static bool
2768 maybe_lvalue_p (const_tree x)
2770 /* We only need to wrap lvalue tree codes. */
2771 switch (TREE_CODE (x))
2773 case VAR_DECL:
2774 case PARM_DECL:
2775 case RESULT_DECL:
2776 case LABEL_DECL:
2777 case FUNCTION_DECL:
2778 case SSA_NAME:
2779 case COMPOUND_LITERAL_EXPR:
2781 case COMPONENT_REF:
2782 case MEM_REF:
2783 case INDIRECT_REF:
2784 case ARRAY_REF:
2785 case ARRAY_RANGE_REF:
2786 case BIT_FIELD_REF:
2787 case OBJ_TYPE_REF:
2789 case REALPART_EXPR:
2790 case IMAGPART_EXPR:
2791 case PREINCREMENT_EXPR:
2792 case PREDECREMENT_EXPR:
2793 case SAVE_EXPR:
2794 case TRY_CATCH_EXPR:
2795 case WITH_CLEANUP_EXPR:
2796 case COMPOUND_EXPR:
2797 case MODIFY_EXPR:
2798 case TARGET_EXPR:
2799 case COND_EXPR:
2800 case BIND_EXPR:
2801 case VIEW_CONVERT_EXPR:
2802 break;
2804 default:
2805 /* Assume the worst for front-end tree codes. */
2806 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2807 break;
2808 return false;
2811 return true;
2814 /* Return an expr equal to X but certainly not valid as an lvalue. */
2816 tree
2817 non_lvalue_loc (location_t loc, tree x)
2819 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2820 us. */
2821 if (in_gimple_form)
2822 return x;
2824 if (! maybe_lvalue_p (x))
2825 return x;
2826 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2829 /* Given a tree comparison code, return the code that is the logical inverse.
2830 It is generally not safe to do this for floating-point comparisons, except
2831 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2832 ERROR_MARK in this case. */
2834 enum tree_code
2835 invert_tree_comparison (enum tree_code code, bool honor_nans)
2837 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2838 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2839 return ERROR_MARK;
2841 switch (code)
2843 case EQ_EXPR:
2844 return NE_EXPR;
2845 case NE_EXPR:
2846 return EQ_EXPR;
2847 case GT_EXPR:
2848 return honor_nans ? UNLE_EXPR : LE_EXPR;
2849 case GE_EXPR:
2850 return honor_nans ? UNLT_EXPR : LT_EXPR;
2851 case LT_EXPR:
2852 return honor_nans ? UNGE_EXPR : GE_EXPR;
2853 case LE_EXPR:
2854 return honor_nans ? UNGT_EXPR : GT_EXPR;
2855 case LTGT_EXPR:
2856 return UNEQ_EXPR;
2857 case UNEQ_EXPR:
2858 return LTGT_EXPR;
2859 case UNGT_EXPR:
2860 return LE_EXPR;
2861 case UNGE_EXPR:
2862 return LT_EXPR;
2863 case UNLT_EXPR:
2864 return GE_EXPR;
2865 case UNLE_EXPR:
2866 return GT_EXPR;
2867 case ORDERED_EXPR:
2868 return UNORDERED_EXPR;
2869 case UNORDERED_EXPR:
2870 return ORDERED_EXPR;
2871 default:
2872 gcc_unreachable ();
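/* For example, inverting LT_EXPR with NaNs honored yields UNGE_EXPR,
   since !(x < y) must also hold when either operand is a NaN; with
   -ftrapping-math the inversion would drop the "invalid" exception
   raised by the ordered comparison, hence ERROR_MARK above. */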
2876 /* Similar, but return the comparison that results if the operands are
2877 swapped. This is safe for floating-point. */
2879 enum tree_code
2880 swap_tree_comparison (enum tree_code code)
2882 switch (code)
2884 case EQ_EXPR:
2885 case NE_EXPR:
2886 case ORDERED_EXPR:
2887 case UNORDERED_EXPR:
2888 case LTGT_EXPR:
2889 case UNEQ_EXPR:
2890 return code;
2891 case GT_EXPR:
2892 return LT_EXPR;
2893 case GE_EXPR:
2894 return LE_EXPR;
2895 case LT_EXPR:
2896 return GT_EXPR;
2897 case LE_EXPR:
2898 return GE_EXPR;
2899 case UNGT_EXPR:
2900 return UNLT_EXPR;
2901 case UNGE_EXPR:
2902 return UNLE_EXPR;
2903 case UNLT_EXPR:
2904 return UNGT_EXPR;
2905 case UNLE_EXPR:
2906 return UNGE_EXPR;
2907 default:
2908 gcc_unreachable ();
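/* E.g. x < y becomes y > x and x UNLE y becomes y UNGE x; no
   comparison gains or loses an unordered case, which is why swapping
   is safe even for floating point. */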
2913 /* Convert a comparison tree code from an enum tree_code representation
2914 into a compcode bit-based encoding. This function is the inverse of
2915 compcode_to_comparison. */
2917 static enum comparison_code
2918 comparison_to_compcode (enum tree_code code)
2920 switch (code)
2922 case LT_EXPR:
2923 return COMPCODE_LT;
2924 case EQ_EXPR:
2925 return COMPCODE_EQ;
2926 case LE_EXPR:
2927 return COMPCODE_LE;
2928 case GT_EXPR:
2929 return COMPCODE_GT;
2930 case NE_EXPR:
2931 return COMPCODE_NE;
2932 case GE_EXPR:
2933 return COMPCODE_GE;
2934 case ORDERED_EXPR:
2935 return COMPCODE_ORD;
2936 case UNORDERED_EXPR:
2937 return COMPCODE_UNORD;
2938 case UNLT_EXPR:
2939 return COMPCODE_UNLT;
2940 case UNEQ_EXPR:
2941 return COMPCODE_UNEQ;
2942 case UNLE_EXPR:
2943 return COMPCODE_UNLE;
2944 case UNGT_EXPR:
2945 return COMPCODE_UNGT;
2946 case LTGT_EXPR:
2947 return COMPCODE_LTGT;
2948 case UNGE_EXPR:
2949 return COMPCODE_UNGE;
2950 default:
2951 gcc_unreachable ();
2955 /* Convert a compcode bit-based encoding of a comparison operator back
2956 to GCC's enum tree_code representation. This function is the
2957 inverse of comparison_to_compcode. */
2959 static enum tree_code
2960 compcode_to_comparison (enum comparison_code code)
2962 switch (code)
2964 case COMPCODE_LT:
2965 return LT_EXPR;
2966 case COMPCODE_EQ:
2967 return EQ_EXPR;
2968 case COMPCODE_LE:
2969 return LE_EXPR;
2970 case COMPCODE_GT:
2971 return GT_EXPR;
2972 case COMPCODE_NE:
2973 return NE_EXPR;
2974 case COMPCODE_GE:
2975 return GE_EXPR;
2976 case COMPCODE_ORD:
2977 return ORDERED_EXPR;
2978 case COMPCODE_UNORD:
2979 return UNORDERED_EXPR;
2980 case COMPCODE_UNLT:
2981 return UNLT_EXPR;
2982 case COMPCODE_UNEQ:
2983 return UNEQ_EXPR;
2984 case COMPCODE_UNLE:
2985 return UNLE_EXPR;
2986 case COMPCODE_UNGT:
2987 return UNGT_EXPR;
2988 case COMPCODE_LTGT:
2989 return LTGT_EXPR;
2990 case COMPCODE_UNGE:
2991 return UNGE_EXPR;
2992 default:
2993 gcc_unreachable ();
2997 /* Return true if COND1 tests the opposite condition of COND2. */
2999 bool
3000 inverse_conditions_p (const_tree cond1, const_tree cond2)
3002 return (COMPARISON_CLASS_P (cond1)
3003 && COMPARISON_CLASS_P (cond2)
3004 && (invert_tree_comparison
3005 (TREE_CODE (cond1),
3006 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3007 && operand_equal_p (TREE_OPERAND (cond1, 0),
3008 TREE_OPERAND (cond2, 0), 0)
3009 && operand_equal_p (TREE_OPERAND (cond1, 1),
3010 TREE_OPERAND (cond2, 1), 0));
3013 /* Return a tree for the comparison which is the combination of
3014 doing the AND or OR (depending on CODE) of the two operations LCODE
3015 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3016 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3017 if this makes the transformation invalid. */
3019 tree
3020 combine_comparisons (location_t loc,
3021 enum tree_code code, enum tree_code lcode,
3022 enum tree_code rcode, tree truth_type,
3023 tree ll_arg, tree lr_arg)
3025 bool honor_nans = HONOR_NANS (ll_arg);
3026 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3027 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3028 int compcode;
3030 switch (code)
3032 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3033 compcode = lcompcode & rcompcode;
3034 break;
3036 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3037 compcode = lcompcode | rcompcode;
3038 break;
3040 default:
3041 return NULL_TREE;
3044 if (!honor_nans)
3046 /* Eliminate unordered comparisons, as well as LTGT and ORD
3047 which are not used unless the mode has NaNs. */
3048 compcode &= ~COMPCODE_UNORD;
3049 if (compcode == COMPCODE_LTGT)
3050 compcode = COMPCODE_NE;
3051 else if (compcode == COMPCODE_ORD)
3052 compcode = COMPCODE_TRUE;
3054 else if (flag_trapping_math)
3056 /* Check that the original operation and the optimized ones will trap
3057 under the same condition. */
3058 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3059 && (lcompcode != COMPCODE_EQ)
3060 && (lcompcode != COMPCODE_ORD);
3061 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3062 && (rcompcode != COMPCODE_EQ)
3063 && (rcompcode != COMPCODE_ORD);
3064 bool trap = (compcode & COMPCODE_UNORD) == 0
3065 && (compcode != COMPCODE_EQ)
3066 && (compcode != COMPCODE_ORD);
3068 /* In a short-circuited boolean expression the LHS might be
3069 such that the RHS, if evaluated, will never trap. For
3070 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3071 if neither x nor y is NaN. (This is a mixed blessing: for
3072 example, the expression above will never trap, hence
3073 optimizing it to x < y would be invalid). */
3074 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3075 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3076 rtrap = false;
3078 /* If the comparison was short-circuited, and only the RHS
3079 trapped, we may now generate a spurious trap. */
3080 if (rtrap && !ltrap
3081 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3082 return NULL_TREE;
3084 /* If we changed the conditions that cause a trap, we lose. */
3085 if ((ltrap || rtrap) != trap)
3086 return NULL_TREE;
3089 if (compcode == COMPCODE_TRUE)
3090 return constant_boolean_node (true, truth_type);
3091 else if (compcode == COMPCODE_FALSE)
3092 return constant_boolean_node (false, truth_type);
3093 else
3095 enum tree_code tcode;
3097 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3098 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
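/* For example, ORing x < y with x == y yields
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the pair folds to a
   single x <= y once the trap conditions above are satisfied. */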
3102 /* Return nonzero if two operands (typically of the same tree node)
3103 are necessarily equal. FLAGS modifies behavior as follows:
3105 If OEP_ONLY_CONST is set, only return nonzero for constants.
3106 This function tests whether the operands are indistinguishable;
3107 it does not test whether they are equal using C's == operation.
3108 The distinction is important for IEEE floating point, because
3109 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3110 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3112 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3113 even though it may hold multiple values during a function.
3114 This is because a GCC tree node guarantees that nothing else is
3115 executed between the evaluation of its "operands" (which may often
3116 be evaluated in arbitrary order). Hence if the operands themselves
3117 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3118 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3119 unset means assuming isochronic (or instantaneous) tree equivalence.
3120 Unless comparing arbitrary expression trees, such as from different
3121 statements, this flag can usually be left unset.
3123 If OEP_PURE_SAME is set, then pure functions with identical arguments
3124 are considered the same. It is used when the caller has other ways
3125 to ensure that global memory is unchanged in between.
3127 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3128 not values of expressions.
3130 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3131 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3133 If OEP_BITWISE is set, then require the values to be bitwise identical
3134 rather than simply numerically equal. Do not take advantage of things
3135 like math-related flags or undefined behavior; only return true for
3136 values that are provably bitwise identical in all circumstances.
3138 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3139 any operand with side effects. This is unnecessarily conservative in the
3140 case we know that arg0 and arg1 are in disjoint code paths (such as in
3141 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3142 addresses with TREE_CONSTANT flag set so we know that &var == &var
3143 even if var is volatile. */
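/* For instance, the REAL_CSTs -0.0 and 0.0 compare unequal here when
   signed zeros are honored, even though -0.0 == 0.0 evaluates to true
   in C; OEP_BITWISE makes that distinction unconditional. */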
3145 bool
3146 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3147 unsigned int flags)
3149 bool r;
3150 if (verify_hash_value (arg0, arg1, flags, &r))
3151 return r;
3153 STRIP_ANY_LOCATION_WRAPPER (arg0);
3154 STRIP_ANY_LOCATION_WRAPPER (arg1);
3156 /* If either is ERROR_MARK, they aren't equal. */
3157 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3158 || TREE_TYPE (arg0) == error_mark_node
3159 || TREE_TYPE (arg1) == error_mark_node)
3160 return false;
3162 /* Similarly, if either does not have a type (like a template id),
3163 they aren't equal. */
3164 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3165 return false;
3167 /* Bitwise identity makes no sense if the values have different layouts. */
3168 if ((flags & OEP_BITWISE)
3169 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3170 return false;
3172 /* We cannot consider pointers to different address space equal. */
3173 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3174 && POINTER_TYPE_P (TREE_TYPE (arg1))
3175 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3176 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3177 return false;
3179 /* Check equality of integer constants before bailing out due to
3180 precision differences. */
3181 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3183 /* Address of INTEGER_CST is not defined; check that we did not forget
3184 to drop the OEP_ADDRESS_OF flags. */
3185 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3186 return tree_int_cst_equal (arg0, arg1);
3189 if (!(flags & OEP_ADDRESS_OF))
3191 /* If both types don't have the same signedness, then we can't consider
3192 them equal. We must check this before the STRIP_NOPS calls
3193 because they may change the signedness of the arguments. As pointers
3194 strictly don't have a signedness, require either two pointers or
3195 two non-pointers as well. */
3196 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3197 || POINTER_TYPE_P (TREE_TYPE (arg0))
3198 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3199 return false;
3201 /* If both types don't have the same precision, then it is not safe
3202 to strip NOPs. */
3203 if (element_precision (TREE_TYPE (arg0))
3204 != element_precision (TREE_TYPE (arg1)))
3205 return false;
3207 STRIP_NOPS (arg0);
3208 STRIP_NOPS (arg1);
3210 #if 0
3211 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3212 sanity check once the issue is solved. */
3213 else
3214 /* Addresses of conversions and SSA_NAMEs (and many other things)
3215 are not defined. Check that we did not forget to drop the
3216 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3217 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3218 && TREE_CODE (arg0) != SSA_NAME);
3219 #endif
3221 /* In case both args are comparisons but with different comparison
3222 code, try to swap the comparison operands of one arg to produce
3223 a match and compare that variant. */
3224 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3225 && COMPARISON_CLASS_P (arg0)
3226 && COMPARISON_CLASS_P (arg1))
3228 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3230 if (TREE_CODE (arg0) == swap_code)
3231 return operand_equal_p (TREE_OPERAND (arg0, 0),
3232 TREE_OPERAND (arg1, 1), flags)
3233 && operand_equal_p (TREE_OPERAND (arg0, 1),
3234 TREE_OPERAND (arg1, 0), flags);
3237 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3239 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3240 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3242 else if (flags & OEP_ADDRESS_OF)
3244 /* If we are interested in comparing addresses, ignore
3245 MEM_REF wrappings of the base that can appear just for
3246 TBAA reasons. */
3247 if (TREE_CODE (arg0) == MEM_REF
3248 && DECL_P (arg1)
3249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3250 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3251 && integer_zerop (TREE_OPERAND (arg0, 1)))
3252 return true;
3253 else if (TREE_CODE (arg1) == MEM_REF
3254 && DECL_P (arg0)
3255 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3256 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3257 && integer_zerop (TREE_OPERAND (arg1, 1)))
3258 return true;
3259 return false;
3261 else
3262 return false;
3265 /* When not checking addresses, this is needed for conversions and for
3266 COMPONENT_REF. Might as well play it safe and always test this. */
3267 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3268 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3269 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3270 && !(flags & OEP_ADDRESS_OF)))
3271 return false;
3273 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3274 We don't care about side effects in that case because the SAVE_EXPR
3275 takes care of that for us. In all other cases, two expressions are
3276 equal if they have no side effects. If we have two identical
3277 expressions with side effects that should be treated the same due
3278 to the only side effects being identical SAVE_EXPR's, that will
3279 be detected in the recursive calls below.
3280 If we are taking an invariant address of two identical objects
3281 they are necessarily equal as well. */
3282 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3283 && (TREE_CODE (arg0) == SAVE_EXPR
3284 || (flags & OEP_MATCH_SIDE_EFFECTS)
3285 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3286 return true;
3288 /* Next handle constant cases, those for which we can return 1 even
3289 if ONLY_CONST is set. */
3290 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3291 switch (TREE_CODE (arg0))
3293 case INTEGER_CST:
3294 return tree_int_cst_equal (arg0, arg1);
3296 case FIXED_CST:
3297 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3298 TREE_FIXED_CST (arg1));
3300 case REAL_CST:
3301 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3302 return true;
3304 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3306 /* If we do not distinguish between signed and unsigned zero,
3307 consider them equal. */
3308 if (real_zerop (arg0) && real_zerop (arg1))
3309 return true;
3311 return false;
3313 case VECTOR_CST:
3315 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3316 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3317 return false;
3319 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3320 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3321 return false;
3323 unsigned int count = vector_cst_encoded_nelts (arg0);
3324 for (unsigned int i = 0; i < count; ++i)
3325 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3326 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3327 return false;
3328 return true;
3331 case COMPLEX_CST:
3332 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3333 flags)
3334 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3335 flags));
3337 case STRING_CST:
3338 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3339 && ! memcmp (TREE_STRING_POINTER (arg0),
3340 TREE_STRING_POINTER (arg1),
3341 TREE_STRING_LENGTH (arg0)));
3343 case ADDR_EXPR:
3344 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3345 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3346 flags | OEP_ADDRESS_OF
3347 | OEP_MATCH_SIDE_EFFECTS);
3348 case CONSTRUCTOR:
3350 /* In GIMPLE empty constructors are allowed in initializers of
3351 aggregates. */
3352 if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3353 return true;
3355 /* See sem_variable::equals in ipa-icf for a similar approach. */
3356 tree typ0 = TREE_TYPE (arg0);
3357 tree typ1 = TREE_TYPE (arg1);
3359 if (TREE_CODE (typ0) != TREE_CODE (typ1))
3360 return false;
3361 else if (TREE_CODE (typ0) == ARRAY_TYPE)
3363 /* For arrays, check that the sizes all match. */
3364 const HOST_WIDE_INT siz0 = int_size_in_bytes (typ0);
3365 if (TYPE_MODE (typ0) != TYPE_MODE (typ1)
3366 || siz0 < 0
3367 || siz0 != int_size_in_bytes (typ1))
3368 return false;
3370 else if (!types_compatible_p (typ0, typ1))
3371 return false;
3373 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3374 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3375 if (vec_safe_length (v0) != vec_safe_length (v1))
3376 return false;
3378 /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3379 of the CONSTRUCTOR referenced indirectly. */
3380 flags &= ~OEP_ADDRESS_OF;
3382 for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3384 constructor_elt *c0 = &(*v0)[idx];
3385 constructor_elt *c1 = &(*v1)[idx];
3387 /* Check that the values are the same... */
3388 if (c0->value != c1->value
3389 && !operand_equal_p (c0->value, c1->value, flags))
3390 return false;
3392 /* ... and that they apply to the same field! */
3393 if (c0->index != c1->index
3394 && (TREE_CODE (typ0) == ARRAY_TYPE
3395 ? !operand_equal_p (c0->index, c1->index, flags)
3396 : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3397 DECL_FIELD_OFFSET (c1->index),
3398 flags)
3399 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3400 DECL_FIELD_BIT_OFFSET (c1->index),
3401 flags)))
3402 return false;
3405 return true;
3408 default:
3409 break;
3412 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3413 two instances of undefined behavior will give identical results. */
3414 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3415 return false;
3417 /* Define macros to test an operand from arg0 and arg1 for equality and a
3418 variant that allows null and views null as being different from any
3419 non-null value. In the latter case, if either is null, then both
3420 must be; otherwise, do the normal comparison. */
3421 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3422 TREE_OPERAND (arg1, N), flags)
3424 #define OP_SAME_WITH_NULL(N) \
3425 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3426 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3428 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3430 case tcc_unary:
3431 /* Two conversions are equal only if signedness and modes match. */
3432 switch (TREE_CODE (arg0))
3434 CASE_CONVERT:
3435 case FIX_TRUNC_EXPR:
3436 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3437 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3438 return false;
3439 break;
3440 default:
3441 break;
3444 return OP_SAME (0);
3447 case tcc_comparison:
3448 case tcc_binary:
3449 if (OP_SAME (0) && OP_SAME (1))
3450 return true;
3452 /* For commutative ops, allow the other order. */
3453 return (commutative_tree_code (TREE_CODE (arg0))
3454 && operand_equal_p (TREE_OPERAND (arg0, 0),
3455 TREE_OPERAND (arg1, 1), flags)
3456 && operand_equal_p (TREE_OPERAND (arg0, 1),
3457 TREE_OPERAND (arg1, 0), flags));
3459 case tcc_reference:
3460 /* If either of the pointer (or reference) expressions we are
3461 dereferencing contains a side effect, these cannot be equal,
3462 but their addresses can be. */
3463 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3464 && (TREE_SIDE_EFFECTS (arg0)
3465 || TREE_SIDE_EFFECTS (arg1)))
3466 return false;
3468 switch (TREE_CODE (arg0))
3470 case INDIRECT_REF:
3471 if (!(flags & OEP_ADDRESS_OF))
3473 if (TYPE_ALIGN (TREE_TYPE (arg0))
3474 != TYPE_ALIGN (TREE_TYPE (arg1)))
3475 return false;
3476 /* Verify that the access types are compatible. */
3477 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3478 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3479 return false;
3481 flags &= ~OEP_ADDRESS_OF;
3482 return OP_SAME (0);
3484 case IMAGPART_EXPR:
3485 /* Require the same offset. */
3486 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3487 TYPE_SIZE (TREE_TYPE (arg1)),
3488 flags & ~OEP_ADDRESS_OF))
3489 return false;
3491 /* Fallthru. */
3492 case REALPART_EXPR:
3493 case VIEW_CONVERT_EXPR:
3494 return OP_SAME (0);
3496 case TARGET_MEM_REF:
3497 case MEM_REF:
3498 if (!(flags & OEP_ADDRESS_OF))
3500 /* Require equal access sizes */
3501 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3502 && (!TYPE_SIZE (TREE_TYPE (arg0))
3503 || !TYPE_SIZE (TREE_TYPE (arg1))
3504 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3505 TYPE_SIZE (TREE_TYPE (arg1)),
3506 flags)))
3507 return false;
3508 /* Verify that access happens in similar types. */
3509 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3510 return false;
3511 /* Verify that accesses are TBAA compatible. */
3512 if (!alias_ptr_types_compatible_p
3513 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3514 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3515 || (MR_DEPENDENCE_CLIQUE (arg0)
3516 != MR_DEPENDENCE_CLIQUE (arg1))
3517 || (MR_DEPENDENCE_BASE (arg0)
3518 != MR_DEPENDENCE_BASE (arg1)))
3519 return false;
3520 /* Verify that alignment is compatible. */
3521 if (TYPE_ALIGN (TREE_TYPE (arg0))
3522 != TYPE_ALIGN (TREE_TYPE (arg1)))
3523 return false;
3525 flags &= ~OEP_ADDRESS_OF;
3526 return (OP_SAME (0) && OP_SAME (1)
3527 /* TARGET_MEM_REFs require equal extra operands. */
3528 && (TREE_CODE (arg0) != TARGET_MEM_REF
3529 || (OP_SAME_WITH_NULL (2)
3530 && OP_SAME_WITH_NULL (3)
3531 && OP_SAME_WITH_NULL (4))));
3533 case ARRAY_REF:
3534 case ARRAY_RANGE_REF:
3535 if (!OP_SAME (0))
3536 return false;
3537 flags &= ~OEP_ADDRESS_OF;
3538 /* If the array index is constant, compare it by value first, as the
3539 indexes may have different types but the same value. */
3540 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3541 TREE_OPERAND (arg1, 1))
3542 || OP_SAME (1))
3543 && OP_SAME_WITH_NULL (2)
3544 && OP_SAME_WITH_NULL (3)
3545 /* Compare low bound and element size as with OEP_ADDRESS_OF
3546 we have to account for the offset of the ref. */
3547 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3548 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3549 || (operand_equal_p (array_ref_low_bound
3550 (CONST_CAST_TREE (arg0)),
3551 array_ref_low_bound
3552 (CONST_CAST_TREE (arg1)), flags)
3553 && operand_equal_p (array_ref_element_size
3554 (CONST_CAST_TREE (arg0)),
3555 array_ref_element_size
3556 (CONST_CAST_TREE (arg1)),
3557 flags))));
3559 case COMPONENT_REF:
3560 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3561 may be NULL when we're called to compare MEM_EXPRs. */
3562 if (!OP_SAME_WITH_NULL (0))
3563 return false;
3565 bool compare_address = flags & OEP_ADDRESS_OF;
3567 /* Most of the time we only need to compare FIELD_DECLs for equality.
3568 However, when determining an address, look into the actual offsets;
3569 these may match for unions and unshared record types. */
3570 flags &= ~OEP_ADDRESS_OF;
3571 if (!OP_SAME (1))
3573 if (compare_address
3574 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3576 tree field0 = TREE_OPERAND (arg0, 1);
3577 tree field1 = TREE_OPERAND (arg1, 1);
3579 /* Non-FIELD_DECL operands can appear in C++ templates. */
3580 if (TREE_CODE (field0) != FIELD_DECL
3581 || TREE_CODE (field1) != FIELD_DECL
3582 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3583 DECL_FIELD_OFFSET (field1), flags)
3584 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3585 DECL_FIELD_BIT_OFFSET (field1),
3586 flags))
3587 return false;
3589 else
3590 return false;
3593 return OP_SAME_WITH_NULL (2);
3595 case BIT_FIELD_REF:
3596 if (!OP_SAME (0))
3597 return false;
3598 flags &= ~OEP_ADDRESS_OF;
3599 return OP_SAME (1) && OP_SAME (2);
3601 default:
3602 return false;
3605 case tcc_expression:
3606 switch (TREE_CODE (arg0))
3608 case ADDR_EXPR:
3609 /* Be sure we pass the right ADDRESS_OF flag. */
3610 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3611 return operand_equal_p (TREE_OPERAND (arg0, 0),
3612 TREE_OPERAND (arg1, 0),
3613 flags | OEP_ADDRESS_OF);
3615 case TRUTH_NOT_EXPR:
3616 return OP_SAME (0);
3618 case TRUTH_ANDIF_EXPR:
3619 case TRUTH_ORIF_EXPR:
3620 return OP_SAME (0) && OP_SAME (1);
3622 case WIDEN_MULT_PLUS_EXPR:
3623 case WIDEN_MULT_MINUS_EXPR:
3624 if (!OP_SAME (2))
3625 return false;
3626 /* The multiplication operands are commutative. */
3627 /* FALLTHRU */
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632 if (OP_SAME (0) && OP_SAME (1))
3633 return true;
3635 /* Otherwise, take into account that this is a commutative operation. */
3636 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3637 TREE_OPERAND (arg1, 1), flags)
3638 && operand_equal_p (TREE_OPERAND (arg0, 1),
3639 TREE_OPERAND (arg1, 0), flags));
3641 case COND_EXPR:
3642 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3643 return false;
3644 flags &= ~OEP_ADDRESS_OF;
3645 return OP_SAME (0);
3647 case BIT_INSERT_EXPR:
3648 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3649 of op1, so check that it is the same for both args.
3650 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3651 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3652 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3653 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3654 return false;
3655 /* FALLTHRU */
3657 case VEC_COND_EXPR:
3658 case DOT_PROD_EXPR:
3659 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3661 case MODIFY_EXPR:
3662 case INIT_EXPR:
3663 case COMPOUND_EXPR:
3664 case PREDECREMENT_EXPR:
3665 case PREINCREMENT_EXPR:
3666 case POSTDECREMENT_EXPR:
3667 case POSTINCREMENT_EXPR:
3668 if (flags & OEP_LEXICOGRAPHIC)
3669 return OP_SAME (0) && OP_SAME (1);
3670 return false;
3672 case CLEANUP_POINT_EXPR:
3673 case EXPR_STMT:
3674 case SAVE_EXPR:
3675 if (flags & OEP_LEXICOGRAPHIC)
3676 return OP_SAME (0);
3677 return false;
3679 case OBJ_TYPE_REF:
3680 /* Virtual table reference. */
3681 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3682 OBJ_TYPE_REF_EXPR (arg1), flags))
3683 return false;
3684 flags &= ~OEP_ADDRESS_OF;
3685 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3686 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3687 return false;
3688 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3689 OBJ_TYPE_REF_OBJECT (arg1), flags))
3690 return false;
3691 if (virtual_method_call_p (arg0))
3693 if (!virtual_method_call_p (arg1))
3694 return false;
3695 return types_same_for_odr (obj_type_ref_class (arg0),
3696 obj_type_ref_class (arg1));
3698 return false;
3700 default:
3701 return false;
3704 case tcc_vl_exp:
3705 switch (TREE_CODE (arg0))
3707 case CALL_EXPR:
3708 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3709 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3710 /* If the CALL_EXPRs are not both internal or both normal
3711 function calls, then they are not equal. */
3712 return false;
3713 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3715 /* If the CALL_EXPRs call different internal functions, then they
3716 are not equal. */
3717 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3718 return false;
3720 else
3722 /* If the CALL_EXPRs call different functions, then they are not
3723 equal. */
3724 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3725 flags))
3726 return false;
3729 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3731 unsigned int cef = call_expr_flags (arg0);
3732 if (flags & OEP_PURE_SAME)
3733 cef &= ECF_CONST | ECF_PURE;
3734 else
3735 cef &= ECF_CONST;
3736 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3737 return false;
3740 /* Now see if all the arguments are the same. */
3742 const_call_expr_arg_iterator iter0, iter1;
3743 const_tree a0, a1;
3744 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3745 a1 = first_const_call_expr_arg (arg1, &iter1);
3746 a0 && a1;
3747 a0 = next_const_call_expr_arg (&iter0),
3748 a1 = next_const_call_expr_arg (&iter1))
3749 if (! operand_equal_p (a0, a1, flags))
3750 return false;
3752 /* If we get here and both argument lists are exhausted
3753 then the CALL_EXPRs are equal. */
3754 return ! (a0 || a1);
3756 default:
3757 return false;
3760 case tcc_declaration:
3761 /* Consider __builtin_sqrt equal to sqrt. */
3762 if (TREE_CODE (arg0) == FUNCTION_DECL)
3763 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3764 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3765 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3766 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3768 if (DECL_P (arg0)
3769 && (flags & OEP_DECL_NAME)
3770 && (flags & OEP_LEXICOGRAPHIC))
3772 /* Consider decls with the same name equal. The caller needs
3773 to make sure they refer to the same entity (such as a function
3774 formal parameter). */
3775 tree a0name = DECL_NAME (arg0);
3776 tree a1name = DECL_NAME (arg1);
3777 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3778 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3779 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3781 return false;
3783 case tcc_exceptional:
3784 if (TREE_CODE (arg0) == CONSTRUCTOR)
3786 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3787 return false;
3789 /* In GIMPLE constructors are used only to build vectors from
3790 elements. Individual elements in the constructor must be
3791 indexed in increasing order and form an initial sequence.
3793 We make no effort to compare nonconstant ones in GENERIC. */
3794 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3795 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3796 return false;
3798 /* Be sure that the constructed vectors have the same representation.
3799 So far we have only tested that element precision and modes match.
3800 Vectors may be BLKmode, so also check that the number of
3801 parts matches. */
3802 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3803 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3804 return false;
3806 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3807 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3808 unsigned int len = vec_safe_length (v0);
3810 if (len != vec_safe_length (v1))
3811 return false;
3813 for (unsigned int i = 0; i < len; i++)
3815 constructor_elt *c0 = &(*v0)[i];
3816 constructor_elt *c1 = &(*v1)[i];
3818 if (!operand_equal_p (c0->value, c1->value, flags)
3819 /* In GIMPLE the indexes can be either NULL or matching i.
3820 Double check this so we won't get false
3821 positives for GENERIC. */
3822 || (c0->index
3823 && (TREE_CODE (c0->index) != INTEGER_CST
3824 || compare_tree_int (c0->index, i)))
3825 || (c1->index
3826 && (TREE_CODE (c1->index) != INTEGER_CST
3827 || compare_tree_int (c1->index, i))))
3828 return false;
3830 return true;
3832 else if (TREE_CODE (arg0) == STATEMENT_LIST
3833 && (flags & OEP_LEXICOGRAPHIC))
3835 /* Compare the STATEMENT_LISTs. */
3836 tree_stmt_iterator tsi1, tsi2;
3837 tree body1 = CONST_CAST_TREE (arg0);
3838 tree body2 = CONST_CAST_TREE (arg1);
3839 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3840 tsi_next (&tsi1), tsi_next (&tsi2))
3842 /* The lists don't have the same number of statements. */
3843 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3844 return false;
3845 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3846 return true;
3847 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3848 flags & (OEP_LEXICOGRAPHIC
3849 | OEP_NO_HASH_CHECK)))
3850 return false;
3853 return false;
3855 case tcc_statement:
3856 switch (TREE_CODE (arg0))
3858 case RETURN_EXPR:
3859 if (flags & OEP_LEXICOGRAPHIC)
3860 return OP_SAME_WITH_NULL (0);
3861 return false;
3862 case DEBUG_BEGIN_STMT:
3863 if (flags & OEP_LEXICOGRAPHIC)
3864 return true;
3865 return false;
3866 default:
3867 return false;
3870 default:
3871 return false;
3874 #undef OP_SAME
3875 #undef OP_SAME_WITH_NULL
3878 /* Generate a hash value for an expression. This can be used iteratively
3879 by passing a previous result as the HSTATE argument. */
3881 void
3882 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3883 unsigned int flags)
3885 int i;
3886 enum tree_code code;
3887 enum tree_code_class tclass;
3889 if (t == NULL_TREE || t == error_mark_node)
3891 hstate.merge_hash (0);
3892 return;
3895 STRIP_ANY_LOCATION_WRAPPER (t);
3897 if (!(flags & OEP_ADDRESS_OF))
3898 STRIP_NOPS (t);
3900 code = TREE_CODE (t);
3902 switch (code)
3904 /* Alas, constants aren't shared, so we can't rely on pointer
3905 identity. */
3906 case VOID_CST:
3907 hstate.merge_hash (0);
3908 return;
3909 case INTEGER_CST:
3910 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3911 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3912 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3913 return;
3914 case REAL_CST:
3916 unsigned int val2;
3917 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3918 val2 = rvc_zero;
3919 else
3920 val2 = real_hash (TREE_REAL_CST_PTR (t));
3921 hstate.merge_hash (val2);
3922 return;
3924 case FIXED_CST:
3926 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3927 hstate.merge_hash (val2);
3928 return;
3930 case STRING_CST:
3931 hstate.add ((const void *) TREE_STRING_POINTER (t),
3932 TREE_STRING_LENGTH (t));
3933 return;
3934 case COMPLEX_CST:
3935 hash_operand (TREE_REALPART (t), hstate, flags);
3936 hash_operand (TREE_IMAGPART (t), hstate, flags);
3937 return;
3938 case VECTOR_CST:
3940 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3941 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3942 unsigned int count = vector_cst_encoded_nelts (t);
3943 for (unsigned int i = 0; i < count; ++i)
3944 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3945 return;
3947 case SSA_NAME:
3948 /* SSA names are compared by pointer, so hashing the unique version number suffices. */
3949 hstate.add_hwi (SSA_NAME_VERSION (t));
3950 return;
3951 case PLACEHOLDER_EXPR:
3952 /* The node itself doesn't matter. */
3953 return;
3954 case BLOCK:
3955 case OMP_CLAUSE:
3956 /* Ignore. */
3957 return;
3958 case TREE_LIST:
3959 /* A list of expressions, for a CALL_EXPR or as the elements of a
3960 VECTOR_CST. */
3961 for (; t; t = TREE_CHAIN (t))
3962 hash_operand (TREE_VALUE (t), hstate, flags);
3963 return;
3964 case CONSTRUCTOR:
3966 unsigned HOST_WIDE_INT idx;
3967 tree field, value;
3968 flags &= ~OEP_ADDRESS_OF;
3969 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3972 /* In GIMPLE the indexes can be either NULL or matching i. */
3973 if (field == NULL_TREE)
3974 field = bitsize_int (idx);
3975 if (TREE_CODE (field) == FIELD_DECL)
3977 hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
3978 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
3980 else
3981 hash_operand (field, hstate, flags);
3982 hash_operand (value, hstate, flags);
3984 return;
3986 case STATEMENT_LIST:
3988 tree_stmt_iterator i;
3989 for (i = tsi_start (CONST_CAST_TREE (t));
3990 !tsi_end_p (i); tsi_next (&i))
3991 hash_operand (tsi_stmt (i), hstate, flags);
3992 return;
3994 case TREE_VEC:
3995 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3996 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3997 return;
3998 case IDENTIFIER_NODE:
3999 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
4000 return;
4001 case FUNCTION_DECL:
4002 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
4003 Otherwise nodes that compare equal according to operand_equal_p might
4004 get different hash codes. However, don't do this for machine specific
4005 or front end builtins, since the function code is overloaded in those
4006 cases. */
4007 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
4008 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
4010 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
4011 code = TREE_CODE (t);
4013 /* FALL THROUGH */
4014 default:
4015 if (POLY_INT_CST_P (t))
4017 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
4018 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
4019 return;
4021 tclass = TREE_CODE_CLASS (code);
4023 if (tclass == tcc_declaration)
4025 /* DECLs have a unique ID. */
4026 hstate.add_hwi (DECL_UID (t));
4028 else if (tclass == tcc_comparison && !commutative_tree_code (code))
4030 /* For comparisons that can be swapped, use the lower
4031 tree code. */
4032 enum tree_code ccode = swap_tree_comparison (code);
4033 if (code < ccode)
4034 ccode = code;
4035 hstate.add_object (ccode);
4036 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
4037 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
4039 else if (CONVERT_EXPR_CODE_P (code))
4041 /* NOP_EXPR and CONVERT_EXPR are considered equal by
4042 operand_equal_p. */
4043 enum tree_code ccode = NOP_EXPR;
4044 hstate.add_object (ccode);
4046 /* Don't hash the type; that can lead to having nodes which
4047 compare equal according to operand_equal_p, but which
4048 have different hash codes. Make sure to include signedness
4049 in the hash computation. */
4050 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4051 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4053 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
4054 else if (code == MEM_REF
4055 && (flags & OEP_ADDRESS_OF) != 0
4056 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
4057 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
4058 && integer_zerop (TREE_OPERAND (t, 1)))
4059 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
4060 hstate, flags);
4061 /* Don't ICE on FE specific trees, or their arguments etc.
4062 during operand_equal_p hash verification. */
4063 else if (!IS_EXPR_CODE_CLASS (tclass))
4064 gcc_assert (flags & OEP_HASH_CHECK);
4065 else
4067 unsigned int sflags = flags;
4069 hstate.add_object (code);
4071 switch (code)
4073 case ADDR_EXPR:
4074 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4075 flags |= OEP_ADDRESS_OF;
4076 sflags = flags;
4077 break;
4079 case INDIRECT_REF:
4080 case MEM_REF:
4081 case TARGET_MEM_REF:
4082 flags &= ~OEP_ADDRESS_OF;
4083 sflags = flags;
4084 break;
4086 case COMPONENT_REF:
4087 if (sflags & OEP_ADDRESS_OF)
4089 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4090 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
4091 hstate, flags & ~OEP_ADDRESS_OF);
4092 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
4093 hstate, flags & ~OEP_ADDRESS_OF);
4094 return;
4096 break;
4097 case ARRAY_REF:
4098 case ARRAY_RANGE_REF:
4099 case BIT_FIELD_REF:
4100 sflags &= ~OEP_ADDRESS_OF;
4101 break;
4103 case COND_EXPR:
4104 flags &= ~OEP_ADDRESS_OF;
4105 break;
4107 case WIDEN_MULT_PLUS_EXPR:
4108 case WIDEN_MULT_MINUS_EXPR:
4110 /* The multiplication operands are commutative. */
4111 inchash::hash one, two;
4112 hash_operand (TREE_OPERAND (t, 0), one, flags);
4113 hash_operand (TREE_OPERAND (t, 1), two, flags);
4114 hstate.add_commutative (one, two);
4115 hash_operand (TREE_OPERAND (t, 2), two, flags);
4116 return;
4119 case CALL_EXPR:
4120 if (CALL_EXPR_FN (t) == NULL_TREE)
4121 hstate.add_int (CALL_EXPR_IFN (t));
4122 break;
4124 case TARGET_EXPR:
4125 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4126 Usually different TARGET_EXPRs should just use
4127 different temporaries in their slots. */
4128 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4129 return;
4131 case OBJ_TYPE_REF:
4132 /* Virtual table reference. */
4133 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4134 flags &= ~OEP_ADDRESS_OF;
4135 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4136 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4137 if (!virtual_method_call_p (t))
4138 return;
4139 if (tree c = obj_type_ref_class (t))
4141 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4142 /* We compute mangled names only when free_lang_data is run.
4143 In that case we can hash precisely. */
4144 if (TREE_CODE (c) == TYPE_DECL
4145 && DECL_ASSEMBLER_NAME_SET_P (c))
4146 hstate.add_object
4147 (IDENTIFIER_HASH_VALUE
4148 (DECL_ASSEMBLER_NAME (c)));
4150 return;
4151 default:
4152 break;
4155 /* Don't hash the type; that can lead to having nodes which
4156 compare equal according to operand_equal_p, but which
4157 have different hash codes. */
4158 if (code == NON_LVALUE_EXPR)
4160 /* Make sure to include signedness in the hash computation. */
4161 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4162 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4165 else if (commutative_tree_code (code))
4167 /* It's a commutative expression. We want to hash it the same
4168 however it appears. We do this by first hashing both operands
4169 and then rehashing based on the order of their independent
4170 hashes. */
4171 inchash::hash one, two;
4172 hash_operand (TREE_OPERAND (t, 0), one, flags);
4173 hash_operand (TREE_OPERAND (t, 1), two, flags);
4174 hstate.add_commutative (one, two);
4176 else
4177 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4178 hash_operand (TREE_OPERAND (t, i), hstate,
4179 i == 0 ? flags : sflags);
4181 return;
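/* Illustrative sketch, not part of GCC: the add_commutative calls above
   make the hash order-independent by hashing each operand into its own
   sub-state and combining the two sub-hashes symmetrically.  The
   stand-in mix below (not inchash's real algorithm) shows the property:
   commutative_mix (h1, h2) == commutative_mix (h2, h1).  */
#include <stdint.h>

static uint32_t
commutative_mix (uint32_t one, uint32_t two)
{
  /* Addition and multiplication both commute, so the result cannot
     depend on the order of the two operand hashes.  */
  return (one + two) ^ (one * two);
}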
4185 bool
4186 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4187 unsigned int flags, bool *ret)
4189 /* When checking and unless comparing DECL names, verify that if
4190 the outermost operand_equal_p call returns non-zero then ARG0
4191 and ARG1 have the same hash value. */
4192 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4194 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4196 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4198 inchash::hash hstate0 (0), hstate1 (0);
4199 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4200 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4201 hashval_t h0 = hstate0.end ();
4202 hashval_t h1 = hstate1.end ();
4203 gcc_assert (h0 == h1);
4205 *ret = true;
4207 else
4208 *ret = false;
4210 return true;
4213 return false;
4217 static operand_compare default_compare_instance;
4219 /* Convenience wrapper around the operand_compare class, because usually we do
4220 not need to play with the valueizer. */
4222 bool
4223 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4225 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
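/* Usage sketch (illustrative; T1 and T2 are hypothetical trees): a
   typical caller passes a combination of the OEP_* flags used
   throughout this file, e.g.

     if (operand_equal_p (t1, t2, OEP_ONLY_CONST))
       ...  */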
4228 namespace inchash
4231 /* Generate a hash value for an expression. This can be used iteratively
4232 by passing a previous result as the HSTATE argument.
4234 This function is intended to produce the same hash for expressions which
4235 would compare equal using operand_equal_p. */
4236 void
4237 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4239 default_compare_instance.hash_operand (t, hstate, flags);
4244 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4245 with a different signedness or a narrower precision. */
4247 static bool
4248 operand_equal_for_comparison_p (tree arg0, tree arg1)
4250 if (operand_equal_p (arg0, arg1, 0))
4251 return true;
4253 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4254 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4255 return false;
4257 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4258 and see if the inner values are the same. This removes any
4259 signedness comparison, which doesn't matter here. */
4260 tree op0 = arg0;
4261 tree op1 = arg1;
4262 STRIP_NOPS (op0);
4263 STRIP_NOPS (op1);
4264 if (operand_equal_p (op0, op1, 0))
4265 return true;
4267 /* Discard a single widening conversion from ARG1 and see if the inner
4268 value is the same as ARG0. */
4269 if (CONVERT_EXPR_P (arg1)
4270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4271 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4272 < TYPE_PRECISION (TREE_TYPE (arg1))
4273 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4274 return true;
4276 return false;
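/* Example (illustrative): for a char variable C, the operands C and
   (int) C are accepted here through the single-widening-conversion
   case, even though plain operand_equal_p rejects them; on the usual
   targets the conversion only widens the value being compared.  */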
4279 /* See if ARG is an expression that is either a comparison or is performing
4280 arithmetic on comparisons. The comparisons must only be comparing
4281 two different values, which will be stored in *CVAL1 and *CVAL2; if
4282 they are nonzero it means that some operands have already been found.
4283 No variables may be used anywhere else in the expression except in the
4284 comparisons.
4286 If this is true, return true. Otherwise, return false. */
4288 static bool
4289 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4291 enum tree_code code = TREE_CODE (arg);
4292 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4294 /* We can handle some of the tcc_expression cases here. */
4295 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4296 tclass = tcc_unary;
4297 else if (tclass == tcc_expression
4298 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4299 || code == COMPOUND_EXPR))
4300 tclass = tcc_binary;
4302 switch (tclass)
4304 case tcc_unary:
4305 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4307 case tcc_binary:
4308 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4309 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4311 case tcc_constant:
4312 return true;
4314 case tcc_expression:
4315 if (code == COND_EXPR)
4316 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4317 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4318 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4319 return false;
4321 case tcc_comparison:
4322 /* First see if we can handle the first operand, then the second. For
4323 the second operand, we know *CVAL1 can't be zero. It must be that
4324 one side of the comparison is each of the values; test for the
4325 case where this isn't true by failing if the two operands
4326 are the same. */
4328 if (operand_equal_p (TREE_OPERAND (arg, 0),
4329 TREE_OPERAND (arg, 1), 0))
4330 return false;
4332 if (*cval1 == 0)
4333 *cval1 = TREE_OPERAND (arg, 0);
4334 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4336 else if (*cval2 == 0)
4337 *cval2 = TREE_OPERAND (arg, 0);
4338 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4340 else
4341 return false;
4343 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4345 else if (*cval2 == 0)
4346 *cval2 = TREE_OPERAND (arg, 1);
4347 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4349 else
4350 return false;
4352 return true;
4354 default:
4355 return false;
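/* Example (illustrative): for ARG = (a < b) & (b >= a) this returns
   true with *CVAL1 = a and *CVAL2 = b, since every comparison in ARG
   mentions only those two values.  For (a < b) & (c >= a) it fails as
   soon as the third value C is seen.  */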
4359 /* ARG is a tree that is known to contain just arithmetic operations and
4360 comparisons. Evaluate the operations in the tree substituting NEW0 for
4361 any occurrence of OLD0 as an operand of a comparison and likewise for
4362 NEW1 and OLD1. */
4364 static tree
4365 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4366 tree old1, tree new1)
4368 tree type = TREE_TYPE (arg);
4369 enum tree_code code = TREE_CODE (arg);
4370 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4372 /* We can handle some of the tcc_expression cases here. */
4373 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4374 tclass = tcc_unary;
4375 else if (tclass == tcc_expression
4376 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4377 tclass = tcc_binary;
4379 switch (tclass)
4381 case tcc_unary:
4382 return fold_build1_loc (loc, code, type,
4383 eval_subst (loc, TREE_OPERAND (arg, 0),
4384 old0, new0, old1, new1));
4386 case tcc_binary:
4387 return fold_build2_loc (loc, code, type,
4388 eval_subst (loc, TREE_OPERAND (arg, 0),
4389 old0, new0, old1, new1),
4390 eval_subst (loc, TREE_OPERAND (arg, 1),
4391 old0, new0, old1, new1));
4393 case tcc_expression:
4394 switch (code)
4396 case SAVE_EXPR:
4397 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4398 old1, new1);
4400 case COMPOUND_EXPR:
4401 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4402 old1, new1);
4404 case COND_EXPR:
4405 return fold_build3_loc (loc, code, type,
4406 eval_subst (loc, TREE_OPERAND (arg, 0),
4407 old0, new0, old1, new1),
4408 eval_subst (loc, TREE_OPERAND (arg, 1),
4409 old0, new0, old1, new1),
4410 eval_subst (loc, TREE_OPERAND (arg, 2),
4411 old0, new0, old1, new1));
4412 default:
4413 break;
4415 /* Fall through - ??? */
4417 case tcc_comparison:
4419 tree arg0 = TREE_OPERAND (arg, 0);
4420 tree arg1 = TREE_OPERAND (arg, 1);
4422 /* We need to check both for exact equality and tree equality. The
4423 former will be true if the operand has a side-effect. In that
4424 case, we know the operand occurred exactly once. */
4426 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4427 arg0 = new0;
4428 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4429 arg0 = new1;
4431 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4432 arg1 = new0;
4433 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4434 arg1 = new1;
4436 return fold_build2_loc (loc, code, type, arg0, arg1);
4439 default:
4440 return arg;
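/* Example (illustrative): with ARG = (a < b) || (a == c), OLD0/NEW0 =
   a/x and OLD1/NEW1 = b/y, eval_subst rebuilds the expression as
   (x < y) || (x == c): each comparison operand equal to OLD0 or OLD1
   is replaced, everything else is left alone.  */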
4444 /* Return a tree for the case when the result of an expression is RESULT
4445 converted to TYPE and OMITTED was previously an operand of the expression
4446 but is now not needed (e.g., we folded OMITTED * 0).
4448 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4449 the conversion of RESULT to TYPE. */
4451 tree
4452 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4454 tree t = fold_convert_loc (loc, type, result);
4456 /* If the resulting operand is an empty statement, just return the omitted
4457 statement casted to void. */
4458 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4459 return build1_loc (loc, NOP_EXPR, void_type_node,
4460 fold_ignored_result (omitted));
4462 if (TREE_SIDE_EFFECTS (omitted))
4463 return build2_loc (loc, COMPOUND_EXPR, type,
4464 fold_ignored_result (omitted), t);
4466 return non_lvalue_loc (loc, t);
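/* Illustrative sketch in plain C, not part of GCC: the COMPOUND_EXPR
   built above keeps a side-effecting omitted operand alive.  Folding
   f () * 0 must not drop the call, so the result behaves like the
   C comma expression (f (), 0).  */
static int call_count;

static int
f (void)
{
  return ++call_count;
}

static int
folded_product (void)
{
  /* Same observable behavior as f () * 0: f still runs once.  */
  return (f (), 0);
}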
4469 /* Return a tree for the case when the result of an expression is RESULT
4470 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4471 of the expression but are now not needed.
4473 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4474 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4475 evaluated before OMITTED2. Otherwise, if neither has side effects,
4476 just do the conversion of RESULT to TYPE. */
4478 tree
4479 omit_two_operands_loc (location_t loc, tree type, tree result,
4480 tree omitted1, tree omitted2)
4482 tree t = fold_convert_loc (loc, type, result);
4484 if (TREE_SIDE_EFFECTS (omitted2))
4485 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4486 if (TREE_SIDE_EFFECTS (omitted1))
4487 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4489 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4493 /* Return a simplified tree node for the truth-negation of ARG. This
4494 never alters ARG itself. We assume that ARG is an operation that
4495 returns a truth value (0 or 1).
4497 FIXME: one would think we would fold the result, but it causes
4498 problems with the dominator optimizer. */
4500 static tree
4501 fold_truth_not_expr (location_t loc, tree arg)
4503 tree type = TREE_TYPE (arg);
4504 enum tree_code code = TREE_CODE (arg);
4505 location_t loc1, loc2;
4507 /* If this is a comparison, we can simply invert it, except for
4508 floating-point non-equality comparisons, in which case we just
4509 enclose a TRUTH_NOT_EXPR around what we have. */
4511 if (TREE_CODE_CLASS (code) == tcc_comparison)
4513 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4514 if (FLOAT_TYPE_P (op_type)
4515 && flag_trapping_math
4516 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4517 && code != NE_EXPR && code != EQ_EXPR)
4518 return NULL_TREE;
4520 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4521 if (code == ERROR_MARK)
4522 return NULL_TREE;
4524 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4525 TREE_OPERAND (arg, 1));
4526 copy_warning (ret, arg);
4527 return ret;
4530 switch (code)
4532 case INTEGER_CST:
4533 return constant_boolean_node (integer_zerop (arg), type);
4535 case TRUTH_AND_EXPR:
4536 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4537 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4538 return build2_loc (loc, TRUTH_OR_EXPR, type,
4539 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4540 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4542 case TRUTH_OR_EXPR:
4543 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4544 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4545 return build2_loc (loc, TRUTH_AND_EXPR, type,
4546 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4547 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4549 case TRUTH_XOR_EXPR:
4550 /* Here we can invert either operand. We invert the first operand
4551 unless the second operand is a TRUTH_NOT_EXPR in which case our
4552 result is the XOR of the first operand with the inside of the
4553 negation of the second operand. */
4555 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4556 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4557 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4558 else
4559 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4560 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4561 TREE_OPERAND (arg, 1));
4563 case TRUTH_ANDIF_EXPR:
4564 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4565 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4566 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4567 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4568 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4570 case TRUTH_ORIF_EXPR:
4571 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4572 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4573 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4574 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4575 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4577 case TRUTH_NOT_EXPR:
4578 return TREE_OPERAND (arg, 0);
4580 case COND_EXPR:
4582 tree arg1 = TREE_OPERAND (arg, 1);
4583 tree arg2 = TREE_OPERAND (arg, 2);
4585 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4586 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4588 /* A COND_EXPR may have a throw as one operand, which
4589 then has void type. Just leave void operands
4590 as they are. */
4591 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4592 VOID_TYPE_P (TREE_TYPE (arg1))
4593 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4594 VOID_TYPE_P (TREE_TYPE (arg2))
4595 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4598 case COMPOUND_EXPR:
4599 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4600 return build2_loc (loc, COMPOUND_EXPR, type,
4601 TREE_OPERAND (arg, 0),
4602 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4604 case NON_LVALUE_EXPR:
4605 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4606 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4608 CASE_CONVERT:
4609 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4610 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4612 /* fall through */
4614 case FLOAT_EXPR:
4615 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4616 return build1_loc (loc, TREE_CODE (arg), type,
4617 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4619 case BIT_AND_EXPR:
4620 if (!integer_onep (TREE_OPERAND (arg, 1)))
4621 return NULL_TREE;
4622 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4624 case SAVE_EXPR:
4625 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4627 case CLEANUP_POINT_EXPR:
4628 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4629 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4630 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4632 default:
4633 return NULL_TREE;
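/* Illustrative sketch, not part of GCC: the TRUTH_AND_EXPR and
   TRUTH_OR_EXPR cases above are De Morgan's laws.  In plain C:  */
static int
not_of_and (int a, int b)
{
  /* !(a && b) is rewritten as (!a || !b); both have the same
     truth table.  */
  return !a || !b;
}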
4637 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4638 assume that ARG is an operation that returns a truth value (0 or 1
4639 for scalars, 0 or -1 for vectors). Return the folded expression if
4640 folding is successful. Otherwise, return NULL_TREE. */
4642 static tree
4643 fold_invert_truthvalue (location_t loc, tree arg)
4645 tree type = TREE_TYPE (arg);
4646 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4647 ? BIT_NOT_EXPR
4648 : TRUTH_NOT_EXPR,
4649 type, arg);
4652 /* Return a simplified tree node for the truth-negation of ARG. This
4653 never alters ARG itself. We assume that ARG is an operation that
4654 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4656 tree
4657 invert_truthvalue_loc (location_t loc, tree arg)
4659 if (TREE_CODE (arg) == ERROR_MARK)
4660 return arg;
4662 tree type = TREE_TYPE (arg);
4663 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4664 ? BIT_NOT_EXPR
4665 : TRUTH_NOT_EXPR,
4666 type, arg);
4669 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4670 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4671 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4672 is the original memory reference used to preserve the alias set of
4673 the access. */
4675 static tree
4676 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4677 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4678 int unsignedp, int reversep)
4680 tree result, bftype;
4682 /* Attempt not to lose the access path if possible. */
4683 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4685 tree ninner = TREE_OPERAND (orig_inner, 0);
4686 machine_mode nmode;
4687 poly_int64 nbitsize, nbitpos;
4688 tree noffset;
4689 int nunsignedp, nreversep, nvolatilep = 0;
4690 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4691 &noffset, &nmode, &nunsignedp,
4692 &nreversep, &nvolatilep);
4693 if (base == inner
4694 && noffset == NULL_TREE
4695 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4696 && !reversep
4697 && !nreversep
4698 && !nvolatilep)
4700 inner = ninner;
4701 bitpos -= nbitpos;
4705 alias_set_type iset = get_alias_set (orig_inner);
4706 if (iset == 0 && get_alias_set (inner) != iset)
4707 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4708 build_fold_addr_expr (inner),
4709 build_int_cst (ptr_type_node, 0));
4711 if (known_eq (bitpos, 0) && !reversep)
4713 tree size = TYPE_SIZE (TREE_TYPE (inner));
4714 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4715 || POINTER_TYPE_P (TREE_TYPE (inner)))
4716 && tree_fits_shwi_p (size)
4717 && tree_to_shwi (size) == bitsize)
4718 return fold_convert_loc (loc, type, inner);
4721 bftype = type;
4722 if (TYPE_PRECISION (bftype) != bitsize
4723 || TYPE_UNSIGNED (bftype) == !unsignedp)
4724 bftype = build_nonstandard_integer_type (bitsize, 0);
4726 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4727 bitsize_int (bitsize), bitsize_int (bitpos));
4728 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4730 if (bftype != type)
4731 result = fold_convert_loc (loc, type, result);
4733 return result;
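/* Illustrative sketch in plain C, not GCC code: on a little-endian
   target, a BIT_FIELD_REF of BITSIZE bits at bit position BITPOS of a
   32-bit word behaves like this shift and mask, for 1 <= bitsize <= 32
   and bitpos + bitsize <= 32.  */
#include <stdint.h>

static uint32_t
extract_bit_field (uint32_t word, unsigned bitpos, unsigned bitsize)
{
  uint32_t mask
    = bitsize < 32 ? (((uint32_t) 1 << bitsize) - 1) : ~(uint32_t) 0;
  return (word >> bitpos) & mask;
}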
4736 /* Optimize a bit-field compare.
4738 There are two cases: First is a compare against a constant and the
4739 second is a comparison of two items where the fields are at the same
4740 bit position relative to the start of a chunk (byte, halfword, word)
4741 large enough to contain it. In these cases we can avoid the shift
4742 implicit in bitfield extractions.
4744 For constants, we emit a compare of the shifted constant with the
4745 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4746 compared. For two fields at the same position, we do the ANDs with the
4747 similar mask and compare the result of the ANDs.
4749 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4750 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4751 are the left and right operands of the comparison, respectively.
4753 If the optimization described above can be done, we return the resulting
4754 tree. Otherwise we return zero. */
4756 static tree
4757 optimize_bit_field_compare (location_t loc, enum tree_code code,
4758 tree compare_type, tree lhs, tree rhs)
4760 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4761 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4762 tree type = TREE_TYPE (lhs);
4763 tree unsigned_type;
4764 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4765 machine_mode lmode, rmode;
4766 scalar_int_mode nmode;
4767 int lunsignedp, runsignedp;
4768 int lreversep, rreversep;
4769 int lvolatilep = 0, rvolatilep = 0;
4770 tree linner, rinner = NULL_TREE;
4771 tree mask;
4772 tree offset;
4774 /* Get all the information about the extractions being done. If the bit size
4775 is the same as the size of the underlying object, we aren't doing an
4776 extraction at all and so can do nothing. We also don't want to
4777 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4778 then will no longer be able to replace it. */
4779 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4780 &lunsignedp, &lreversep, &lvolatilep);
4781 if (linner == lhs
4782 || !known_size_p (plbitsize)
4783 || !plbitsize.is_constant (&lbitsize)
4784 || !plbitpos.is_constant (&lbitpos)
4785 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4786 || offset != 0
4787 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4788 || lvolatilep)
4789 return 0;
4791 if (const_p)
4792 rreversep = lreversep;
4793 else
4795 /* If this is not a constant, we can only do something if bit positions,
4796 sizes, signedness and storage order are the same. */
4797 rinner
4798 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4799 &runsignedp, &rreversep, &rvolatilep);
4801 if (rinner == rhs
4802 || maybe_ne (lbitpos, rbitpos)
4803 || maybe_ne (lbitsize, rbitsize)
4804 || lunsignedp != runsignedp
4805 || lreversep != rreversep
4806 || offset != 0
4807 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4808 || rvolatilep)
4809 return 0;
4812 /* Honor the C++ memory model and mimic what RTL expansion does. */
4813 poly_uint64 bitstart = 0;
4814 poly_uint64 bitend = 0;
4815 if (TREE_CODE (lhs) == COMPONENT_REF)
4817 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4818 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4819 return 0;
4822 /* See if we can find a mode to refer to this field. We should be able to,
4823 but fail if we can't. */
4824 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4825 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4826 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4827 TYPE_ALIGN (TREE_TYPE (rinner))),
4828 BITS_PER_WORD, false, &nmode))
4829 return 0;
4831 /* Set signed and unsigned types of the precision of this mode for the
4832 shifts below. */
4833 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4835 /* Compute the bit position and size for the new reference and our offset
4836 within it. If the new reference is the same size as the original, we
4837 won't optimize anything, so return zero. */
4838 nbitsize = GET_MODE_BITSIZE (nmode);
4839 nbitpos = lbitpos & ~ (nbitsize - 1);
4840 lbitpos -= nbitpos;
4841 if (nbitsize == lbitsize)
4842 return 0;
4844 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4845 lbitpos = nbitsize - lbitsize - lbitpos;
4847 /* Make the mask to be used against the extracted field. */
4848 mask = build_int_cst_type (unsigned_type, -1);
4849 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4850 mask = const_binop (RSHIFT_EXPR, mask,
4851 size_int (nbitsize - lbitsize - lbitpos));
4853 if (! const_p)
4855 if (nbitpos < 0)
4856 return 0;
4858 /* If not comparing with constant, just rework the comparison
4859 and return. */
4860 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4861 nbitsize, nbitpos, 1, lreversep);
4862 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4863 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4864 nbitsize, nbitpos, 1, rreversep);
4865 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4866 return fold_build2_loc (loc, code, compare_type, t1, t2);
4869 /* Otherwise, we are handling the constant case. See if the constant is too
4870 big for the field. Warn and return a tree for 0 (false) if so. We do
4871 this not only for its own sake, but to avoid having to test for this
4872 error case below. If we didn't, we might generate wrong code.
4874 For unsigned fields, the constant shifted right by the field length should
4875 be all zero. For signed fields, the high-order bits should agree with
4876 the sign bit. */
4878 if (lunsignedp)
4880 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4882 warning (0, "comparison is always %d due to width of bit-field",
4883 code == NE_EXPR);
4884 return constant_boolean_node (code == NE_EXPR, compare_type);
4887 else
4889 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4890 if (tem != 0 && tem != -1)
4892 warning (0, "comparison is always %d due to width of bit-field",
4893 code == NE_EXPR);
4894 return constant_boolean_node (code == NE_EXPR, compare_type);
4898 if (nbitpos < 0)
4899 return 0;
4901 /* Single-bit compares should always be against zero. */
4902 if (lbitsize == 1 && ! integer_zerop (rhs))
4904 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4905 rhs = build_int_cst (type, 0);
4908 /* Make a new bitfield reference, shift the constant over the
4909 appropriate number of bits and mask it with the computed mask
4910 (in case this was a signed field). If we changed it, make a new one. */
4911 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4912 nbitsize, nbitpos, 1, lreversep);
4914 rhs = const_binop (BIT_AND_EXPR,
4915 const_binop (LSHIFT_EXPR,
4916 fold_convert_loc (loc, unsigned_type, rhs),
4917 size_int (lbitpos)),
4918 mask);
4920 lhs = build2_loc (loc, code, compare_type,
4921 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4922 return lhs;
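/* Worked example in plain C (illustrative; the struct, the field
   layout and the masks assume the usual little-endian ABI): comparing
   a bit-field against a constant becomes a byte load, a mask and a
   compare against the shifted constant, avoiding the extraction
   shift.  */
#include <stdint.h>
#include <string.h>

struct demo { unsigned a : 3; unsigned b : 5; };

static int
direct (struct demo x)
{
  return x.b == 7;
}

static int
folded (struct demo x)
{
  uint8_t byte;
  memcpy (&byte, &x, 1);	      /* load the containing byte */
  return (byte & 0xf8) == (7u << 3);  /* mask B's bits, compare shifted 7 */
}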
4925 /* Subroutine for fold_truth_andor_1: decode a field reference.
4927 If EXP is a comparison reference, we return the innermost reference.
4929 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4930 set to the starting bit number.
4932 If the innermost field can be completely contained in a mode-sized
4933 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4935 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4936 otherwise it is not changed.
4938 *PUNSIGNEDP is set to the signedness of the field.
4940 *PREVERSEP is set to the storage order of the field.
4942 *PMASK is set to the mask used. This is either contained in a
4943 BIT_AND_EXPR or derived from the width of the field.
4945 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4947 Return 0 if this is not a component reference or is one that we can't
4948 do anything with. */
4950 static tree
4951 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4952 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4953 int *punsignedp, int *preversep, int *pvolatilep,
4954 tree *pmask, tree *pand_mask)
4956 tree exp = *exp_;
4957 tree outer_type = 0;
4958 tree and_mask = 0;
4959 tree mask, inner, offset;
4960 tree unsigned_type;
4961 unsigned int precision;
4963 /* All the optimizations using this function assume integer fields.
4964 There are problems with FP fields since the type_for_size call
4965 below can fail for, e.g., XFmode. */
4966 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4967 return NULL_TREE;
4969 /* We are interested in the bare arrangement of bits, so strip everything
4970 that doesn't affect the machine mode. However, record the type of the
4971 outermost expression if it may matter below. */
4972 if (CONVERT_EXPR_P (exp)
4973 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4974 outer_type = TREE_TYPE (exp);
4975 STRIP_NOPS (exp);
4977 if (TREE_CODE (exp) == BIT_AND_EXPR)
4979 and_mask = TREE_OPERAND (exp, 1);
4980 exp = TREE_OPERAND (exp, 0);
4981 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4982 if (TREE_CODE (and_mask) != INTEGER_CST)
4983 return NULL_TREE;
4986 poly_int64 poly_bitsize, poly_bitpos;
4987 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4988 pmode, punsignedp, preversep, pvolatilep);
4989 if ((inner == exp && and_mask == 0)
4990 || !poly_bitsize.is_constant (pbitsize)
4991 || !poly_bitpos.is_constant (pbitpos)
4992 || *pbitsize < 0
4993 || offset != 0
4994 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4995 /* Reject out-of-bound accesses (PR79731). */
4996 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4997 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4998 *pbitpos + *pbitsize) < 0))
4999 return NULL_TREE;
5001 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
5002 if (unsigned_type == NULL_TREE)
5003 return NULL_TREE;
5005 *exp_ = exp;
5007 /* If the number of bits in the reference is the same as the bitsize of
5008 the outer type, then the outer type gives the signedness. Otherwise
5009 (in case of a small bitfield) the signedness is unchanged. */
5010 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
5011 *punsignedp = TYPE_UNSIGNED (outer_type);
5013 /* Compute the mask to access the bitfield. */
5014 precision = TYPE_PRECISION (unsigned_type);
5016 mask = build_int_cst_type (unsigned_type, -1);
5018 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5019 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5021 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
5022 if (and_mask != 0)
5023 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
5024 fold_convert_loc (loc, unsigned_type, and_mask), mask);
5026 *pmask = mask;
5027 *pand_mask = and_mask;
5028 return inner;
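/* Illustrative sketch, not GCC code: the two mask-building shifts
   above, specialized in plain C to a 32-bit precision.  Valid for
   1 <= bitsize <= 32.  */
#include <stdint.h>

static uint32_t
field_mask (unsigned bitsize)
{
  /* Shift all-ones left, then logically right again, leaving
     BITSIZE low-order one bits.  */
  return ((uint32_t) -1 << (32 - bitsize)) >> (32 - bitsize);
}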
5031 /* Return true if MASK represents a mask of SIZE ones in the low-order
5032 bit positions and the type of MASK is signed. */
5034 static bool
5035 all_ones_mask_p (const_tree mask, unsigned int size)
5037 tree type = TREE_TYPE (mask);
5038 unsigned int precision = TYPE_PRECISION (type);
5040 /* If this function returns true when the type of the mask is
5041 UNSIGNED, then there will be errors. In particular see
5042 gcc.c-torture/execute/990326-1.c. There does not appear to be
5043 any documentation paper trail as to why this is so. But the pre
5044 wide-int worked with that restriction and it has been preserved
5045 here. */
5046 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
5047 return false;
5049 return wi::mask (size, false, precision) == wi::to_wide (mask);
5052 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
5053 represents the sign bit of EXP's type. If EXP represents a sign
5054 or zero extension, also test VAL against the unextended type.
5055 The return value is the (sub)expression whose sign bit is VAL,
5056 or NULL_TREE otherwise. */
5058 tree
5059 sign_bit_p (tree exp, const_tree val)
5061 int width;
5062 tree t;
5064 /* Tree EXP must have an integral type. */
5065 t = TREE_TYPE (exp);
5066 if (! INTEGRAL_TYPE_P (t))
5067 return NULL_TREE;
5069 /* Tree VAL must be an integer constant. */
5070 if (TREE_CODE (val) != INTEGER_CST
5071 || TREE_OVERFLOW (val))
5072 return NULL_TREE;
5074 width = TYPE_PRECISION (t);
5075 if (wi::only_sign_bit_p (wi::to_wide (val), width))
5076 return exp;
5078 /* Handle extension from a narrower type. */
5079 if (TREE_CODE (exp) == NOP_EXPR
5080 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5081 return sign_bit_p (TREE_OPERAND (exp, 0), val);
5083 return NULL_TREE;
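/* Illustrative sketch, not GCC code: wi::only_sign_bit_p accepts
   exactly the value whose top bit in a WIDTH-bit type is the only bit
   set.  In plain C, for widths up to 32:  */
#include <stdint.h>

static int
is_sign_bit (uint32_t val, unsigned width)  /* 1 <= width <= 32 */
{
  return val == (uint32_t) 1 << (width - 1);
}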
5086 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5087 operand is simple enough to be evaluated unconditionally. */
5089 static bool
5090 simple_operand_p (const_tree exp)
5092 /* Strip any conversions that don't change the machine mode. */
5093 STRIP_NOPS (exp);
5095 return (CONSTANT_CLASS_P (exp)
5096 || TREE_CODE (exp) == SSA_NAME
5097 || (DECL_P (exp)
5098 && ! TREE_ADDRESSABLE (exp)
5099 && ! TREE_THIS_VOLATILE (exp)
5100 && ! DECL_NONLOCAL (exp)
5101 /* Don't regard global variables as simple. They may be
5102 allocated in ways unknown to the compiler (shared memory,
5103 #pragma weak, etc). */
5104 && ! TREE_PUBLIC (exp)
5105 && ! DECL_EXTERNAL (exp)
5106 /* Weakrefs are not safe to be read, since they can be NULL.
5107 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5108 have DECL_WEAK flag set. */
5109 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5110 /* Loading a static variable is unduly expensive, but global
5111 registers aren't expensive. */
5112 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5115 /* Determine if an operand is simple enough to be evaluated unconditionally.
5116 In addition to simple_operand_p, we assume that comparisons, conversions,
5117 and logic-not operations are simple if their operands are simple, too. */
5119 bool
5120 simple_condition_p (tree exp)
5122 enum tree_code code;
5124 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5125 return false;
5127 while (CONVERT_EXPR_P (exp))
5128 exp = TREE_OPERAND (exp, 0);
5130 code = TREE_CODE (exp);
5132 if (TREE_CODE_CLASS (code) == tcc_comparison)
5133 return (simple_operand_p (TREE_OPERAND (exp, 0))
5134 && simple_operand_p (TREE_OPERAND (exp, 1)));
5136 if (code == TRUTH_NOT_EXPR)
5137 return simple_condition_p (TREE_OPERAND (exp, 0));
5139 return simple_operand_p (exp);
5143 /* The following functions are subroutines to fold_range_test and allow it to
5144 try to change a logical combination of comparisons into a range test.
5146 For example, both
5147 X == 2 || X == 3 || X == 4 || X == 5
5148 and
5149 X >= 2 && X <= 5
5150 are converted to
5151 (unsigned) (X - 2) <= 3
5153 We describe each set of comparisons as being either inside or outside
5154 a range, using a variable named like IN_P, and then describe the
5155 range with a lower and upper bound. If one of the bounds is omitted,
5156 it represents either the highest or lowest value of the type.
5158 In the comments below, we represent a range by two numbers in brackets
5159 preceded by a "+" to designate being inside that range, or a "-" to
5160 designate being outside that range, so the condition can be inverted by
5161 flipping the prefix. An omitted bound is represented by a "-". For
5162 example, "- [-, 10]" means being outside the range starting at the lowest
5163 possible value and ending at 10, in other words, being greater than 10.
5164 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5165 always false.
5167 We set up things so that the missing bounds are handled in a consistent
5168 manner so neither a missing bound nor "true" and "false" need to be
5169 handled using a special case. */
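/* Worked example in plain C (illustrative): both forms below have the
   same truth table, which is what licenses the single-comparison
   rewrite.  The subtraction is done in unsigned arithmetic, so for
   x < 2 it wraps to a value far above 3 and the test fails as it
   should.  */
static int
in_range (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
in_range_folded (int x)
{
  return (unsigned) x - 2u <= 3u;
}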
5171 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5172 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5173 and UPPER1_P are nonzero if the respective argument is an upper bound
5174 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5175 must be specified for a comparison. ARG1 will be converted to ARG0's
5176 type if both are specified. */
5178 static tree
5179 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5180 tree arg1, int upper1_p)
5182 tree tem;
5183 int result;
5184 int sgn0, sgn1;
5186 /* If neither arg represents infinity, do the normal operation.
5187 Else, if not a comparison, return infinity. Else handle the special
5188 comparison rules. Note that most of the cases below won't occur, but
5189 are handled for consistency. */
5191 if (arg0 != 0 && arg1 != 0)
5193 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5194 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5195 STRIP_NOPS (tem);
5196 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5199 if (TREE_CODE_CLASS (code) != tcc_comparison)
5200 return 0;
5202 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5203 for neither. In real mathematics, open-ended ranges cannot be
5204 assumed equal. But this is computer arithmetic, where numbers are
5205 finite, so any missing bound can be replaced by a value Z greater
5206 than any representable number. This permits us to treat unbounded
5207 ranges as equal. */
5208 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5209 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5210 switch (code)
5212 case EQ_EXPR:
5213 result = sgn0 == sgn1;
5214 break;
5215 case NE_EXPR:
5216 result = sgn0 != sgn1;
5217 break;
5218 case LT_EXPR:
5219 result = sgn0 < sgn1;
5220 break;
5221 case LE_EXPR:
5222 result = sgn0 <= sgn1;
5223 break;
5224 case GT_EXPR:
5225 result = sgn0 > sgn1;
5226 break;
5227 case GE_EXPR:
5228 result = sgn0 >= sgn1;
5229 break;
5230 default:
5231 gcc_unreachable ();
5234 return constant_boolean_node (result, type);
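/* Worked instance (illustrative): with ARG0 omitted as an upper bound
   (so SGN0 = 1, "plus infinity") and a present ARG1 (SGN1 = 0), code
   LT_EXPR yields false: plus infinity is never less than a finite
   bound.  This is the consistency the comment above asks for.  */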
5237 /* Helper routine for make_range. Perform one step for it; return the
5238 new expression if the loop should continue or NULL_TREE if it should
5239 stop. */
5241 tree
5242 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5243 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5244 bool *strict_overflow_p)
5246 tree arg0_type = TREE_TYPE (arg0);
5247 tree n_low, n_high, low = *p_low, high = *p_high;
5248 int in_p = *p_in_p, n_in_p;
5250 switch (code)
5252 case TRUTH_NOT_EXPR:
5253 /* We can only do something if the range is testing for zero. */
5254 if (low == NULL_TREE || high == NULL_TREE
5255 || ! integer_zerop (low) || ! integer_zerop (high))
5256 return NULL_TREE;
5257 *p_in_p = ! in_p;
5258 return arg0;
5260 case EQ_EXPR: case NE_EXPR:
5261 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5262 /* We can only do something if the range is testing for zero
5263 and if the second operand is an integer constant. Note that
5264 saying something is "in" the range we make is done by
5265 complementing IN_P since it will set in the initial case of
5266 being not equal to zero; "out" is leaving it alone. */
5267 if (low == NULL_TREE || high == NULL_TREE
5268 || ! integer_zerop (low) || ! integer_zerop (high)
5269 || TREE_CODE (arg1) != INTEGER_CST)
5270 return NULL_TREE;
5272 switch (code)
5274 case NE_EXPR: /* - [c, c] */
5275 low = high = arg1;
5276 break;
5277 case EQ_EXPR: /* + [c, c] */
5278 in_p = ! in_p, low = high = arg1;
5279 break;
5280 case GT_EXPR: /* - [-, c] */
5281 low = 0, high = arg1;
5282 break;
5283 case GE_EXPR: /* + [c, -] */
5284 in_p = ! in_p, low = arg1, high = 0;
5285 break;
5286 case LT_EXPR: /* - [c, -] */
5287 low = arg1, high = 0;
5288 break;
5289 case LE_EXPR: /* + [-, c] */
5290 in_p = ! in_p, low = 0, high = arg1;
5291 break;
5292 default:
5293 gcc_unreachable ();
5296 /* If this is an unsigned comparison, we also know that EXP is
5297 greater than or equal to zero. We base the range tests we make
5298 on that fact, so we record it here so we can parse existing
5299 range tests. We test arg0_type since often the return type
5300 of, e.g. EQ_EXPR, is boolean. */
5301 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5303 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5304 in_p, low, high, 1,
5305 build_int_cst (arg0_type, 0),
5306 NULL_TREE))
5307 return NULL_TREE;
5309 in_p = n_in_p, low = n_low, high = n_high;
5311 /* If the high bound is missing, but we have a nonzero low
5312 bound, reverse the range so it goes from zero to the low bound
5313 minus 1. */
5314 if (high == 0 && low && ! integer_zerop (low))
5316 in_p = ! in_p;
5317 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5318 build_int_cst (TREE_TYPE (low), 1), 0);
5319 low = build_int_cst (arg0_type, 0);
5323 *p_low = low;
5324 *p_high = high;
5325 *p_in_p = in_p;
5326 return arg0;
5328 case NEGATE_EXPR:
5329 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5330 low and high are non-NULL, then normalize will DTRT. */
5331 if (!TYPE_UNSIGNED (arg0_type)
5332 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5334 if (low == NULL_TREE)
5335 low = TYPE_MIN_VALUE (arg0_type);
5336 if (high == NULL_TREE)
5337 high = TYPE_MAX_VALUE (arg0_type);
5340 /* (-x) IN [a,b] -> x in [-b, -a] */
5341 n_low = range_binop (MINUS_EXPR, exp_type,
5342 build_int_cst (exp_type, 0),
5343 0, high, 1);
5344 n_high = range_binop (MINUS_EXPR, exp_type,
5345 build_int_cst (exp_type, 0),
5346 0, low, 0);
5347 if (n_high != 0 && TREE_OVERFLOW (n_high))
5348 return NULL_TREE;
5349 goto normalize;
5351 case BIT_NOT_EXPR:
5352 /* ~ X -> -X - 1 */
5353 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5354 build_int_cst (exp_type, 1));
5356 case PLUS_EXPR:
5357 case MINUS_EXPR:
5358 if (TREE_CODE (arg1) != INTEGER_CST)
5359 return NULL_TREE;
5361 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5362 move a constant to the other side. */
5363 if (!TYPE_UNSIGNED (arg0_type)
5364 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5365 return NULL_TREE;
5367 /* If EXP is signed, any overflow in the computation is undefined,
5368 so we don't worry about it so long as our computations on
5369 the bounds don't overflow. For unsigned, overflow is defined
5370 and this is exactly the right thing. */
5371 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5372 arg0_type, low, 0, arg1, 0);
5373 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5374 arg0_type, high, 1, arg1, 0);
5375 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5376 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5377 return NULL_TREE;
5379 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5380 *strict_overflow_p = true;
5382 normalize:
5383 /* Check for an unsigned range which has wrapped around the maximum
5384 value thus making n_high < n_low, and normalize it. */
5385 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5387 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5388 build_int_cst (TREE_TYPE (n_high), 1), 0);
5389 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5390 build_int_cst (TREE_TYPE (n_low), 1), 0);
5392 /* If the range is of the form +/- [ x+1, x ], we won't
5393 be able to normalize it. But then, it represents the
5394 whole range or the empty set, so make it
5395 +/- [ -, - ]. */
5396 if (tree_int_cst_equal (n_low, low)
5397 && tree_int_cst_equal (n_high, high))
5398 low = high = 0;
5399 else
5400 in_p = ! in_p;
5402 else
5403 low = n_low, high = n_high;
5405 *p_low = low;
5406 *p_high = high;
5407 *p_in_p = in_p;
5408 return arg0;
5410 CASE_CONVERT:
5411 case NON_LVALUE_EXPR:
5412 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5413 return NULL_TREE;
5415 if (! INTEGRAL_TYPE_P (arg0_type)
5416 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5417 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5418 return NULL_TREE;
5420 n_low = low, n_high = high;
5422 if (n_low != 0)
5423 n_low = fold_convert_loc (loc, arg0_type, n_low);
5425 if (n_high != 0)
5426 n_high = fold_convert_loc (loc, arg0_type, n_high);
5428 /* If we're converting arg0 from an unsigned type to exp's signed
5429 type, we will be doing the comparison as unsigned.
5430 The tests above have already verified that LOW and HIGH
5431 are both positive.
5433 So we have to ensure that we will handle large unsigned
5434 values the same way that the current signed bounds treat
5435 negative values. */
5437 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5439 tree high_positive;
5440 tree equiv_type;
5441 /* For fixed-point modes, we need to pass the saturating flag
5442 as the 2nd parameter. */
5443 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5444 equiv_type
5445 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5446 TYPE_SATURATING (arg0_type));
5447 else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5448 equiv_type = arg0_type;
5449 else
5450 equiv_type
5451 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5453 /* A range without an upper bound is, naturally, unbounded.
5454 Since convert would have cropped a very large value, use
5455 the max value for the destination type. */
5456 high_positive
5457 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5458 : TYPE_MAX_VALUE (arg0_type);
5460 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5461 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5462 fold_convert_loc (loc, arg0_type,
5463 high_positive),
5464 build_int_cst (arg0_type, 1));
5466 /* If the low bound is specified, "and" the range with the
5467 range for which the original unsigned value will be
5468 positive. */
5469 if (low != 0)
5471 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5472 1, fold_convert_loc (loc, arg0_type,
5473 integer_zero_node),
5474 high_positive))
5475 return NULL_TREE;
5477 in_p = (n_in_p == in_p);
5479 else
5481 /* Otherwise, "or" the range with the range of the input
5482 that will be interpreted as negative. */
5483 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5484 1, fold_convert_loc (loc, arg0_type,
5485 integer_zero_node),
5486 high_positive))
5487 return NULL_TREE;
5489 in_p = (in_p != n_in_p);
5493 /* Otherwise, if we are converting arg0 from a signed type to exp's
5494 unsigned type, we will do the comparison as signed. If
5495 high is non-NULL, we punt above if it doesn't fit in the signed
5496 type, so if we get through here, +[-, high] or +[low, high] are
5497 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5498 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5499 high is not, the +[low, -] range is equivalent to union of
5500 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5501 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5502 low being 0, which should be treated as [-, -]. */
5503 else if (TYPE_UNSIGNED (exp_type)
5504 && !TYPE_UNSIGNED (arg0_type)
5505 && low
5506 && !high)
5508 if (integer_zerop (low))
5509 n_low = NULL_TREE;
5510 else
5512 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5513 n_low, build_int_cst (arg0_type, -1));
5514 n_low = build_zero_cst (arg0_type);
5515 in_p = !in_p;
5519 *p_low = n_low;
5520 *p_high = n_high;
5521 *p_in_p = in_p;
5522 return arg0;
5524 default:
5525 return NULL_TREE;
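/* Worked instance (illustrative): for code PLUS_EXPR with ARG1 = 3 and
   an incoming range + [5, 10], the two range_binop calls in the
   PLUS_EXPR/MINUS_EXPR case compute n_low = 5 - 3 and n_high = 10 - 3,
   turning "x + 3 in [5, 10]" into "x in [2, 7]" (allowed there only
   when the type is unsigned or has undefined overflow).  */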
5529 /* Given EXP, a logical expression, set the range it is testing into
5530 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5531 actually being tested. *PLOW and *PHIGH will be made of the same
5532 type as the returned expression. If EXP is not a comparison, we
5533 will most likely not be returning a useful value and range. Set
5534 *STRICT_OVERFLOW_P to true if the return value is only valid
5535 because signed overflow is undefined; otherwise, do not change
5536 *STRICT_OVERFLOW_P. */
5538 tree
5539 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5540 bool *strict_overflow_p)
5542 enum tree_code code;
5543 tree arg0, arg1 = NULL_TREE;
5544 tree exp_type, nexp;
5545 int in_p;
5546 tree low, high;
5547 location_t loc = EXPR_LOCATION (exp);
5549 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5550 and see if we can refine the range. Some of the cases below may not
5551 happen, but it doesn't seem worth worrying about this. We "continue"
5552 the outer loop when we've changed something; otherwise we "break"
5553 the switch, which will "break" the while. */
5555 in_p = 0;
5556 low = high = build_int_cst (TREE_TYPE (exp), 0);
5558 while (1)
5560 code = TREE_CODE (exp);
5561 exp_type = TREE_TYPE (exp);
5562 arg0 = NULL_TREE;
5564 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5566 if (TREE_OPERAND_LENGTH (exp) > 0)
5567 arg0 = TREE_OPERAND (exp, 0);
5568 if (TREE_CODE_CLASS (code) == tcc_binary
5569 || TREE_CODE_CLASS (code) == tcc_comparison
5570 || (TREE_CODE_CLASS (code) == tcc_expression
5571 && TREE_OPERAND_LENGTH (exp) > 1))
5572 arg1 = TREE_OPERAND (exp, 1);
5574 if (arg0 == NULL_TREE)
5575 break;
5577 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5578 &high, &in_p, strict_overflow_p);
5579 if (nexp == NULL_TREE)
5580 break;
5581 exp = nexp;
5584 /* If EXP is a constant, we can evaluate whether this is true or false. */
5585 if (TREE_CODE (exp) == INTEGER_CST)
5587 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5588 exp, 0, low, 0))
5589 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5590 exp, 1, high, 1)));
5591 low = high = 0;
5592 exp = 0;
5595 *pin_p = in_p, *plow = low, *phigh = high;
5596 return exp;
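/* Worked trace (illustrative): for EXP = (x > 10) with signed X, the
   loop starts from "EXP != 0", i.e. in_p = 0 with the range [0, 0],
   and the GT_EXPR step rewrites that to in_p = 0 with the range
   [-, 10]: X lies outside [minimum, 10], in other words x > 10.  The
   function returns X with those values in *PIN_P, *PLOW and *PHIGH.  */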
5599 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5600 a bitwise check, i.e. when
5601 LOW == 0xXX...X00...0
5602 HIGH == 0xXX...X11...1
5603 Return corresponding mask in MASK and stem in VALUE. */
5605 static bool
5606 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5607 tree *value)
5609 if (TREE_CODE (low) != INTEGER_CST
5610 || TREE_CODE (high) != INTEGER_CST)
5611 return false;
5613 unsigned prec = TYPE_PRECISION (type);
5614 wide_int lo = wi::to_wide (low, prec);
5615 wide_int hi = wi::to_wide (high, prec);
5617 wide_int end_mask = lo ^ hi;
5618 if ((end_mask & (end_mask + 1)) != 0
5619 || (lo & end_mask) != 0)
5620 return false;
5622 wide_int stem_mask = ~end_mask;
5623 wide_int stem = lo & stem_mask;
5624 if (stem != (hi & stem_mask))
5625 return false;
5627 *mask = wide_int_to_tree (type, stem_mask);
5628 *value = wide_int_to_tree (type, stem);
5630 return true;
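/* Illustrative sketch (not part of the original file): the bit trick
   above in plain C.  end_mask = LOW ^ HIGH must be a block of
   low-order ones, which (m & (m + 1)) == 0 tests, and LOW must have
   those bits clear; then x in [LOW, HIGH] collapses to a single
   AND-and-compare against the common stem.  */
static int
example_maskable_range (unsigned x)
{
  const unsigned low = 0x50, high = 0x5f;	/* All values 0x5X.  */
  unsigned end_mask = low ^ high;		/* 0x0f, trailing ones.  */
  int slow = x >= low && x <= high;
  int fast = (x & ~end_mask) == low;		/* stem == low here.  */
  return slow == fast;				/* Always 1.  */
}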
5633 /* Helper routine for build_range_check and match.pd. Return the type to
5634 perform the check in, or NULL if it shouldn't be optimized. */
5636 tree
5637 range_check_type (tree etype)
5639 /* First make sure that arithmetic in this type is valid, then make sure
5640 that it wraps around. */
5641 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5642 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5644 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5646 tree utype, minv, maxv;
5648 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5649 for the type in question, as we rely on this here. */
5650 utype = unsigned_type_for (etype);
5651 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5652 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5653 build_int_cst (TREE_TYPE (maxv), 1), 1);
5654 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5656 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5657 minv, 1, maxv, 1)))
5658 etype = utype;
5659 else
5660 return NULL_TREE;
5662 else if (POINTER_TYPE_P (etype)
5663 || TREE_CODE (etype) == OFFSET_TYPE
5664 /* Right now all BITINT_TYPEs satisfy
5665 (unsigned) max + 1 == (unsigned) min, so no need to verify
5666 that like for INTEGER_TYPEs. */
5667 || TREE_CODE (etype) == BITINT_TYPE)
5668 etype = unsigned_type_for (etype);
5669 return etype;
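/* Illustrative sketch (not part of the original file): the property
   checked above for signed INTEGER_TYPEs, stated directly.  The
   unsigned counterpart must wrap the maximum around to the minimum,
   i.e. (unsigned) INT_MAX + 1 == (unsigned) INT_MIN, before a signed
   range check may be rewritten in the unsigned type.  */
static int
example_wrap_property (void)
{
  unsigned umax = (unsigned) INT_MAX;	/* <limits.h> constants.  */
  unsigned umin = (unsigned) INT_MIN;
  return umax + 1 == umin;		/* 1 on two's complement targets.  */
}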
5672 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5673 type, TYPE, return an expression to test if EXP is in (or out of, depending
5674 on IN_P) the range. Return 0 if the test couldn't be created. */
5676 tree
5677 build_range_check (location_t loc, tree type, tree exp, int in_p,
5678 tree low, tree high)
5680 tree etype = TREE_TYPE (exp), mask, value;
5682 /* Disable this optimization for function pointer expressions
5683 on targets that require function pointer canonicalization. */
5684 if (targetm.have_canonicalize_funcptr_for_compare ()
5685 && POINTER_TYPE_P (etype)
5686 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5687 return NULL_TREE;
5689 if (! in_p)
5691 value = build_range_check (loc, type, exp, 1, low, high);
5692 if (value != 0)
5693 return invert_truthvalue_loc (loc, value);
5695 return 0;
5698 if (low == 0 && high == 0)
5699 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5701 if (low == 0)
5702 return fold_build2_loc (loc, LE_EXPR, type, exp,
5703 fold_convert_loc (loc, etype, high));
5705 if (high == 0)
5706 return fold_build2_loc (loc, GE_EXPR, type, exp,
5707 fold_convert_loc (loc, etype, low));
5709 if (operand_equal_p (low, high, 0))
5710 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5711 fold_convert_loc (loc, etype, low));
5713 if (TREE_CODE (exp) == BIT_AND_EXPR
5714 && maskable_range_p (low, high, etype, &mask, &value))
5715 return fold_build2_loc (loc, EQ_EXPR, type,
5716 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5717 exp, mask),
5718 value);
5720 if (integer_zerop (low))
5722 if (! TYPE_UNSIGNED (etype))
5724 etype = unsigned_type_for (etype);
5725 high = fold_convert_loc (loc, etype, high);
5726 exp = fold_convert_loc (loc, etype, exp);
5728 return build_range_check (loc, type, exp, 1, 0, high);
5731 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5732 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5734 int prec = TYPE_PRECISION (etype);
5736 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5738 if (TYPE_UNSIGNED (etype))
5740 tree signed_etype = signed_type_for (etype);
5741 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5742 etype
5743 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5744 else
5745 etype = signed_etype;
5746 exp = fold_convert_loc (loc, etype, exp);
5748 return fold_build2_loc (loc, GT_EXPR, type, exp,
5749 build_int_cst (etype, 0));
5753 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5754 This requires wrap-around arithmetic for the type of the expression. */
5755 etype = range_check_type (etype);
5756 if (etype == NULL_TREE)
5757 return NULL_TREE;
5759 high = fold_convert_loc (loc, etype, high);
5760 low = fold_convert_loc (loc, etype, low);
5761 exp = fold_convert_loc (loc, etype, exp);
5763 value = const_binop (MINUS_EXPR, high, low);
5765 if (value != 0 && !TREE_OVERFLOW (value))
5766 return build_range_check (loc, type,
5767 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5768 1, build_int_cst (etype, 0), value);
5770 return 0;
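/* Illustrative sketch (not part of the original file): the two key
   rewrites performed above, in plain C.  The first assumes 8-bit
   two's complement signed char; the second relies on unsigned
   wrap-around, which is what range_check_type guarantees.  */
static int
example_build_range_check (unsigned char c, int x)
{
  /* (c >= 1 && c <= 127)  ==>  (signed char) c > 0.  */
  int r1 = (c >= 1 && c <= 127) == ((signed char) c > 0);
  /* (x >= 10 && x <= 99)  ==>  (unsigned) (x - 10) <= 99 - 10.  */
  int r2 = (x >= 10 && x <= 99) == ((unsigned) x - 10u <= 89u);
  return r1 && r2;	/* Both hold for all inputs.  */
}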
5773 /* Return the predecessor of VAL in its type, handling the infinite case. */
5775 static tree
5776 range_predecessor (tree val)
5778 tree type = TREE_TYPE (val);
5780 if (INTEGRAL_TYPE_P (type)
5781 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5782 return 0;
5783 else
5784 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5785 build_int_cst (TREE_TYPE (val), 1), 0);
5788 /* Return the successor of VAL in its type, handling the infinite case. */
5790 static tree
5791 range_successor (tree val)
5793 tree type = TREE_TYPE (val);
5795 if (INTEGRAL_TYPE_P (type)
5796 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5797 return 0;
5798 else
5799 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5800 build_int_cst (TREE_TYPE (val), 1), 0);
5803 /* Given two ranges, see if we can merge them into one. Return 1 if we
5804 can, 0 if we can't. Set the output range into the specified parameters. */
5806 bool
5807 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5808 tree high0, int in1_p, tree low1, tree high1)
5810 bool no_overlap;
5811 int subset;
5812 int temp;
5813 tree tem;
5814 int in_p;
5815 tree low, high;
5816 int lowequal = ((low0 == 0 && low1 == 0)
5817 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5818 low0, 0, low1, 0)));
5819 int highequal = ((high0 == 0 && high1 == 0)
5820 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5821 high0, 1, high1, 1)));
5823 /* Make range 0 be the range that starts first, or ends last if they
5824 start at the same value. Swap them if that isn't the case. */
5825 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5826 low0, 0, low1, 0))
5827 || (lowequal
5828 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5829 high1, 1, high0, 1))))
5831 temp = in0_p, in0_p = in1_p, in1_p = temp;
5832 tem = low0, low0 = low1, low1 = tem;
5833 tem = high0, high0 = high1, high1 = tem;
5836 /* If the second range is != high1 where high1 is the type maximum of
5837 the type, try first merging with < high1 range. */
5838 if (low1
5839 && high1
5840 && TREE_CODE (low1) == INTEGER_CST
5841 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5842 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5843 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5844 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5845 && operand_equal_p (low1, high1, 0))
5847 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5848 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5849 !in1_p, NULL_TREE, range_predecessor (low1)))
5850 return true;
5851 /* Similarly for the second range != low1 where low1 is the type minimum
5852 of the type, try first merging with > low1 range. */
5853 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5854 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5855 !in1_p, range_successor (low1), NULL_TREE))
5856 return true;
5859 /* Now flag two cases, whether the ranges are disjoint or whether the
5860 second range is totally subsumed in the first. Note that the tests
5861 below are simplified by the ones above. */
5862 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5863 high0, 1, low1, 0));
5864 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5865 high1, 1, high0, 1));
5867 /* We now have four cases, depending on whether we are including or
5868 excluding the two ranges. */
5869 if (in0_p && in1_p)
5871 /* If they don't overlap, the result is false. If the second range
5872 is a subset it is the result. Otherwise, the range is from the start
5873 of the second to the end of the first. */
5874 if (no_overlap)
5875 in_p = 0, low = high = 0;
5876 else if (subset)
5877 in_p = 1, low = low1, high = high1;
5878 else
5879 in_p = 1, low = low1, high = high0;
5882 else if (in0_p && ! in1_p)
5884 /* If they don't overlap, the result is the first range. If they are
5885 equal, the result is false. If the second range is a subset of the
5886 first, and the ranges begin at the same place, we go from just after
5887 the end of the second range to the end of the first. If the second
5888 range is not a subset of the first, or if it is a subset and both
5889 ranges end at the same place, the range starts at the start of the
5890 first range and ends just before the second range.
5891 Otherwise, we can't describe this as a single range. */
5892 if (no_overlap)
5893 in_p = 1, low = low0, high = high0;
5894 else if (lowequal && highequal)
5895 in_p = 0, low = high = 0;
5896 else if (subset && lowequal)
5898 low = range_successor (high1);
5899 high = high0;
5900 in_p = 1;
5901 if (low == 0)
5903 /* We are in the weird situation where high0 > high1 but
5904 high1 has no successor. Punt. */
5905 return 0;
5908 else if (! subset || highequal)
5910 low = low0;
5911 high = range_predecessor (low1);
5912 in_p = 1;
5913 if (high == 0)
5915 /* low0 < low1 but low1 has no predecessor. Punt. */
5916 return 0;
5919 else
5920 return 0;
5923 else if (! in0_p && in1_p)
5925 /* If they don't overlap, the result is the second range. If the second
5926 is a subset of the first, the result is false. Otherwise,
5927 the range starts just after the first range and ends at the
5928 end of the second. */
5929 if (no_overlap)
5930 in_p = 1, low = low1, high = high1;
5931 else if (subset || highequal)
5932 in_p = 0, low = high = 0;
5933 else
5935 low = range_successor (high0);
5936 high = high1;
5937 in_p = 1;
5938 if (low == 0)
5940 /* high1 > high0 but high0 has no successor. Punt. */
5941 return 0;
5946 else
5948 /* The case where we are excluding both ranges. Here the complex case
5949 is if they don't overlap. In that case, the only time we have a
5950 range is if they are adjacent. If the second is a subset of the
5951 first, the result is the first. Otherwise, the range to exclude
5952 starts at the beginning of the first range and ends at the end of the
5953 second. */
5954 if (no_overlap)
5956 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5957 range_successor (high0),
5958 1, low1, 0)))
5959 in_p = 0, low = low0, high = high1;
5960 else
5962 /* Canonicalize - [min, x] into - [-, x]. */
5963 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5964 switch (TREE_CODE (TREE_TYPE (low0)))
5966 case ENUMERAL_TYPE:
5967 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5968 GET_MODE_BITSIZE
5969 (TYPE_MODE (TREE_TYPE (low0)))))
5970 break;
5971 /* FALLTHROUGH */
5972 case INTEGER_TYPE:
5973 if (tree_int_cst_equal (low0,
5974 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5975 low0 = 0;
5976 break;
5977 case POINTER_TYPE:
5978 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5979 && integer_zerop (low0))
5980 low0 = 0;
5981 break;
5982 default:
5983 break;
5986 /* Canonicalize - [x, max] into - [x, -]. */
5987 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5988 switch (TREE_CODE (TREE_TYPE (high1)))
5990 case ENUMERAL_TYPE:
5991 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5992 GET_MODE_BITSIZE
5993 (TYPE_MODE (TREE_TYPE (high1)))))
5994 break;
5995 /* FALLTHROUGH */
5996 case INTEGER_TYPE:
5997 if (tree_int_cst_equal (high1,
5998 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5999 high1 = 0;
6000 break;
6001 case POINTER_TYPE:
6002 if (TYPE_UNSIGNED (TREE_TYPE (high1))
6003 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
6004 high1, 1,
6005 build_int_cst (TREE_TYPE (high1), 1),
6006 1)))
6007 high1 = 0;
6008 break;
6009 default:
6010 break;
6013 /* The ranges might be also adjacent between the maximum and
6014 minimum values of the given type. For
6015 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
6016 return + [x + 1, y - 1]. */
6017 if (low0 == 0 && high1 == 0)
6019 low = range_successor (high0);
6020 high = range_predecessor (low1);
6021 if (low == 0 || high == 0)
6022 return 0;
6024 in_p = 1;
6026 else
6027 return 0;
6030 else if (subset)
6031 in_p = 0, low = low0, high = high0;
6032 else
6033 in_p = 0, low = low0, high = high1;
6036 *pin_p = in_p, *plow = low, *phigh = high;
6037 return 1;
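/* Illustrative sketch (not part of the original file): the simplest
   merge_ranges case, two "in" ranges combined by AND.  The merged
   range runs from the later start to the earlier end, exactly the
   in0_p && in1_p branch above.  */
static int
example_merge_ranges (int x)
{
  int separate = (x >= 5 && x <= 20) && (x >= 10 && x <= 30);
  int merged = x >= 10 && x <= 20;	/* +[5,20] & +[10,30] = +[10,20].  */
  return separate == merged;		/* Always 1.  */
}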
6041 /* Subroutine of fold, looking inside expressions of the form
6042 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6043 are the three operands of the COND_EXPR. This function is
6044 being used also to optimize A op B ? C : A, by reversing the
6045 comparison first.
6047 Return a folded expression whose code is not a COND_EXPR
6048 anymore, or NULL_TREE if no folding opportunity is found. */
6050 static tree
6051 fold_cond_expr_with_comparison (location_t loc, tree type,
6052 enum tree_code comp_code,
6053 tree arg00, tree arg01, tree arg1, tree arg2)
6055 tree arg1_type = TREE_TYPE (arg1);
6056 tree tem;
6058 STRIP_NOPS (arg1);
6059 STRIP_NOPS (arg2);
6061 /* If we have A op 0 ? A : -A, consider applying the following
6062 transformations:
6064 A == 0? A : -A same as -A
6065 A != 0? A : -A same as A
6066 A >= 0? A : -A same as abs (A)
6067 A > 0? A : -A same as abs (A)
6068 A <= 0? A : -A same as -abs (A)
6069 A < 0? A : -A same as -abs (A)
6071 None of these transformations work for modes with signed
6072 zeros. If A is +/-0, the first two transformations will
6073 change the sign of the result (from +0 to -0, or vice
6074 versa). The last four will fix the sign of the result,
6075 even though the original expressions could be positive or
6076 negative, depending on the sign of A.
6078 Note that all these transformations are correct if A is
6079 NaN, since the two alternatives (A and -A) are also NaNs. */
6080 if (!HONOR_SIGNED_ZEROS (type)
6081 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6082 ? real_zerop (arg01)
6083 : integer_zerop (arg01))
6084 && ((TREE_CODE (arg2) == NEGATE_EXPR
6085 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6086 /* In the case that A is of the form X-Y, '-A' (arg2) may
6087 have already been folded to Y-X, check for that. */
6088 || (TREE_CODE (arg1) == MINUS_EXPR
6089 && TREE_CODE (arg2) == MINUS_EXPR
6090 && operand_equal_p (TREE_OPERAND (arg1, 0),
6091 TREE_OPERAND (arg2, 1), 0)
6092 && operand_equal_p (TREE_OPERAND (arg1, 1),
6093 TREE_OPERAND (arg2, 0), 0))))
6094 switch (comp_code)
6096 case EQ_EXPR:
6097 case UNEQ_EXPR:
6098 tem = fold_convert_loc (loc, arg1_type, arg1);
6099 return fold_convert_loc (loc, type, negate_expr (tem));
6100 case NE_EXPR:
6101 case LTGT_EXPR:
6102 return fold_convert_loc (loc, type, arg1);
6103 case UNGE_EXPR:
6104 case UNGT_EXPR:
6105 if (flag_trapping_math)
6106 break;
6107 /* Fall through. */
6108 case GE_EXPR:
6109 case GT_EXPR:
6110 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6111 break;
6112 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6113 return fold_convert_loc (loc, type, tem);
6114 case UNLE_EXPR:
6115 case UNLT_EXPR:
6116 if (flag_trapping_math)
6117 break;
6118 /* FALLTHRU */
6119 case LE_EXPR:
6120 case LT_EXPR:
6121 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6122 break;
6123 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6124 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6126 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6127 is not: it invokes UB both in abs and in the negation of it.
6128 So, use ABSU_EXPR instead. */
6129 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6130 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6131 tem = negate_expr (tem);
6132 return fold_convert_loc (loc, type, tem);
6134 else
6136 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6137 return negate_expr (fold_convert_loc (loc, type, tem));
6139 default:
6140 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6141 break;
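/* Illustrative aside (not part of the original file): the ABSU_EXPR
   branch above, spelled out in plain C.  For a == INT_MIN both
   abs (a) and -a overflow, but
     unsigned ua = a < 0 ? 0u - (unsigned) a : (unsigned) a;
     result = (int) (0u - ua);
   computes -abs (a) entirely in unsigned arithmetic; the only
   implementation-defined step is the final conversion back to int,
   which GCC defines as reduction modulo 2^N.  */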
6144 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6145 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6146 both transformations are correct when A is NaN: A != 0
6147 is then true, and A == 0 is false. */
6149 if (!HONOR_SIGNED_ZEROS (type)
6150 && integer_zerop (arg01) && integer_zerop (arg2))
6152 if (comp_code == NE_EXPR)
6153 return fold_convert_loc (loc, type, arg1);
6154 else if (comp_code == EQ_EXPR)
6155 return build_zero_cst (type);
6158 /* Try some transformations of A op B ? A : B.
6160 A == B? A : B same as B
6161 A != B? A : B same as A
6162 A >= B? A : B same as max (A, B)
6163 A > B? A : B same as max (B, A)
6164 A <= B? A : B same as min (A, B)
6165 A < B? A : B same as min (B, A)
6167 As above, these transformations don't work in the presence
6168 of signed zeros. For example, if A and B are zeros of
6169 opposite sign, the first two transformations will change
6170 the sign of the result. In the last four, the original
6171 expressions give different results for (A=+0, B=-0) and
6172 (A=-0, B=+0), but the transformed expressions do not.
6174 The first two transformations are correct if either A or B
6175 is a NaN. In the first transformation, the condition will
6176 be false, and B will indeed be chosen. In the case of the
6177 second transformation, the condition A != B will be true,
6178 and A will be chosen.
6180 The conversions to max() and min() are not correct if B is
6181 a number and A is not. The conditions in the original
6182 expressions will be false, so all four give B. The min()
6183 and max() versions would give a NaN instead. */
6184 if (!HONOR_SIGNED_ZEROS (type)
6185 && operand_equal_for_comparison_p (arg01, arg2)
6186 /* Avoid these transformations if the COND_EXPR may be used
6187 as an lvalue in the C++ front-end. PR c++/19199. */
6188 && (in_gimple_form
6189 || VECTOR_TYPE_P (type)
6190 || (! lang_GNU_CXX ()
6191 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6192 || ! maybe_lvalue_p (arg1)
6193 || ! maybe_lvalue_p (arg2)))
6195 tree comp_op0 = arg00;
6196 tree comp_op1 = arg01;
6197 tree comp_type = TREE_TYPE (comp_op0);
6199 switch (comp_code)
6201 case EQ_EXPR:
6202 return fold_convert_loc (loc, type, arg2);
6203 case NE_EXPR:
6204 return fold_convert_loc (loc, type, arg1);
6205 case LE_EXPR:
6206 case LT_EXPR:
6207 case UNLE_EXPR:
6208 case UNLT_EXPR:
6209 /* In C++ a ?: expression can be an lvalue, so put the
6210 operand which will be used if they are equal first
6211 so that we can convert this back to the
6212 corresponding COND_EXPR. */
6213 if (!HONOR_NANS (arg1))
6215 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6216 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6217 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6218 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6219 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6220 comp_op1, comp_op0);
6221 return fold_convert_loc (loc, type, tem);
6223 break;
6224 case GE_EXPR:
6225 case GT_EXPR:
6226 case UNGE_EXPR:
6227 case UNGT_EXPR:
6228 if (!HONOR_NANS (arg1))
6230 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6231 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6232 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6233 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6234 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6235 comp_op1, comp_op0);
6236 return fold_convert_loc (loc, type, tem);
6238 break;
6239 case UNEQ_EXPR:
6240 if (!HONOR_NANS (arg1))
6241 return fold_convert_loc (loc, type, arg2);
6242 break;
6243 case LTGT_EXPR:
6244 if (!HONOR_NANS (arg1))
6245 return fold_convert_loc (loc, type, arg1);
6246 break;
6247 default:
6248 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6249 break;
6253 return NULL_TREE;
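/* Illustrative sketch (not part of the original file): the min/max
   rewrites above on integers, where they are exact.  For floats they
   are fenced off by HONOR_NANS: with A = NaN and B a number,
   "A < B ? A : B" yields B, while a real min () would produce NaN.  */
static int example_imin (int a, int b) { return a < b ? a : b; }
static int example_imax (int a, int b) { return a > b ? a : b; }

static int
example_cond_to_minmax (int a, int b)
{
  return ((a <= b ? a : b) == example_imin (a, b))
	 && ((a >= b ? a : b) == example_imax (a, b));	/* Always 1.  */
}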
6258 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6259 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6260 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6261 false) >= 2)
6262 #endif
6264 /* EXP is some logical combination of boolean tests. See if we can
6265 merge it into some range test. Return the new tree if so. */
6267 static tree
6268 fold_range_test (location_t loc, enum tree_code code, tree type,
6269 tree op0, tree op1)
6271 int or_op = (code == TRUTH_ORIF_EXPR
6272 || code == TRUTH_OR_EXPR);
6273 int in0_p, in1_p, in_p;
6274 tree low0, low1, low, high0, high1, high;
6275 bool strict_overflow_p = false;
6276 tree tem, lhs, rhs;
6277 const char * const warnmsg = G_("assuming signed overflow does not occur "
6278 "when simplifying range test");
6280 if (!INTEGRAL_TYPE_P (type))
6281 return 0;
6283 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6284 /* If op0 is known true or false and this is a short-circuiting
6285 operation we must not merge with op1 since that makes side-effects
6286 unconditional. So special-case this. */
6287 if (!lhs
6288 && ((code == TRUTH_ORIF_EXPR && in0_p)
6289 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6290 return op0;
6291 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6293 /* If this is an OR operation, invert both sides; we will invert
6294 again at the end. */
6295 if (or_op)
6296 in0_p = ! in0_p, in1_p = ! in1_p;
6298 /* If both expressions are the same, if we can merge the ranges, and we
6299 can build the range test, return it or it inverted. If one of the
6300 ranges is always true or always false, consider it to be the same
6301 expression as the other. */
6302 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6303 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6304 in1_p, low1, high1)
6305 && (tem = (build_range_check (loc, type,
6306 lhs != 0 ? lhs
6307 : rhs != 0 ? rhs : integer_zero_node,
6308 in_p, low, high))) != 0)
6310 if (strict_overflow_p)
6311 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6312 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6315 /* On machines where the branch cost is expensive, if this is a
6316 short-circuited branch and the underlying object on both sides
6317 is the same, make a non-short-circuit operation. */
6318 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6319 if (param_logical_op_non_short_circuit != -1)
6320 logical_op_non_short_circuit
6321 = param_logical_op_non_short_circuit;
6322 if (logical_op_non_short_circuit
6323 && !sanitize_coverage_p ()
6324 && lhs != 0 && rhs != 0
6325 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6326 && operand_equal_p (lhs, rhs, 0))
6328 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6329 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6330 which cases we can't do this. */
6331 if (simple_operand_p (lhs))
6332 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6333 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6334 type, op0, op1);
6336 else if (!lang_hooks.decls.global_bindings_p ()
6337 && !CONTAINS_PLACEHOLDER_P (lhs))
6339 tree common = save_expr (lhs);
6341 if ((lhs = build_range_check (loc, type, common,
6342 or_op ? ! in0_p : in0_p,
6343 low0, high0)) != 0
6344 && (rhs = build_range_check (loc, type, common,
6345 or_op ? ! in1_p : in1_p,
6346 low1, high1)) != 0)
6348 if (strict_overflow_p)
6349 fold_overflow_warning (warnmsg,
6350 WARN_STRICT_OVERFLOW_COMPARISON);
6351 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6353 type, lhs, rhs);
6358 return 0;
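/* Illustrative sketch (not part of the original file): the classic
   payoff of fold_range_test.  Two ordered comparisons against the
   same operand become one range test, which build_range_check then
   lowers to a single unsigned comparison; on branch-expensive
   targets a short-circuit "a && b" over simple operands may likewise
   become a branchless "a & b".  */
static int
example_digit_test (int ch)
{
  int slow = ch >= '0' && ch <= '9';	/* Two tests, two branches.  */
  int fast = (unsigned) ch - '0' <= 9u;	/* One compare, no branch.  */
  return slow == fast;			/* Always 1.  */
}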
6361 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6362 bit value. Arrange things so the extra bits will be set to zero if and
6363 only if C is sign-extended to its full width. If MASK is nonzero,
6364 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6366 static tree
6367 unextend (tree c, int p, int unsignedp, tree mask)
6369 tree type = TREE_TYPE (c);
6370 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6371 tree temp;
6373 if (p == modesize || unsignedp)
6374 return c;
6376 /* We work by getting just the sign bit into the low-order bit, then
6377 into the high-order bit, then sign-extend. We then XOR that value
6378 with C. */
6379 temp = build_int_cst (TREE_TYPE (c),
6380 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6382 /* We must use a signed type in order to get an arithmetic right shift.
6383 However, we must also avoid introducing accidental overflows, so that
6384 a subsequent call to integer_zerop will work. Hence we must
6385 do the type conversion here. At this point, the constant is either
6386 zero or one, and the conversion to a signed type can never overflow.
6387 We could get an overflow if this conversion is done anywhere else. */
6388 if (TYPE_UNSIGNED (type))
6389 temp = fold_convert (signed_type_for (type), temp);
6391 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6392 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6393 if (mask != 0)
6394 temp = const_binop (BIT_AND_EXPR, temp,
6395 fold_convert (TREE_TYPE (c), mask));
6396 /* If necessary, convert the type back to match the type of C. */
6397 if (TYPE_UNSIGNED (type))
6398 temp = fold_convert (type, temp);
6400 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
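/* Illustrative sketch (not part of the original file): the same
   sign-bit manipulation unextend builds with trees, written on plain
   integers.  Isolate the sign bit of a P-bit field, then XOR and
   subtract to sign-extend; the final conversion to int relies on
   GCC's modulo semantics for out-of-range unsigned-to-signed.  */
static int
example_sign_extend (unsigned v, int p)
{
  unsigned m = 1u << (p - 1);		/* Sign bit of the P-bit field.  */
  unsigned x = v & (m | (m - 1));	/* Keep the low P bits.  */
  return (int) ((x ^ m) - m);		/* Sign-extended result.  */
}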
6403 /* For an expression that has the form
6404 (A && B) || ~B
6405 or
6406 (A || B) && ~B,
6407 we can drop one of the inner expressions and simplify to
6408 A || ~B
6409 or
6410 A && ~B
6411 LOC is the location of the resulting expression. OP is the inner
6412 logical operation; the left-hand side in the examples above, while CMPOP
6413 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6414 removing a condition that guards another, as in
6415 (A != NULL && A->...) || A == NULL
6416 which we must not transform. If RHS_ONLY is true, only eliminate the
6417 right-most operand of the inner logical operation. */
6419 static tree
6420 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6421 bool rhs_only)
6423 tree type = TREE_TYPE (cmpop);
6424 enum tree_code code = TREE_CODE (cmpop);
6425 enum tree_code truthop_code = TREE_CODE (op);
6426 tree lhs = TREE_OPERAND (op, 0);
6427 tree rhs = TREE_OPERAND (op, 1);
6428 tree orig_lhs = lhs, orig_rhs = rhs;
6429 enum tree_code rhs_code = TREE_CODE (rhs);
6430 enum tree_code lhs_code = TREE_CODE (lhs);
6431 enum tree_code inv_code;
6433 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6434 return NULL_TREE;
6436 if (TREE_CODE_CLASS (code) != tcc_comparison)
6437 return NULL_TREE;
6439 if (rhs_code == truthop_code)
6441 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6442 if (newrhs != NULL_TREE)
6444 rhs = newrhs;
6445 rhs_code = TREE_CODE (rhs);
6448 if (lhs_code == truthop_code && !rhs_only)
6450 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6451 if (newlhs != NULL_TREE)
6453 lhs = newlhs;
6454 lhs_code = TREE_CODE (lhs);
6458 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6459 if (inv_code == rhs_code
6460 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6461 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6462 return lhs;
6463 if (!rhs_only && inv_code == lhs_code
6464 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6465 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6466 return rhs;
6467 if (rhs != orig_rhs || lhs != orig_lhs)
6468 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6469 lhs, rhs);
6470 return NULL_TREE;
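/* Illustrative sketch (not part of the original file): a brute-force
   check of the simplification above over all boolean inputs,
   (A && B) || !B == A || !B and (A || B) && !B == A && !B.  */
static int
example_opposite_arm (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      if ((((a && b) || !b) != (a || !b))
	  || (((a || b) && !b) != (a && !b)))
	return 0;
  return 1;	/* Both identities hold.  */
}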
6473 /* Find ways of folding logical expressions of LHS and RHS:
6474 Try to merge two comparisons to the same innermost item.
6475 Look for range tests like "ch >= '0' && ch <= '9'".
6476 Look for combinations of simple terms on machines with expensive branches
6477 and evaluate the RHS unconditionally.
6479 For example, if we have p->a == 2 && p->b == 4 and we can make an
6480 object large enough to span both A and B, we can do this with a comparison
6481 against the object ANDed with a mask.
6483 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6484 operations to do this with one comparison.
6486 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6487 function and the one above.
6489 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6490 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6492 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6493 two operands.
6495 We return the simplified tree or 0 if no optimization is possible. */
6497 static tree
6498 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6499 tree lhs, tree rhs)
6501 /* If this is the "or" of two comparisons, we can do something if
6502 the comparisons are NE_EXPR. If this is the "and", we can do something
6503 if the comparisons are EQ_EXPR. I.e.,
6504 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6506 WANTED_CODE is this operation code. For single bit fields, we can
6507 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6508 comparison for one-bit fields. */
6510 enum tree_code wanted_code;
6511 enum tree_code lcode, rcode;
6512 tree ll_arg, lr_arg, rl_arg, rr_arg;
6513 tree ll_inner, lr_inner, rl_inner, rr_inner;
6514 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6515 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6516 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6517 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6518 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6519 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6520 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6521 scalar_int_mode lnmode, rnmode;
6522 tree ll_mask, lr_mask, rl_mask, rr_mask;
6523 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6524 tree l_const, r_const;
6525 tree lntype, rntype, result;
6526 HOST_WIDE_INT first_bit, end_bit;
6527 int volatilep;
6529 /* Start by getting the comparison codes. Fail if anything is volatile.
6530 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6531 it were surrounded with a NE_EXPR. */
6533 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6534 return 0;
6536 lcode = TREE_CODE (lhs);
6537 rcode = TREE_CODE (rhs);
6539 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6541 lhs = build2 (NE_EXPR, truth_type, lhs,
6542 build_int_cst (TREE_TYPE (lhs), 0));
6543 lcode = NE_EXPR;
6546 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6548 rhs = build2 (NE_EXPR, truth_type, rhs,
6549 build_int_cst (TREE_TYPE (rhs), 0));
6550 rcode = NE_EXPR;
6553 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6554 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6555 return 0;
6557 ll_arg = TREE_OPERAND (lhs, 0);
6558 lr_arg = TREE_OPERAND (lhs, 1);
6559 rl_arg = TREE_OPERAND (rhs, 0);
6560 rr_arg = TREE_OPERAND (rhs, 1);
6562 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6563 if (simple_operand_p (ll_arg)
6564 && simple_operand_p (lr_arg))
6566 if (operand_equal_p (ll_arg, rl_arg, 0)
6567 && operand_equal_p (lr_arg, rr_arg, 0))
6569 result = combine_comparisons (loc, code, lcode, rcode,
6570 truth_type, ll_arg, lr_arg);
6571 if (result)
6572 return result;
6574 else if (operand_equal_p (ll_arg, rr_arg, 0)
6575 && operand_equal_p (lr_arg, rl_arg, 0))
6577 result = combine_comparisons (loc, code, lcode,
6578 swap_tree_comparison (rcode),
6579 truth_type, ll_arg, lr_arg);
6580 if (result)
6581 return result;
6585 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6586 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6588 /* If the RHS can be evaluated unconditionally and its operands are
6589 simple, it wins to evaluate the RHS unconditionally on machines
6590 with expensive branches. In this case, this isn't a comparison
6591 that can be merged. */
6593 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6594 false) >= 2
6595 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6596 && simple_operand_p (rl_arg)
6597 && simple_operand_p (rr_arg))
6599 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6600 if (code == TRUTH_OR_EXPR
6601 && lcode == NE_EXPR && integer_zerop (lr_arg)
6602 && rcode == NE_EXPR && integer_zerop (rr_arg)
6603 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6604 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6605 return build2_loc (loc, NE_EXPR, truth_type,
6606 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6607 ll_arg, rl_arg),
6608 build_int_cst (TREE_TYPE (ll_arg), 0));
6610 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6611 if (code == TRUTH_AND_EXPR
6612 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6613 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6614 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6615 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6616 return build2_loc (loc, EQ_EXPR, truth_type,
6617 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6618 ll_arg, rl_arg),
6619 build_int_cst (TREE_TYPE (ll_arg), 0));
6622 /* See if the comparisons can be merged. Then get all the parameters for
6623 each side. */
6625 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6626 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6627 return 0;
6629 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6630 volatilep = 0;
6631 ll_inner = decode_field_reference (loc, &ll_arg,
6632 &ll_bitsize, &ll_bitpos, &ll_mode,
6633 &ll_unsignedp, &ll_reversep, &volatilep,
6634 &ll_mask, &ll_and_mask);
6635 lr_inner = decode_field_reference (loc, &lr_arg,
6636 &lr_bitsize, &lr_bitpos, &lr_mode,
6637 &lr_unsignedp, &lr_reversep, &volatilep,
6638 &lr_mask, &lr_and_mask);
6639 rl_inner = decode_field_reference (loc, &rl_arg,
6640 &rl_bitsize, &rl_bitpos, &rl_mode,
6641 &rl_unsignedp, &rl_reversep, &volatilep,
6642 &rl_mask, &rl_and_mask);
6643 rr_inner = decode_field_reference (loc, &rr_arg,
6644 &rr_bitsize, &rr_bitpos, &rr_mode,
6645 &rr_unsignedp, &rr_reversep, &volatilep,
6646 &rr_mask, &rr_and_mask);
6648 /* The inner operation on the lhs of each comparison must be the
6649 same if we are to be able to do anything.
6650 Then see if we have constants. If not, the same must be true for
6651 the rhs's. */
6652 if (volatilep
6653 || ll_reversep != rl_reversep
6654 || ll_inner == 0 || rl_inner == 0
6655 || ! operand_equal_p (ll_inner, rl_inner, 0))
6656 return 0;
6658 if (TREE_CODE (lr_arg) == INTEGER_CST
6659 && TREE_CODE (rr_arg) == INTEGER_CST)
6661 l_const = lr_arg, r_const = rr_arg;
6662 lr_reversep = ll_reversep;
6664 else if (lr_reversep != rr_reversep
6665 || lr_inner == 0 || rr_inner == 0
6666 || ! operand_equal_p (lr_inner, rr_inner, 0))
6667 return 0;
6668 else
6669 l_const = r_const = 0;
6671 /* If either comparison code is not correct for our logical operation,
6672 fail. However, we can convert a one-bit comparison against zero into
6673 the opposite comparison against that bit being set in the field. */
6675 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6676 if (lcode != wanted_code)
6678 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6680 /* Make the left operand unsigned, since we are only interested
6681 in the value of one bit. Otherwise we are doing the wrong
6682 thing below. */
6683 ll_unsignedp = 1;
6684 l_const = ll_mask;
6686 else
6687 return 0;
6690 /* This is analogous to the code for l_const above. */
6691 if (rcode != wanted_code)
6693 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6695 rl_unsignedp = 1;
6696 r_const = rl_mask;
6698 else
6699 return 0;
6702 /* See if we can find a mode that contains both fields being compared on
6703 the left. If we can't, fail. Otherwise, update all constants and masks
6704 to be relative to a field of that size. */
6705 first_bit = MIN (ll_bitpos, rl_bitpos);
6706 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6707 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6708 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6709 volatilep, &lnmode))
6710 return 0;
6712 lnbitsize = GET_MODE_BITSIZE (lnmode);
6713 lnbitpos = first_bit & ~ (lnbitsize - 1);
6714 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6715 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6717 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6719 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6720 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6723 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6724 size_int (xll_bitpos));
6725 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6726 size_int (xrl_bitpos));
6727 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6728 return 0;
6730 if (l_const)
6732 l_const = fold_convert_loc (loc, lntype, l_const);
6733 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6734 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6735 if (l_const == NULL_TREE)
6736 return 0;
6737 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6738 fold_build1_loc (loc, BIT_NOT_EXPR,
6739 lntype, ll_mask))))
6741 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6743 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6746 if (r_const)
6748 r_const = fold_convert_loc (loc, lntype, r_const);
6749 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6750 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6751 if (r_const == NULL_TREE)
6752 return 0;
6753 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6754 fold_build1_loc (loc, BIT_NOT_EXPR,
6755 lntype, rl_mask))))
6757 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6759 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6763 /* If the right sides are not constant, do the same for them. Also,
6764 disallow this optimization if a size, signedness or storage order
6765 mismatch occurs between the left and right sides. */
6766 if (l_const == 0)
6768 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6769 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6770 || ll_reversep != lr_reversep
6771 /* Make sure the two fields on the right
6772 correspond to the left without being swapped. */
6773 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6774 return 0;
6776 first_bit = MIN (lr_bitpos, rr_bitpos);
6777 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6778 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6779 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6780 volatilep, &rnmode))
6781 return 0;
6783 rnbitsize = GET_MODE_BITSIZE (rnmode);
6784 rnbitpos = first_bit & ~ (rnbitsize - 1);
6785 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6786 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6788 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6790 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6791 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6794 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6795 rntype, lr_mask),
6796 size_int (xlr_bitpos));
6797 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6798 rntype, rr_mask),
6799 size_int (xrr_bitpos));
6800 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6801 return 0;
6803 /* Make a mask that corresponds to both fields being compared.
6804 Do this for both items being compared. If the operands are the
6805 same size and the bits being compared are in the same position
6806 then we can do this by masking both and comparing the masked
6807 results. */
6808 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6809 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6810 if (lnbitsize == rnbitsize
6811 && xll_bitpos == xlr_bitpos
6812 && lnbitpos >= 0
6813 && rnbitpos >= 0)
6815 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6816 lntype, lnbitsize, lnbitpos,
6817 ll_unsignedp || rl_unsignedp, ll_reversep);
6818 if (! all_ones_mask_p (ll_mask, lnbitsize))
6819 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6821 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6822 rntype, rnbitsize, rnbitpos,
6823 lr_unsignedp || rr_unsignedp, lr_reversep);
6824 if (! all_ones_mask_p (lr_mask, rnbitsize))
6825 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6827 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6830 /* There is still another way we can do something: If both pairs of
6831 fields being compared are adjacent, we may be able to make a wider
6832 field containing them both.
6834 Note that we still must mask the lhs/rhs expressions. Furthermore,
6835 the mask must be shifted to account for the shift done by
6836 make_bit_field_ref. */
6837 if (((ll_bitsize + ll_bitpos == rl_bitpos
6838 && lr_bitsize + lr_bitpos == rr_bitpos)
6839 || (ll_bitpos == rl_bitpos + rl_bitsize
6840 && lr_bitpos == rr_bitpos + rr_bitsize))
6841 && ll_bitpos >= 0
6842 && rl_bitpos >= 0
6843 && lr_bitpos >= 0
6844 && rr_bitpos >= 0)
6846 tree type;
6848 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6849 ll_bitsize + rl_bitsize,
6850 MIN (ll_bitpos, rl_bitpos),
6851 ll_unsignedp, ll_reversep);
6852 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6853 lr_bitsize + rr_bitsize,
6854 MIN (lr_bitpos, rr_bitpos),
6855 lr_unsignedp, lr_reversep);
6857 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6858 size_int (MIN (xll_bitpos, xrl_bitpos)));
6859 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6860 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6861 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6862 return 0;
6864 /* Convert to the smaller type before masking out unwanted bits. */
6865 type = lntype;
6866 if (lntype != rntype)
6868 if (lnbitsize > rnbitsize)
6870 lhs = fold_convert_loc (loc, rntype, lhs);
6871 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6872 type = rntype;
6874 else if (lnbitsize < rnbitsize)
6876 rhs = fold_convert_loc (loc, lntype, rhs);
6877 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6878 type = lntype;
6882 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6883 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6885 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6886 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6888 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6891 return 0;
6894 /* Handle the case of comparisons with constants. If there is something in
6895 common between the masks, those bits of the constants must be the same.
6896 If not, the condition is always false. Test for this to avoid generating
6897 incorrect code below. */
6898 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6899 if (! integer_zerop (result)
6900 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6901 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6903 if (wanted_code == NE_EXPR)
6905 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6906 return constant_boolean_node (true, truth_type);
6908 else
6910 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6911 return constant_boolean_node (false, truth_type);
6915 if (lnbitpos < 0)
6916 return 0;
6918 /* Construct the expression we will return. First get the component
6919 reference we will make. Unless the mask is all ones the width of
6920 that field, perform the mask operation. Then compare with the
6921 merged constant. */
6922 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6923 lntype, lnbitsize, lnbitpos,
6924 ll_unsignedp || rl_unsignedp, ll_reversep);
6926 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6927 if (! all_ones_mask_p (ll_mask, lnbitsize))
6928 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6930 return build2_loc (loc, wanted_code, truth_type, result,
6931 const_binop (BIT_IOR_EXPR, l_const, r_const));
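/* Illustrative sketch (not part of the original file): the effect of
   fold_truth_andor_1 modelled on one word, with the two "fields" as
   explicit shifts and masks (the merged constant below assumes the
   low byte is the first field).  Two equality tests against
   constants collapse into a single mask-and-compare.  */
static int
example_merged_field_test (unsigned w)
{
  int slow = (w & 0xff) == 2 && ((w >> 8) & 0xff) == 4;
  int fast = (w & 0xffff) == 0x0402;	/* One load, one compare.  */
  return slow == fast;			/* Always 1.  */
}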
6934 /* T is an integer expression that is being multiplied or divided by, or
6935 taken modulo, a constant C (CODE says which operation and what kind of
6936 divide or modulus). See if we can eliminate that operation by folding it with
6937 other operations already in T. WIDE_TYPE, if non-null, is a type that
6938 should be used for the computation if wider than our type.
6940 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6941 (X * 2) + (Y * 4). We must, however, be assured that either the original
6942 expression would not overflow or that overflow is undefined for the type
6943 in the language in question.
6945 If we return a non-null expression, it is an equivalent form of the
6946 original computation, but need not be in the original type.
6948 We set *STRICT_OVERFLOW_P to true if the return value depends on
6949 signed overflow being undefined. Otherwise we do not change
6950 *STRICT_OVERFLOW_P. */
6952 static tree
6953 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6954 bool *strict_overflow_p)
6956 /* To avoid exponential search depth, refuse to allow recursion past
6957 three levels. Beyond that (1) it's highly unlikely that we'll find
6958 something interesting and (2) we've probably processed it before
6959 when we built the inner expression. */
6961 static int depth;
6962 tree ret;
6964 if (depth > 3)
6965 return NULL;
6967 depth++;
6968 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6969 depth--;
6971 return ret;
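/* Illustrative sketch (not part of the original file): the
   transformation described in the comment above, on concrete values.
   Dividing (x * 8) + (y * 16) by 4 distributes into (x * 2) + (y * 4);
   the result is only guaranteed equal when the original expression
   does not overflow, or overflow is undefined and assumed away (the
   *STRICT_OVERFLOW_P case).  */
static int
example_extract_muldiv (long x, long y)
{
  long original = (x * 8 + y * 16) / 4;
  long folded = x * 2 + y * 4;
  return original == folded;	/* 1 absent overflow in the original.  */
}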
6974 static tree
6975 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6976 bool *strict_overflow_p)
6978 tree type = TREE_TYPE (t);
6979 enum tree_code tcode = TREE_CODE (t);
6980 tree ctype = type;
6981 if (wide_type)
6983 if (TREE_CODE (type) == BITINT_TYPE
6984 || TREE_CODE (wide_type) == BITINT_TYPE)
6986 if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6987 ctype = wide_type;
6989 else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6990 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6991 ctype = wide_type;
6993 tree t1, t2;
6994 bool same_p = tcode == code;
6995 tree op0 = NULL_TREE, op1 = NULL_TREE;
6996 bool sub_strict_overflow_p;
6998 /* Don't deal with constants of zero here; they confuse the code below. */
6999 if (integer_zerop (c))
7000 return NULL_TREE;
7002 if (TREE_CODE_CLASS (tcode) == tcc_unary)
7003 op0 = TREE_OPERAND (t, 0);
7005 if (TREE_CODE_CLASS (tcode) == tcc_binary)
7006 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
7008 /* Note that we need not handle conditional operations here since fold
7009 already handles those cases. So just do arithmetic here. */
7010 switch (tcode)
7012 case INTEGER_CST:
7013 /* For a constant, we can always simplify if we are a multiply
7014 or (for divide and modulus) if it is a multiple of our constant. */
7015 if (code == MULT_EXPR
7016 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
7017 TYPE_SIGN (type)))
7019 tree tem = const_binop (code, fold_convert (ctype, t),
7020 fold_convert (ctype, c));
7021 /* If the multiplication overflowed, we lost information on it.
7022 See PR68142 and PR69845. */
7023 if (TREE_OVERFLOW (tem))
7024 return NULL_TREE;
7025 return tem;
7027 break;
7029 CASE_CONVERT: case NON_LVALUE_EXPR:
7030 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
7031 break;
7032 /* If op0 is an expression ... */
7033 if ((COMPARISON_CLASS_P (op0)
7034 || UNARY_CLASS_P (op0)
7035 || BINARY_CLASS_P (op0)
7036 || VL_EXP_CLASS_P (op0)
7037 || EXPRESSION_CLASS_P (op0))
7038 /* ... and has wrapping overflow, and its type is smaller
7039 than ctype, then we cannot pass through as widening. */
7040 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
7041 && (TYPE_PRECISION (ctype)
7042 > TYPE_PRECISION (TREE_TYPE (op0))))
7043 /* ... or this is a truncation (t is narrower than op0),
7044 then we cannot pass through this narrowing. */
7045 || (TYPE_PRECISION (type)
7046 < TYPE_PRECISION (TREE_TYPE (op0)))
7047 /* ... or signedness changes for division or modulus,
7048 then we cannot pass through this conversion. */
7049 || (code != MULT_EXPR
7050 && (TYPE_UNSIGNED (ctype)
7051 != TYPE_UNSIGNED (TREE_TYPE (op0))))
7052 /* ... or has undefined overflow while the converted to
7053 type has not, we cannot do the operation in the inner type
7054 as that would introduce undefined overflow. */
7055 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
7056 && !TYPE_OVERFLOW_UNDEFINED (type))))
7057 break;
7059 /* Pass the constant down and see if we can make a simplification. If
7060 we can, replace this expression with the inner simplification for
7061 possible later conversion to our or some other type. */
7062 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
7063 && TREE_CODE (t2) == INTEGER_CST
7064 && !TREE_OVERFLOW (t2)
7065 && (t1 = extract_muldiv (op0, t2, code,
7066 code == MULT_EXPR ? ctype : NULL_TREE,
7067 strict_overflow_p)) != 0)
7068 return t1;
7069 break;
7071 case ABS_EXPR:
7072 /* If widening the type changes it from signed to unsigned, then we
7073 must avoid building ABS_EXPR itself as unsigned. */
7074 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
7076 tree cstype = (*signed_type_for) (ctype);
7077 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
7078 != 0)
7080 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
7081 return fold_convert (ctype, t1);
7083 break;
7085 /* If the constant is negative, we cannot simplify this. */
7086 if (tree_int_cst_sgn (c) == -1)
7087 break;
7088 /* FALLTHROUGH */
7089 case NEGATE_EXPR:
7090 /* For division and modulus, type can't be unsigned, as e.g.
7091 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
7092 For signed types, even with wrapping overflow, this is fine. */
7093 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
7094 break;
7095 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
7096 != 0)
7097 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
7098 break;
7100 case MIN_EXPR: case MAX_EXPR:
7101 /* If widening the type changes the signedness, then we can't perform
7102 this optimization as that changes the result. */
7103 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
7104 break;
7106 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
7107 sub_strict_overflow_p = false;
7108 if ((t1 = extract_muldiv (op0, c, code, wide_type,
7109 &sub_strict_overflow_p)) != 0
7110 && (t2 = extract_muldiv (op1, c, code, wide_type,
7111 &sub_strict_overflow_p)) != 0)
7113 if (tree_int_cst_sgn (c) < 0)
7114 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
7115 if (sub_strict_overflow_p)
7116 *strict_overflow_p = true;
7117 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7118 fold_convert (ctype, t2));
7120 break;
7122 case LSHIFT_EXPR: case RSHIFT_EXPR:
7123 /* If the second operand is constant, this is a multiplication
7124 or floor division, by a power of two, so we can treat it that
7125 way unless the multiplier or divisor overflows. Signed
7126 left-shift overflow is implementation-defined rather than
7127 undefined in C90, so do not convert signed left shift into
7128 multiplication. */
7129 if (TREE_CODE (op1) == INTEGER_CST
7130 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7131 /* const_binop may not detect overflow correctly,
7132 so check for it explicitly here. */
7133 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7134 wi::to_wide (op1))
7135 && (t1 = fold_convert (ctype,
7136 const_binop (LSHIFT_EXPR, size_one_node,
7137 op1))) != 0
7138 && !TREE_OVERFLOW (t1))
7139 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7140 ? MULT_EXPR : FLOOR_DIV_EXPR,
7141 ctype,
7142 fold_convert (ctype, op0),
7143 t1),
7144 c, code, wide_type, strict_overflow_p);
7145 break;
7147 case PLUS_EXPR: case MINUS_EXPR:
7148 /* See if we can eliminate the operation on both sides. If we can, we
7149 can return a new PLUS or MINUS. If we can't, the only remaining
7150 cases where we can do anything are if the second operand is a
7151 constant. */
7152 sub_strict_overflow_p = false;
7153 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7154 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7155 if (t1 != 0 && t2 != 0
7156 && TYPE_OVERFLOW_WRAPS (ctype)
7157 && (code == MULT_EXPR
7158 /* If not multiplication, we can only do this if both operands
7159 are divisible by c. */
7160 || (multiple_of_p (ctype, op0, c)
7161 && multiple_of_p (ctype, op1, c))))
7163 if (sub_strict_overflow_p)
7164 *strict_overflow_p = true;
7165 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7166 fold_convert (ctype, t2));
7169 /* If this was a subtraction, negate OP1 and set it to be an addition.
7170 This simplifies the logic below. */
7171 if (tcode == MINUS_EXPR)
7173 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7174 /* If OP1 was not easily negatable, the constant may be OP0. */
7175 if (TREE_CODE (op0) == INTEGER_CST)
7177 std::swap (op0, op1);
7178 std::swap (t1, t2);
7182 if (TREE_CODE (op1) != INTEGER_CST)
7183 break;
7185 /* If either OP1 or C are negative, this optimization is not safe for
7186 some of the division and remainder types, while for others we need
7187 to change the code. */
7188 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7190 if (code == CEIL_DIV_EXPR)
7191 code = FLOOR_DIV_EXPR;
7192 else if (code == FLOOR_DIV_EXPR)
7193 code = CEIL_DIV_EXPR;
7194 else if (code != MULT_EXPR
7195 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7196 break;
7199 /* If it's a multiply or a division/modulus operation of a multiple
7200 of our constant, do the operation and verify it doesn't overflow. */
7201 if (code == MULT_EXPR
7202 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7203 TYPE_SIGN (type)))
7205 op1 = const_binop (code, fold_convert (ctype, op1),
7206 fold_convert (ctype, c));
7207 /* We allow the constant to overflow with wrapping semantics. */
7208 if (op1 == 0
7209 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7210 break;
7212 else
7213 break;
7215 /* If we have an unsigned type, we cannot widen the operation since it
7216 will change the result if the original computation overflowed. */
7217 if (TYPE_UNSIGNED (ctype) && ctype != type)
7218 break;
7220 /* The last case is if we are a multiply. In that case, we can
7221 apply the distributive law to commute the multiply and addition
7222 if the multiplication of the constants doesn't overflow
7223 and overflow is defined. With undefined overflow
7224 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7225 But fold_plusminus_mult_expr would factor back any power-of-two
7226 value so do not distribute in the first place in this case. */
7227 if (code == MULT_EXPR
7228 && TYPE_OVERFLOW_WRAPS (ctype)
7229 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7230 return fold_build2 (tcode, ctype,
7231 fold_build2 (code, ctype,
7232 fold_convert (ctype, op0),
7233 fold_convert (ctype, c)),
7234 op1);
7236 break;
7238 case MULT_EXPR:
7239 /* We have a special case here if we are doing something like
7240 (C * 8) % 4 since we know that's zero. */
7241 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7242 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7243 /* If the multiplication can overflow we cannot optimize this. */
7244 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7245 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7246 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7247 TYPE_SIGN (type)))
7249 *strict_overflow_p = true;
7250 return omit_one_operand (type, integer_zero_node, op0);
7253 /* ... fall through ... */
7255 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7256 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7257 /* If we can extract our operation from the LHS, do so and return a
7258 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7259 do something only if the second operand is a constant. */
7260 if (same_p
7261 && TYPE_OVERFLOW_WRAPS (ctype)
7262 && (t1 = extract_muldiv (op0, c, code, wide_type,
7263 strict_overflow_p)) != 0)
7264 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7265 fold_convert (ctype, op1));
7266 else if (tcode == MULT_EXPR && code == MULT_EXPR
7267 && TYPE_OVERFLOW_WRAPS (ctype)
7268 && (t1 = extract_muldiv (op1, c, code, wide_type,
7269 strict_overflow_p)) != 0)
7270 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7271 fold_convert (ctype, t1));
7272 else if (TREE_CODE (op1) != INTEGER_CST)
7273 return 0;
7275 /* If these are the same operation types, we can associate them
7276 assuming no overflow. */
7277 if (tcode == code)
7279 bool overflow_p = false;
7280 wi::overflow_type overflow_mul;
7281 signop sign = TYPE_SIGN (ctype);
7282 unsigned prec = TYPE_PRECISION (ctype);
7283 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7284 wi::to_wide (c, prec),
7285 sign, &overflow_mul);
7286 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7287 if (overflow_mul
7288 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7289 overflow_p = true;
7290 if (!overflow_p)
7291 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7292 wide_int_to_tree (ctype, mul));
7295 /* If these operations "cancel" each other, we have the main
7296 optimizations of this pass, which occur when either constant is a
7297 multiple of the other, in which case we replace this with either an
7298 operation of CODE or TCODE.
7300 If we have an unsigned type, we cannot do this since it will change
7301 the result if the original computation overflowed. */
7302 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7303 && !TYPE_OVERFLOW_SANITIZED (ctype)
7304 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7305 || (tcode == MULT_EXPR
7306 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7307 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7308 && code != MULT_EXPR)))
7310 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7311 TYPE_SIGN (type)))
7313 *strict_overflow_p = true;
7314 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7315 fold_convert (ctype,
7316 const_binop (TRUNC_DIV_EXPR,
7317 op1, c)));
7319 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7320 TYPE_SIGN (type)))
7322 *strict_overflow_p = true;
7323 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7324 fold_convert (ctype,
7325 const_binop (TRUNC_DIV_EXPR,
7326 c, op1)));
7329 break;
7331 default:
7332 break;
7335 return 0;
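
A self-contained illustration (standalone C++, not part of this file) of the "cancel" case above: when signed overflow is undefined, (X * C1) / C2 folds to X * (C1/C2) if C2 divides C1, and to X / (C2/C1) if C1 divides C2. Plain truncating division stands in for TRUNC_DIV_EXPR.

#include <cassert>
#include <cstdint>

/* Fold (x * c1) / c2 the way the "cancel" rule does.  */
static int64_t
fold_mul_div (int64_t x, int64_t c1, int64_t c2)
{
  if (c1 % c2 == 0)
    return x * (c1 / c2);	/* (x * 12) / 4 -> x * 3 */
  if (c2 % c1 == 0)
    return x / (c2 / c1);	/* (x * 4) / 12 -> x / 3 */
  return (x * c1) / c2;		/* no simplification */
}

int
main ()
{
  for (int64_t x = -5; x <= 5; x++)
    {
      assert (fold_mul_div (x, 12, 4) == (x * 12) / 4);
      assert (fold_mul_div (x, 4, 12) == (x * 4) / 12);
    }
}
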
7338 /* Return a node which has the indicated constant VALUE (either 0 or
7339 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7340 and is of the indicated TYPE. */
7342 tree
7343 constant_boolean_node (bool value, tree type)
7345 if (type == integer_type_node)
7346 return value ? integer_one_node : integer_zero_node;
7347 else if (type == boolean_type_node)
7348 return value ? boolean_true_node : boolean_false_node;
7349 else if (VECTOR_TYPE_P (type))
7350 return build_vector_from_val (type,
7351 build_int_cst (TREE_TYPE (type),
7352 value ? -1 : 0));
7353 else
7354 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7358 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7359 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7360 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7361 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7362 COND is the first argument to CODE; otherwise (as in the example
7363 given here), it is the second argument. TYPE is the type of the
7364 original expression. Return NULL_TREE if no simplification is
7365 possible. */
7367 static tree
7368 fold_binary_op_with_conditional_arg (location_t loc,
7369 enum tree_code code,
7370 tree type, tree op0, tree op1,
7371 tree cond, tree arg, int cond_first_p)
7373 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7374 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7375 tree test, true_value, false_value;
7376 tree lhs = NULL_TREE;
7377 tree rhs = NULL_TREE;
7378 enum tree_code cond_code = COND_EXPR;
7380 /* Do not move possibly trapping operations into the conditional as this
7381 pessimizes code and causes gimplification issues when applied late. */
7382 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7383 ANY_INTEGRAL_TYPE_P (type)
7384 && TYPE_OVERFLOW_TRAPS (type), op1))
7385 return NULL_TREE;
7387 if (TREE_CODE (cond) == COND_EXPR
7388 || TREE_CODE (cond) == VEC_COND_EXPR)
7390 test = TREE_OPERAND (cond, 0);
7391 true_value = TREE_OPERAND (cond, 1);
7392 false_value = TREE_OPERAND (cond, 2);
7393 /* If this operand throws an exception, then it does not make
7394 sense to try to perform a logical or arithmetic operation
7395 involving it. */
7396 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7397 lhs = true_value;
7398 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7399 rhs = false_value;
7401 else if (!(TREE_CODE (type) != VECTOR_TYPE
7402 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7404 tree testtype = TREE_TYPE (cond);
7405 test = cond;
7406 true_value = constant_boolean_node (true, testtype);
7407 false_value = constant_boolean_node (false, testtype);
7409 else
7410 /* Detect the case of mixing vector and scalar types - bail out. */
7411 return NULL_TREE;
7413 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7414 cond_code = VEC_COND_EXPR;
7416 /* This transformation is only worthwhile if we don't have to wrap ARG
7417 in a SAVE_EXPR and the operation can be simplified without recursing
7418 on at least one of the branches once it's pushed inside the COND_EXPR. */
7419 if (!TREE_CONSTANT (arg)
7420 && (TREE_SIDE_EFFECTS (arg)
7421 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7422 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7423 return NULL_TREE;
7425 arg = fold_convert_loc (loc, arg_type, arg);
7426 if (lhs == 0)
7428 true_value = fold_convert_loc (loc, cond_type, true_value);
7429 if (cond_first_p)
7430 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7431 else
7432 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7434 if (rhs == 0)
7436 false_value = fold_convert_loc (loc, cond_type, false_value);
7437 if (cond_first_p)
7438 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7439 else
7440 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7443 /* Check that we have simplified at least one of the branches. */
7444 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7445 return NULL_TREE;
7447 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
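
A minimal standalone sketch (C++, separate from this file) of why this transform pays off only when a branch simplifies: with a constant arm, pushing the operation inside the conditional lets one branch fold away entirely.

#include <cassert>

static int before (int a, bool p) { return a + (p ? 0 : 4); }
static int after  (int a, bool p) { return p ? a : a + 4; }	/* folded form */

int
main ()
{
  for (int a = -3; a <= 3; a++)
    for (int p = 0; p <= 1; p++)
      assert (before (a, p) == after (a, p));
}
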
7451 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7453 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7454 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7455 if ARG - ZERO_ARG is the same as ARG.
7457 If ARG is NULL, check for any value of type TYPE.
7459 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7460 and finite. The problematic cases are when X is zero, and its mode
7461 has signed zeros. In the case of rounding towards -infinity,
7462 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7463 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7465 bool
7466 fold_real_zero_addition_p (const_tree type, const_tree arg,
7467 const_tree zero_arg, int negate)
7469 if (!real_zerop (zero_arg))
7470 return false;
7472 /* Don't allow the fold with -fsignaling-nans. */
7473 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7474 return false;
7476 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7477 if (!HONOR_SIGNED_ZEROS (type))
7478 return true;
7480 /* There is no case that is safe for all rounding modes. */
7481 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7482 return false;
7484 /* In a vector or complex, we would need to check the sign of all zeros. */
7485 if (TREE_CODE (zero_arg) == VECTOR_CST)
7486 zero_arg = uniform_vector_p (zero_arg);
7487 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7488 return false;
7490 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7491 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7492 negate = !negate;
7494 /* The mode has signed zeros, and we have to honor their sign.
7495 In this situation, there are only two cases we can return true for.
7496 (i) X - 0 is the same as X with default rounding.
7497 (ii) X + 0 is X when X can't possibly be -0.0. */
7498 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
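
A standalone check (C++, assumes IEEE-754 doubles with default rounding; compile without -ffast-math) of the signed-zero distinction described above: X - 0.0 preserves -0.0, while X + 0.0 does not.

#include <cassert>
#include <cmath>

int
main ()
{
  double neg_zero = -0.0;
  assert (std::signbit (neg_zero - 0.0));	/* -0.0 - 0.0 == -0.0 */
  assert (!std::signbit (neg_zero + 0.0));	/* -0.0 + 0.0 == +0.0 */
}
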
7501 /* Subroutine of match.pd that optimizes comparisons of a division by
7502 a nonzero integer constant against an integer constant, i.e.
7503 X/C1 op C2.
7505 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7506 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7508 enum tree_code
7509 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7510 tree *hi, bool *neg_overflow)
7512 tree prod, tmp, type = TREE_TYPE (c1);
7513 signop sign = TYPE_SIGN (type);
7514 wi::overflow_type overflow;
7516 /* We have to do this the hard way to detect unsigned overflow.
7517 prod = int_const_binop (MULT_EXPR, c1, c2); */
7518 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7519 prod = force_fit_type (type, val, -1, overflow);
7520 *neg_overflow = false;
7522 if (sign == UNSIGNED)
7524 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7525 *lo = prod;
7527 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7528 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7529 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7531 else if (tree_int_cst_sgn (c1) >= 0)
7533 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7534 switch (tree_int_cst_sgn (c2))
7536 case -1:
7537 *neg_overflow = true;
7538 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7539 *hi = prod;
7540 break;
7542 case 0:
7543 *lo = fold_negate_const (tmp, type);
7544 *hi = tmp;
7545 break;
7547 case 1:
7548 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7549 *lo = prod;
7550 break;
7552 default:
7553 gcc_unreachable ();
7556 else
7558 /* A negative divisor reverses the relational operators. */
7559 code = swap_tree_comparison (code);
7561 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7562 switch (tree_int_cst_sgn (c2))
7564 case -1:
7565 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7566 *lo = prod;
7567 break;
7569 case 0:
7570 *hi = fold_negate_const (tmp, type);
7571 *lo = tmp;
7572 break;
7574 case 1:
7575 *neg_overflow = true;
7576 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7577 *hi = prod;
7578 break;
7580 default:
7581 gcc_unreachable ();
7585 if (code != EQ_EXPR && code != NE_EXPR)
7586 return code;
7588 if (TREE_OVERFLOW (*lo)
7589 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7590 *lo = NULL_TREE;
7591 if (TREE_OVERFLOW (*hi)
7592 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7593 *hi = NULL_TREE;
7595 return code;
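
A worked standalone example (C++, not part of this file) of the unsigned range computed above: with C1 = 3 and C2 = 2, LO = C1*C2 = 6 and HI = LO + (C1-1) = 8, so X / 3 == 2 holds exactly when 6 <= X <= 8.

#include <cassert>

int
main ()
{
  const unsigned c1 = 3, c2 = 2;
  const unsigned lo = c1 * c2, hi = lo + (c1 - 1);
  for (unsigned x = 0; x < 100; x++)
    assert ((x / c1 == c2) == (x >= lo && x <= hi));
}
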
7598 /* Test whether it is preferable to swap two operands, ARG0 and
7599 ARG1, for example because ARG0 is an integer constant and ARG1
7600 isn't. */
7602 bool
7603 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7605 if (CONSTANT_CLASS_P (arg1))
7606 return false;
7607 if (CONSTANT_CLASS_P (arg0))
7608 return true;
7610 STRIP_NOPS (arg0);
7611 STRIP_NOPS (arg1);
7613 if (TREE_CONSTANT (arg1))
7614 return false;
7615 if (TREE_CONSTANT (arg0))
7616 return true;
7618 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7619 for commutative and comparison operators. Ensuring a canonical
7620 form allows the optimizers to find additional redundancies without
7621 having to explicitly check for both orderings. */
7622 if (TREE_CODE (arg0) == SSA_NAME
7623 && TREE_CODE (arg1) == SSA_NAME
7624 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7625 return true;
7627 /* Put SSA_NAMEs last. */
7628 if (TREE_CODE (arg1) == SSA_NAME)
7629 return false;
7630 if (TREE_CODE (arg0) == SSA_NAME)
7631 return true;
7633 /* Put variables last. */
7634 if (DECL_P (arg1))
7635 return false;
7636 if (DECL_P (arg0))
7637 return true;
7639 return false;
7643 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7644 means A >= Y && A != MAX, but in this case we know that
7645 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7647 static tree
7648 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7650 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7652 if (TREE_CODE (bound) == LT_EXPR)
7653 a = TREE_OPERAND (bound, 0);
7654 else if (TREE_CODE (bound) == GT_EXPR)
7655 a = TREE_OPERAND (bound, 1);
7656 else
7657 return NULL_TREE;
7659 typea = TREE_TYPE (a);
7660 if (!INTEGRAL_TYPE_P (typea)
7661 && !POINTER_TYPE_P (typea))
7662 return NULL_TREE;
7664 if (TREE_CODE (ineq) == LT_EXPR)
7666 a1 = TREE_OPERAND (ineq, 1);
7667 y = TREE_OPERAND (ineq, 0);
7669 else if (TREE_CODE (ineq) == GT_EXPR)
7671 a1 = TREE_OPERAND (ineq, 0);
7672 y = TREE_OPERAND (ineq, 1);
7674 else
7675 return NULL_TREE;
7677 if (TREE_TYPE (a1) != typea)
7678 return NULL_TREE;
7680 if (POINTER_TYPE_P (typea))
7682 /* Convert the pointer types into integers before taking the difference. */
7683 tree ta = fold_convert_loc (loc, ssizetype, a);
7684 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7685 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7687 else
7688 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7690 if (!diff || !integer_onep (diff))
7691 return NULL_TREE;
7693 return fold_build2_loc (loc, GE_EXPR, type, a, y);
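
A standalone sanity check (C++, not part of this file) of the identity behind this fold: once A < X holds, A + 1 cannot wrap, and for integers A + 1 > Y is then the same as A >= Y.

#include <cassert>

int
main ()
{
  for (int a = -4; a <= 4; a++)
    for (int x = -4; x <= 4; x++)
      for (int y = -4; y <= 4; y++)
	if (a < x)
	  assert ((a + 1 > y) == (a >= y));
}
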
7696 /* Fold a sum or difference of at least one multiplication.
7697 Returns the folded tree or NULL if no simplification could be made. */
7699 static tree
7700 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7701 tree arg0, tree arg1)
7703 tree arg00, arg01, arg10, arg11;
7704 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7706 /* (A * C) +- (B * C) -> (A+-B) * C.
7707 (A * C) +- A -> A * (C+-1).
7708 We are most concerned about the case where C is a constant,
7709 but other combinations show up during loop reduction. Since
7710 it is not difficult, try all four possibilities. */
7712 if (TREE_CODE (arg0) == MULT_EXPR)
7714 arg00 = TREE_OPERAND (arg0, 0);
7715 arg01 = TREE_OPERAND (arg0, 1);
7717 else if (TREE_CODE (arg0) == INTEGER_CST)
7719 arg00 = build_one_cst (type);
7720 arg01 = arg0;
7722 else
7724 /* We cannot generate constant 1 for fract. */
7725 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7726 return NULL_TREE;
7727 arg00 = arg0;
7728 arg01 = build_one_cst (type);
7730 if (TREE_CODE (arg1) == MULT_EXPR)
7732 arg10 = TREE_OPERAND (arg1, 0);
7733 arg11 = TREE_OPERAND (arg1, 1);
7735 else if (TREE_CODE (arg1) == INTEGER_CST)
7737 arg10 = build_one_cst (type);
7738 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7739 the purpose of this canonicalization. */
7740 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7741 && negate_expr_p (arg1)
7742 && code == PLUS_EXPR)
7744 arg11 = negate_expr (arg1);
7745 code = MINUS_EXPR;
7747 else
7748 arg11 = arg1;
7750 else
7752 /* We cannot generate constant 1 for fract. */
7753 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7754 return NULL_TREE;
7755 arg10 = arg1;
7756 arg11 = build_one_cst (type);
7758 same = NULL_TREE;
7760 /* Prefer factoring a common non-constant. */
7761 if (operand_equal_p (arg00, arg10, 0))
7762 same = arg00, alt0 = arg01, alt1 = arg11;
7763 else if (operand_equal_p (arg01, arg11, 0))
7764 same = arg01, alt0 = arg00, alt1 = arg10;
7765 else if (operand_equal_p (arg00, arg11, 0))
7766 same = arg00, alt0 = arg01, alt1 = arg10;
7767 else if (operand_equal_p (arg01, arg10, 0))
7768 same = arg01, alt0 = arg00, alt1 = arg11;
7770 /* No identical multiplicands; see if we can find a common
7771 power-of-two factor in non-power-of-two multiplies. This
7772 can help in multi-dimensional array access. */
7773 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7775 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7776 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7777 HOST_WIDE_INT tmp;
7778 bool swap = false;
7779 tree maybe_same;
7781 /* Move min of absolute values to int11. */
7782 if (absu_hwi (int01) < absu_hwi (int11))
7784 tmp = int01, int01 = int11, int11 = tmp;
7785 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7786 maybe_same = arg01;
7787 swap = true;
7789 else
7790 maybe_same = arg11;
7792 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7793 if (factor > 1
7794 && pow2p_hwi (factor)
7795 && (int01 & (factor - 1)) == 0
7796 /* The remainder should not be a constant, otherwise we
7797 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7798 increase the number of multiplications necessary. */
7799 && TREE_CODE (arg10) != INTEGER_CST)
7801 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7802 build_int_cst (TREE_TYPE (arg00),
7803 int01 / int11));
7804 alt1 = arg10;
7805 same = maybe_same;
7806 if (swap)
7807 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7811 if (!same)
7812 return NULL_TREE;
7814 if (! ANY_INTEGRAL_TYPE_P (type)
7815 || TYPE_OVERFLOW_WRAPS (type)
7816 /* We are neither factoring zero nor minus one. */
7817 || TREE_CODE (same) == INTEGER_CST)
7818 return fold_build2_loc (loc, MULT_EXPR, type,
7819 fold_build2_loc (loc, code, type,
7820 fold_convert_loc (loc, type, alt0),
7821 fold_convert_loc (loc, type, alt1)),
7822 fold_convert_loc (loc, type, same));
7824 /* SAME may be zero and thus the operation CODE may overflow. Likewise
7825 SAME may be minus one and thus the multiplication may overflow. Perform
7826 the sum operation in an unsigned type. */
7827 tree utype = unsigned_type_for (type);
7828 tree tem = fold_build2_loc (loc, code, utype,
7829 fold_convert_loc (loc, utype, alt0),
7830 fold_convert_loc (loc, utype, alt1));
7831 /* If the sum evaluated to a constant that is not the most negative
7832 value, the multiplication cannot overflow. */
7833 if (TREE_CODE (tem) == INTEGER_CST
7834 && (wi::to_wide (tem)
7835 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7836 return fold_build2_loc (loc, MULT_EXPR, type,
7837 fold_convert (type, tem), same);
7839 /* Do not resort to unsigned multiplication because
7840 we lose the no-overflow property of the expression. */
7841 return NULL_TREE;
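
A simplified standalone sketch (C++, not part of this file) of the factoring and the unsigned fallback above: the sum A + B is carried out in an unsigned type, whose wrapping is well defined, so the rewritten form introduces no signed-overflow hazard in the new addition.

#include <cassert>
#include <cstdint>

/* a*c + b*c -> (a + b) * c, with the sum done in unsigned arithmetic. */
static int32_t
factored (int32_t a, int32_t b, int32_t c)
{
  uint32_t sum = (uint32_t) a + (uint32_t) b;	/* wraps, never UB */
  return (int32_t) sum * c;
}

int
main ()
{
  assert (factored (5, 7, 3) == 5 * 3 + 7 * 3);
  assert (factored (-2, 9, 4) == -2 * 4 + 9 * 4);
}
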
7844 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7845 specified by EXPR into the buffer PTR of length LEN bytes.
7846 Return the number of bytes placed in the buffer, or zero
7847 upon failure. */
7849 static int
7850 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7852 tree type = TREE_TYPE (expr);
7853 int total_bytes;
7854 if (TREE_CODE (type) == BITINT_TYPE)
7856 struct bitint_info info;
7857 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7858 gcc_assert (ok);
7859 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7860 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7862 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7863 /* More work is needed when adding _BitInt support to PDP endian
7864 if limb is smaller than word, or if _BitInt limb ordering doesn't
7865 match target endianness here. */
7866 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7867 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7868 || (GET_MODE_SIZE (limb_mode)
7869 >= UNITS_PER_WORD)));
7871 else
7872 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7874 else
7875 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7876 int byte, offset, word, words;
7877 unsigned char value;
7879 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7880 return 0;
7881 if (off == -1)
7882 off = 0;
7884 if (ptr == NULL)
7885 /* Dry run. */
7886 return MIN (len, total_bytes - off);
7888 words = total_bytes / UNITS_PER_WORD;
7890 for (byte = 0; byte < total_bytes; byte++)
7892 int bitpos = byte * BITS_PER_UNIT;
7893 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7894 number of bytes. */
7895 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7897 if (total_bytes > UNITS_PER_WORD)
7899 word = byte / UNITS_PER_WORD;
7900 if (WORDS_BIG_ENDIAN)
7901 word = (words - 1) - word;
7902 offset = word * UNITS_PER_WORD;
7903 if (BYTES_BIG_ENDIAN)
7904 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7905 else
7906 offset += byte % UNITS_PER_WORD;
7908 else
7909 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7910 if (offset >= off && offset - off < len)
7911 ptr[offset - off] = value;
7913 return MIN (len, total_bytes - off);
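
A standalone model (C++, not part of this file) of the byte loop above: each target byte is an 8-bit slice of the value, and only the destination offset changes between little- and big-endian layouts.

#include <cassert>
#include <cstdint>

static void
encode_le (uint32_t v, unsigned char *ptr, int total_bytes)
{
  for (int byte = 0; byte < total_bytes; byte++)
    ptr[byte] = (v >> (byte * 8)) & 0xff;	/* offset == byte */
}

static void
encode_be (uint32_t v, unsigned char *ptr, int total_bytes)
{
  for (int byte = 0; byte < total_bytes; byte++)
    ptr[(total_bytes - 1) - byte] = (v >> (byte * 8)) & 0xff;
}

int
main ()
{
  unsigned char le[4], be[4];
  encode_le (0x11223344, le, 4);
  encode_be (0x11223344, be, 4);
  assert (le[0] == 0x44 && le[3] == 0x11);
  assert (be[0] == 0x11 && be[3] == 0x44);
}
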
7917 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7918 specified by EXPR into the buffer PTR of length LEN bytes.
7919 Return the number of bytes placed in the buffer, or zero
7920 upon failure. */
7922 static int
7923 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7925 tree type = TREE_TYPE (expr);
7926 scalar_mode mode = SCALAR_TYPE_MODE (type);
7927 int total_bytes = GET_MODE_SIZE (mode);
7928 FIXED_VALUE_TYPE value;
7929 tree i_value, i_type;
7931 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7932 return 0;
7934 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7936 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7937 return 0;
7939 value = TREE_FIXED_CST (expr);
7940 i_value = double_int_to_tree (i_type, value.data);
7942 return native_encode_int (i_value, ptr, len, off);
7946 /* Subroutine of native_encode_expr. Encode the REAL_CST
7947 specified by EXPR into the buffer PTR of length LEN bytes.
7948 Return the number of bytes placed in the buffer, or zero
7949 upon failure. */
7951 static int
7952 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7954 tree type = TREE_TYPE (expr);
7955 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7956 int byte, offset, word, words, bitpos;
7957 unsigned char value;
7959 /* There are always 32 bits in each long, no matter the size of
7960 the host's long. We handle floating point representations with
7961 up to 192 bits. */
7962 long tmp[6];
7964 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7965 return 0;
7966 if (off == -1)
7967 off = 0;
7969 if (ptr == NULL)
7970 /* Dry run. */
7971 return MIN (len, total_bytes - off);
7973 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7975 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7977 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7978 bitpos += BITS_PER_UNIT)
7980 byte = (bitpos / BITS_PER_UNIT) & 3;
7981 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7983 if (UNITS_PER_WORD < 4)
7985 word = byte / UNITS_PER_WORD;
7986 if (WORDS_BIG_ENDIAN)
7987 word = (words - 1) - word;
7988 offset = word * UNITS_PER_WORD;
7989 if (BYTES_BIG_ENDIAN)
7990 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7991 else
7992 offset += byte % UNITS_PER_WORD;
7994 else
7996 offset = byte;
7997 if (BYTES_BIG_ENDIAN)
7999 /* Reverse bytes within each long, or within the entire float
8000 if it's smaller than a long (for HFmode). */
8001 offset = MIN (3, total_bytes - 1) - offset;
8002 gcc_assert (offset >= 0);
8005 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
8006 if (offset >= off
8007 && offset - off < len)
8008 ptr[offset - off] = value;
8010 return MIN (len, total_bytes - off);
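
A standalone sketch (C++, assumes an IEEE-754 host float, which stands in here for real_to_target's output) of the layout idea above: the float's image is treated as 32-bit groups, one per long, and each group's bytes are then scattered by endianness, little-endian in this sketch.

#include <cassert>
#include <cstdint>
#include <cstring>

int
main ()
{
  float f = 1.5f;			/* bit pattern 0x3fc00000 */
  uint32_t group;			/* one 32-bit group holds it all */
  std::memcpy (&group, &f, sizeof f);
  assert (group == 0x3fc00000u);
  unsigned char bytes[4];
  for (int bitpos = 0; bitpos < 32; bitpos += 8)
    bytes[bitpos / 8] = (group >> bitpos) & 0xff;	/* little endian */
  uint32_t back = 0;
  for (int i = 0; i < 4; i++)
    back |= (uint32_t) bytes[i] << (i * 8);
  assert (back == group);
}
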
8013 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
8014 specified by EXPR into the buffer PTR of length LEN bytes.
8015 Return the number of bytes placed in the buffer, or zero
8016 upon failure. */
8018 static int
8019 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
8021 int rsize, isize;
8022 tree part;
8024 part = TREE_REALPART (expr);
8025 rsize = native_encode_expr (part, ptr, len, off);
8026 if (off == -1 && rsize == 0)
8027 return 0;
8028 part = TREE_IMAGPART (expr);
8029 if (off != -1)
8030 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
8031 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
8032 len - rsize, off);
8033 if (off == -1 && isize != rsize)
8034 return 0;
8035 return rsize + isize;
8038 /* Like native_encode_vector, but only encode the first COUNT elements.
8039 The other arguments are as for native_encode_vector. */
8041 static int
8042 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
8043 int off, unsigned HOST_WIDE_INT count)
8045 tree itype = TREE_TYPE (TREE_TYPE (expr));
8046 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
8047 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
8049 /* This is the only case in which elements can be smaller than a byte.
8050 Element 0 is always in the lsb of the containing byte. */
8051 unsigned int elt_bits = TYPE_PRECISION (itype);
8052 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
8053 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8054 return 0;
8056 if (off == -1)
8057 off = 0;
8059 /* Zero the buffer and then set bits later where necessary. */
8060 int extract_bytes = MIN (len, total_bytes - off);
8061 if (ptr)
8062 memset (ptr, 0, extract_bytes);
8064 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
8065 unsigned int first_elt = off * elts_per_byte;
8066 unsigned int extract_elts = extract_bytes * elts_per_byte;
8067 for (unsigned int i = 0; i < extract_elts; ++i)
8069 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
8070 if (TREE_CODE (elt) != INTEGER_CST)
8071 return 0;
8073 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
8075 unsigned int bit = i * elt_bits;
8076 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
8079 return extract_bytes;
8082 int offset = 0;
8083 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
8084 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
8086 if (off >= size)
8088 off -= size;
8089 continue;
8091 tree elem = VECTOR_CST_ELT (expr, i);
8092 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
8093 len - offset, off);
8094 if ((off == -1 && res != size) || res == 0)
8095 return 0;
8096 offset += res;
8097 if (offset >= len)
8098 return (off == -1 && i < count - 1) ? 0 : offset;
8099 if (off != -1)
8100 off = 0;
8102 return offset;
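
A standalone model (C++, not part of this file) of the boolean-vector path above: one-bit elements are packed with element 0 in the least significant bit of the first byte.

#include <cassert>

int
main ()
{
  const bool elts[8] = { true, false, true, true, false, false, false, true };
  unsigned char byte = 0;
  for (unsigned i = 0; i < 8; i++)
    if (elts[i])
      byte |= 1 << i;			/* element 0 lands in the lsb */
  assert (byte == 0x8d);		/* bits 0, 2, 3 and 7 set */
}
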
8105 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
8106 specified by EXPR into the buffer PTR of length LEN bytes.
8107 Return the number of bytes placed in the buffer, or zero
8108 upon failure. */
8110 static int
8111 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8113 unsigned HOST_WIDE_INT count;
8114 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
8115 return 0;
8116 return native_encode_vector_part (expr, ptr, len, off, count);
8120 /* Subroutine of native_encode_expr. Encode the STRING_CST
8121 specified by EXPR into the buffer PTR of length LEN bytes.
8122 Return the number of bytes placed in the buffer, or zero
8123 upon failure. */
8125 static int
8126 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8128 tree type = TREE_TYPE (expr);
8130 /* Wide-char strings are encoded in target byte-order so native
8131 encoding them is trivial. */
8132 if (BITS_PER_UNIT != CHAR_BIT
8133 || TREE_CODE (type) != ARRAY_TYPE
8134 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8135 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8136 return 0;
8138 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8139 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8140 return 0;
8141 if (off == -1)
8142 off = 0;
8143 len = MIN (total_bytes - off, len);
8144 if (ptr == NULL)
8145 /* Dry run. */;
8146 else
8148 int written = 0;
8149 if (off < TREE_STRING_LENGTH (expr))
8151 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8152 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8154 memset (ptr + written, 0, len - written);
8156 return len;
8160 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8161 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8162 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8163 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8164 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8165 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8166 Return the number of bytes placed in the buffer, or zero upon failure. */
8168 int
8169 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8171 /* We don't support starting at negative offset and -1 is special. */
8172 if (off < -1)
8173 return 0;
8175 switch (TREE_CODE (expr))
8177 case INTEGER_CST:
8178 return native_encode_int (expr, ptr, len, off);
8180 case REAL_CST:
8181 return native_encode_real (expr, ptr, len, off);
8183 case FIXED_CST:
8184 return native_encode_fixed (expr, ptr, len, off);
8186 case COMPLEX_CST:
8187 return native_encode_complex (expr, ptr, len, off);
8189 case VECTOR_CST:
8190 return native_encode_vector (expr, ptr, len, off);
8192 case STRING_CST:
8193 return native_encode_string (expr, ptr, len, off);
8195 default:
8196 return 0;
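
A standalone model (C++, not part of this file) of the PTR/LEN/OFF protocol shared by these encoders: a null buffer is a dry run that only reports the byte count, OFF == -1 demands that the whole object fit, and otherwise encoding starts OFF bytes into the object.

#include <cassert>
#include <cstring>

static int
encode (const unsigned char *obj, int total_bytes,
	unsigned char *ptr, int len, int off)
{
  if ((off == -1 && total_bytes > len) || off >= total_bytes)
    return 0;				/* doesn't fit / out of bounds */
  if (off == -1)
    off = 0;
  int n = total_bytes - off < len ? total_bytes - off : len;
  if (ptr)				/* null ptr: dry run, count only */
    std::memcpy (ptr, obj + off, n);
  return n;
}

int
main ()
{
  const unsigned char obj[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
  unsigned char buf[8];
  assert (encode (obj, 8, nullptr, 8, -1) == 8);	/* dry run */
  assert (encode (obj, 8, buf, 4, -1) == 0);		/* must fit whole */
  assert (encode (obj, 8, buf, 4, 2) == 4		/* bytes 2..5 */
	  && buf[0] == 2 && buf[3] == 5);
}
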
8200 /* Try to find a type whose byte size is smaller than or equal to LEN
8201 bytes and larger than or equal to FIELDSIZE bytes, with underlying mode
8202 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8203 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8205 tree
8206 find_bitfield_repr_type (int fieldsize, int len)
8208 machine_mode mode;
8209 for (int pass = 0; pass < 2; pass++)
8211 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8212 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8213 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8214 && known_eq (GET_MODE_PRECISION (mode),
8215 GET_MODE_BITSIZE (mode))
8216 && known_le (GET_MODE_SIZE (mode), len))
8218 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8219 if (ret && TYPE_MODE (ret) == mode)
8220 return ret;
8224 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8225 if (int_n_enabled_p[i]
8226 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8227 && int_n_trees[i].unsigned_type)
8229 tree ret = int_n_trees[i].unsigned_type;
8230 mode = TYPE_MODE (ret);
8231 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8232 && known_eq (GET_MODE_PRECISION (mode),
8233 GET_MODE_BITSIZE (mode))
8234 && known_le (GET_MODE_SIZE (mode), len))
8235 return ret;
8238 return NULL_TREE;
8241 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8242 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8243 to be non-NULL and OFF zero), then in addition to filling the
8244 bytes pointed to by PTR with the value, also clear any bits pointed
8245 to by MASK that are known to be initialized; other bits are kept
8246 as-is, e.g. for uninitialized padding bits or uninitialized fields. */
8248 int
8249 native_encode_initializer (tree init, unsigned char *ptr, int len,
8250 int off, unsigned char *mask)
8252 int r;
8254 /* We don't support starting at negative offset and -1 is special. */
8255 if (off < -1 || init == NULL_TREE)
8256 return 0;
8258 gcc_assert (mask == NULL || (off == 0 && ptr));
8260 STRIP_NOPS (init);
8261 switch (TREE_CODE (init))
8263 case VIEW_CONVERT_EXPR:
8264 case NON_LVALUE_EXPR:
8265 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8266 mask);
8267 default:
8268 r = native_encode_expr (init, ptr, len, off);
8269 if (mask)
8270 memset (mask, 0, r);
8271 return r;
8272 case CONSTRUCTOR:
8273 tree type = TREE_TYPE (init);
8274 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8275 if (total_bytes < 0)
8276 return 0;
8277 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8278 return 0;
8279 int o = off == -1 ? 0 : off;
8280 if (TREE_CODE (type) == ARRAY_TYPE)
8282 tree min_index;
8283 unsigned HOST_WIDE_INT cnt;
8284 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8285 constructor_elt *ce;
8287 if (!TYPE_DOMAIN (type)
8288 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8289 return 0;
8291 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8292 if (fieldsize <= 0)
8293 return 0;
8295 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8296 if (ptr)
8297 memset (ptr, '\0', MIN (total_bytes - off, len));
8299 for (cnt = 0; ; cnt++)
8301 tree val = NULL_TREE, index = NULL_TREE;
8302 HOST_WIDE_INT pos = curpos, count = 0;
8303 bool full = false;
8304 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8306 val = ce->value;
8307 index = ce->index;
8309 else if (mask == NULL
8310 || CONSTRUCTOR_NO_CLEARING (init)
8311 || curpos >= total_bytes)
8312 break;
8313 else
8314 pos = total_bytes;
8316 if (index && TREE_CODE (index) == RANGE_EXPR)
8318 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8319 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8320 return 0;
8321 offset_int wpos
8322 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8323 - wi::to_offset (min_index),
8324 TYPE_PRECISION (sizetype));
8325 wpos *= fieldsize;
8326 if (!wi::fits_shwi_p (wpos))
8327 return 0;
8328 pos = wpos.to_shwi ();
8329 offset_int wcount
8330 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8331 - wi::to_offset (TREE_OPERAND (index, 0)),
8332 TYPE_PRECISION (sizetype));
8333 if (!wi::fits_shwi_p (wcount))
8334 return 0;
8335 count = wcount.to_shwi ();
8337 else if (index)
8339 if (TREE_CODE (index) != INTEGER_CST)
8340 return 0;
8341 offset_int wpos
8342 = wi::sext (wi::to_offset (index)
8343 - wi::to_offset (min_index),
8344 TYPE_PRECISION (sizetype));
8345 wpos *= fieldsize;
8346 if (!wi::fits_shwi_p (wpos))
8347 return 0;
8348 pos = wpos.to_shwi ();
8351 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8353 if (valueinit == -1)
8355 tree zero = build_zero_cst (TREE_TYPE (type));
8356 r = native_encode_initializer (zero, ptr + curpos,
8357 fieldsize, 0,
8358 mask + curpos);
8359 if (TREE_CODE (zero) == CONSTRUCTOR)
8360 ggc_free (zero);
8361 if (!r)
8362 return 0;
8363 valueinit = curpos;
8364 curpos += fieldsize;
8366 while (curpos != pos)
8368 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8369 memcpy (mask + curpos, mask + valueinit, fieldsize);
8370 curpos += fieldsize;
8374 curpos = pos;
8375 if (val)
8378 if (off == -1
8379 || (curpos >= off
8380 && (curpos + fieldsize
8381 <= (HOST_WIDE_INT) off + len)))
8383 if (full)
8385 if (ptr)
8386 memcpy (ptr + (curpos - o), ptr + (pos - o),
8387 fieldsize);
8388 if (mask)
8389 memcpy (mask + curpos, mask + pos, fieldsize);
8391 else if (!native_encode_initializer (val,
8392 ptr
8393 ? ptr + curpos - o
8394 : NULL,
8395 fieldsize,
8396 off == -1 ? -1
8397 : 0,
8398 mask
8399 ? mask + curpos
8400 : NULL))
8401 return 0;
8402 else
8404 full = true;
8405 pos = curpos;
8408 else if (curpos + fieldsize > off
8409 && curpos < (HOST_WIDE_INT) off + len)
8411 /* Partial overlap. */
8412 unsigned char *p = NULL;
8413 int no = 0;
8414 int l;
8415 gcc_assert (mask == NULL);
8416 if (curpos >= off)
8418 if (ptr)
8419 p = ptr + curpos - off;
8420 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8421 fieldsize);
8423 else
8425 p = ptr;
8426 no = off - curpos;
8427 l = len;
8429 if (!native_encode_initializer (val, p, l, no, NULL))
8430 return 0;
8432 curpos += fieldsize;
8434 while (count-- != 0);
8436 return MIN (total_bytes - off, len);
8438 else if (TREE_CODE (type) == RECORD_TYPE
8439 || TREE_CODE (type) == UNION_TYPE)
8441 unsigned HOST_WIDE_INT cnt;
8442 constructor_elt *ce;
8443 tree fld_base = TYPE_FIELDS (type);
8444 tree to_free = NULL_TREE;
8446 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8447 if (ptr != NULL)
8448 memset (ptr, '\0', MIN (total_bytes - o, len));
8449 for (cnt = 0; ; cnt++)
8451 tree val = NULL_TREE, field = NULL_TREE;
8452 HOST_WIDE_INT pos = 0, fieldsize;
8453 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8455 if (to_free)
8457 ggc_free (to_free);
8458 to_free = NULL_TREE;
8461 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8463 val = ce->value;
8464 field = ce->index;
8465 if (field == NULL_TREE)
8466 return 0;
8468 pos = int_byte_position (field);
8469 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8470 continue;
8472 else if (mask == NULL
8473 || CONSTRUCTOR_NO_CLEARING (init))
8474 break;
8475 else
8476 pos = total_bytes;
8478 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8480 tree fld;
8481 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8483 if (TREE_CODE (fld) != FIELD_DECL)
8484 continue;
8485 if (fld == field)
8486 break;
8487 if (DECL_PADDING_P (fld))
8488 continue;
8489 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8490 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8491 return 0;
8492 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8493 continue;
8494 break;
8496 if (fld == NULL_TREE)
8498 if (ce == NULL)
8499 break;
8500 return 0;
8502 fld_base = DECL_CHAIN (fld);
8503 if (fld != field)
8505 cnt--;
8506 field = fld;
8507 pos = int_byte_position (field);
8508 val = build_zero_cst (TREE_TYPE (fld));
8509 if (TREE_CODE (val) == CONSTRUCTOR)
8510 to_free = val;
8514 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8515 && TYPE_DOMAIN (TREE_TYPE (field))
8516 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8518 if (mask || off != -1)
8519 return 0;
8520 if (val == NULL_TREE)
8521 continue;
8522 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8523 return 0;
8524 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8525 if (fieldsize < 0
8526 || (int) fieldsize != fieldsize
8527 || (pos + fieldsize) > INT_MAX)
8528 return 0;
8529 if (pos + fieldsize > total_bytes)
8531 if (ptr != NULL && total_bytes < len)
8532 memset (ptr + total_bytes, '\0',
8533 MIN (pos + fieldsize, len) - total_bytes);
8534 total_bytes = pos + fieldsize;
8537 else
8539 if (DECL_SIZE_UNIT (field) == NULL_TREE
8540 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8541 return 0;
8542 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8544 if (fieldsize == 0)
8545 continue;
8547 /* Prepare to deal with integral bit-fields and filter out other
8548 bit-fields that do not start and end on a byte boundary. */
8549 if (DECL_BIT_FIELD (field))
8551 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8552 return 0;
8553 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8554 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8556 bpos %= BITS_PER_UNIT;
8557 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8558 epos = fieldsize % BITS_PER_UNIT;
8559 fieldsize += BITS_PER_UNIT - 1;
8560 fieldsize /= BITS_PER_UNIT;
8562 else if (bpos % BITS_PER_UNIT
8563 || DECL_SIZE (field) == NULL_TREE
8564 || !tree_fits_shwi_p (DECL_SIZE (field))
8565 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8566 return 0;
8569 if (off != -1 && pos + fieldsize <= off)
8570 continue;
8572 if (val == NULL_TREE)
8573 continue;
8575 if (DECL_BIT_FIELD (field)
8576 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8578 /* FIXME: Handle PDP endian. */
8579 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8580 return 0;
8582 if (TREE_CODE (val) != INTEGER_CST)
8583 return 0;
8585 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8586 tree repr_type = NULL_TREE;
8587 HOST_WIDE_INT rpos = 0;
8588 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8590 rpos = int_byte_position (repr);
8591 repr_type = TREE_TYPE (repr);
8593 else
8595 repr_type = find_bitfield_repr_type (fieldsize, len);
8596 if (repr_type == NULL_TREE)
8597 return 0;
8598 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8599 gcc_assert (repr_size > 0 && repr_size <= len);
8600 if (pos + repr_size <= o + len)
8601 rpos = pos;
8602 else
8604 rpos = o + len - repr_size;
8605 gcc_assert (rpos <= pos);
8609 if (rpos > pos)
8610 return 0;
8611 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8612 int diff = (TYPE_PRECISION (repr_type)
8613 - TYPE_PRECISION (TREE_TYPE (field)));
8614 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8615 if (!BYTES_BIG_ENDIAN)
8616 w = wi::lshift (w, bitoff);
8617 else
8618 w = wi::lshift (w, diff - bitoff);
8619 val = wide_int_to_tree (repr_type, w);
8621 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8622 / BITS_PER_UNIT + 1];
8623 int l = native_encode_int (val, buf, sizeof buf, 0);
8624 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8625 return 0;
8627 if (ptr == NULL)
8628 continue;
8630 /* If the bitfield does not start at byte boundary, handle
8631 the partial byte at the start. */
8632 if (bpos
8633 && (off == -1 || (pos >= off && len >= 1)))
8635 if (!BYTES_BIG_ENDIAN)
8637 int msk = (1 << bpos) - 1;
8638 buf[pos - rpos] &= ~msk;
8639 buf[pos - rpos] |= ptr[pos - o] & msk;
8640 if (mask)
8642 if (fieldsize > 1 || epos == 0)
8643 mask[pos] &= msk;
8644 else
8645 mask[pos] &= (msk | ~((1 << epos) - 1));
8648 else
8650 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8651 buf[pos - rpos] &= msk;
8652 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8653 if (mask)
8655 if (fieldsize > 1 || epos == 0)
8656 mask[pos] &= ~msk;
8657 else
8658 mask[pos] &= (~msk
8659 | ((1 << (BITS_PER_UNIT - epos))
8660 - 1));
8664 /* If the bitfield does not end at byte boundary, handle
8665 the partial byte at the end. */
8666 if (epos
8667 && (off == -1
8668 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8670 if (!BYTES_BIG_ENDIAN)
8672 int msk = (1 << epos) - 1;
8673 buf[pos - rpos + fieldsize - 1] &= msk;
8674 buf[pos - rpos + fieldsize - 1]
8675 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8676 if (mask && (fieldsize > 1 || bpos == 0))
8677 mask[pos + fieldsize - 1] &= ~msk;
8679 else
8681 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8682 buf[pos - rpos + fieldsize - 1] &= ~msk;
8683 buf[pos - rpos + fieldsize - 1]
8684 |= ptr[pos + fieldsize - 1 - o] & msk;
8685 if (mask && (fieldsize > 1 || bpos == 0))
8686 mask[pos + fieldsize - 1] &= msk;
8689 if (off == -1
8690 || (pos >= off
8691 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8693 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8694 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8695 memset (mask + pos + (bpos != 0), 0,
8696 fieldsize - (bpos != 0) - (epos != 0));
8698 else
8700 /* Partial overlap. */
8701 HOST_WIDE_INT fsz = fieldsize;
8702 gcc_assert (mask == NULL);
8703 if (pos < off)
8705 fsz -= (off - pos);
8706 pos = off;
8708 if (pos + fsz > (HOST_WIDE_INT) off + len)
8709 fsz = (HOST_WIDE_INT) off + len - pos;
8710 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8712 continue;
8715 if (off == -1
8716 || (pos >= off
8717 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8719 int fldsize = fieldsize;
8720 if (off == -1)
8722 tree fld = DECL_CHAIN (field);
8723 while (fld)
8725 if (TREE_CODE (fld) == FIELD_DECL)
8726 break;
8727 fld = DECL_CHAIN (fld);
8729 if (fld == NULL_TREE)
8730 fldsize = len - pos;
8732 r = native_encode_initializer (val, ptr ? ptr + pos - o
8733 : NULL,
8734 fldsize,
8735 off == -1 ? -1 : 0,
8736 mask ? mask + pos : NULL);
8737 if (!r)
8738 return 0;
8739 if (off == -1
8740 && fldsize != fieldsize
8741 && r > fieldsize
8742 && pos + r > total_bytes)
8743 total_bytes = pos + r;
8745 else
8747 /* Partial overlap. */
8748 unsigned char *p = NULL;
8749 int no = 0;
8750 int l;
8751 gcc_assert (mask == NULL);
8752 if (pos >= off)
8754 if (ptr)
8755 p = ptr + pos - off;
8756 l = MIN ((HOST_WIDE_INT) off + len - pos,
8757 fieldsize);
8759 else
8761 p = ptr;
8762 no = off - pos;
8763 l = len;
8765 if (!native_encode_initializer (val, p, l, no, NULL))
8766 return 0;
8769 return MIN (total_bytes - off, len);
8771 return 0;
8776 /* Subroutine of native_interpret_expr. Interpret the contents of
8777 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8778 If the buffer cannot be interpreted, return NULL_TREE. */
8780 static tree
8781 native_interpret_int (tree type, const unsigned char *ptr, int len)
8783 int total_bytes;
8784 if (TREE_CODE (type) == BITINT_TYPE)
8786 struct bitint_info info;
8787 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8788 gcc_assert (ok);
8789 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8790 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8792 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8793 /* More work is needed when adding _BitInt support to PDP endian
8794 if limb is smaller than word, or if _BitInt limb ordering doesn't
8795 match target endianness here. */
8796 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8797 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8798 || (GET_MODE_SIZE (limb_mode)
8799 >= UNITS_PER_WORD)));
8801 else
8802 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8804 else
8805 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8807 if (total_bytes > len)
8808 return NULL_TREE;
8810 wide_int result = wi::from_buffer (ptr, total_bytes);
8812 return wide_int_to_tree (type, result);
8816 /* Subroutine of native_interpret_expr. Interpret the contents of
8817 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8818 If the buffer cannot be interpreted, return NULL_TREE. */
8820 static tree
8821 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8823 scalar_mode mode = SCALAR_TYPE_MODE (type);
8824 int total_bytes = GET_MODE_SIZE (mode);
8825 double_int result;
8826 FIXED_VALUE_TYPE fixed_value;
8828 if (total_bytes > len
8829 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8830 return NULL_TREE;
8832 result = double_int::from_buffer (ptr, total_bytes);
8833 fixed_value = fixed_from_double_int (result, mode);
8835 return build_fixed (type, fixed_value);
8839 /* Subroutine of native_interpret_expr. Interpret the contents of
8840 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8841 If the buffer cannot be interpreted, return NULL_TREE. */
8843 tree
8844 native_interpret_real (tree type, const unsigned char *ptr, int len)
8846 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8847 int total_bytes = GET_MODE_SIZE (mode);
8848 unsigned char value;
8849 /* There are always 32 bits in each long, no matter the size of
8850 the host's long. We handle floating point representations with
8851 up to 192 bits. */
8852 REAL_VALUE_TYPE r;
8853 long tmp[6];
8855 if (total_bytes > len || total_bytes > 24)
8856 return NULL_TREE;
8857 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8859 memset (tmp, 0, sizeof (tmp));
8860 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8861 bitpos += BITS_PER_UNIT)
8863 /* Both OFFSET and BYTE index within a long;
8864 bitpos indexes the whole float. */
8865 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8866 if (UNITS_PER_WORD < 4)
8868 int word = byte / UNITS_PER_WORD;
8869 if (WORDS_BIG_ENDIAN)
8870 word = (words - 1) - word;
8871 offset = word * UNITS_PER_WORD;
8872 if (BYTES_BIG_ENDIAN)
8873 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8874 else
8875 offset += byte % UNITS_PER_WORD;
8877 else
8879 offset = byte;
8880 if (BYTES_BIG_ENDIAN)
8882 /* Reverse bytes within each long, or within the entire float
8883 if it's smaller than a long (for HFmode). */
8884 offset = MIN (3, total_bytes - 1) - offset;
8885 gcc_assert (offset >= 0);
8888 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8890 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8893 real_from_target (&r, tmp, mode);
8894 return build_real (type, r);
8898 /* Subroutine of native_interpret_expr. Interpret the contents of
8899 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8900 If the buffer cannot be interpreted, return NULL_TREE. */
8902 static tree
8903 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8905 tree etype, rpart, ipart;
8906 int size;
8908 etype = TREE_TYPE (type);
8909 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8910 if (size * 2 > len)
8911 return NULL_TREE;
8912 rpart = native_interpret_expr (etype, ptr, size);
8913 if (!rpart)
8914 return NULL_TREE;
8915 ipart = native_interpret_expr (etype, ptr+size, size);
8916 if (!ipart)
8917 return NULL_TREE;
8918 return build_complex (type, rpart, ipart);
8921 /* Read a vector of type TYPE from the target memory image given by BYTES,
8922 which contains LEN bytes. The vector is known to be encodable using
8923 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8925 Return the vector on success, otherwise return null. */
8927 static tree
8928 native_interpret_vector_part (tree type, const unsigned char *bytes,
8929 unsigned int len, unsigned int npatterns,
8930 unsigned int nelts_per_pattern)
8932 tree elt_type = TREE_TYPE (type);
8933 if (VECTOR_BOOLEAN_TYPE_P (type)
8934 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8936 /* This is the only case in which elements can be smaller than a byte.
8937 Element 0 is always in the lsb of the containing byte. */
8938 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8939 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8940 return NULL_TREE;
8942 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8943 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8945 unsigned int bit_index = i * elt_bits;
8946 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8947 unsigned int lsb = bit_index % BITS_PER_UNIT;
8948 builder.quick_push (bytes[byte_index] & (1 << lsb)
8949 ? build_all_ones_cst (elt_type)
8950 : build_zero_cst (elt_type));
8952 return builder.build ();
8955 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8956 if (elt_bytes * npatterns * nelts_per_pattern > len)
8957 return NULL_TREE;
8959 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8960 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8962 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8963 if (!elt)
8964 return NULL_TREE;
8965 builder.quick_push (elt);
8966 bytes += elt_bytes;
8968 return builder.build ();
8971 /* Subroutine of native_interpret_expr. Interpret the contents of
8972 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8973 If the buffer cannot be interpreted, return NULL_TREE. */
8975 static tree
8976 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8978 unsigned HOST_WIDE_INT size;
8980 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8981 || size > len)
8982 return NULL_TREE;
8984 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8985 return native_interpret_vector_part (type, ptr, len, count, 1);
8989 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8990 the buffer PTR of length LEN as a constant of type TYPE. For
8991 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8992 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8993 return NULL_TREE. */
8995 tree
8996 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8998 switch (TREE_CODE (type))
9000 case INTEGER_TYPE:
9001 case ENUMERAL_TYPE:
9002 case BOOLEAN_TYPE:
9003 case POINTER_TYPE:
9004 case REFERENCE_TYPE:
9005 case OFFSET_TYPE:
9006 case BITINT_TYPE:
9007 return native_interpret_int (type, ptr, len);
9009 case REAL_TYPE:
9010 if (tree ret = native_interpret_real (type, ptr, len))
9012 /* For floating point values in composite modes, punt if this
9013 folding doesn't preserve bit representation. As the mode doesn't
9014 have fixed precision while GCC pretends it does, there could be
9015 valid values that GCC can't really represent accurately.
9016 See PR95450. Even for other modes, e.g. x86 XFmode can have some
9017 bit combinations which GCC doesn't preserve. */
9018 unsigned char buf[24 * 2];
9019 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
9020 int total_bytes = GET_MODE_SIZE (mode);
9021 memcpy (buf + 24, ptr, total_bytes);
9022 clear_type_padding_in_mask (type, buf + 24);
9023 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
9024 || memcmp (buf + 24, buf, total_bytes) != 0)
9025 return NULL_TREE;
9026 return ret;
9028 return NULL_TREE;
9030 case FIXED_POINT_TYPE:
9031 return native_interpret_fixed (type, ptr, len);
9033 case COMPLEX_TYPE:
9034 return native_interpret_complex (type, ptr, len);
9036 case VECTOR_TYPE:
9037 return native_interpret_vector (type, ptr, len);
9039 default:
9040 return NULL_TREE;
9044 /* Returns true if we can interpret the contents of a native encoding
9045 as TYPE. */
9047 bool
9048 can_native_interpret_type_p (tree type)
9050 switch (TREE_CODE (type))
9052 case INTEGER_TYPE:
9053 case ENUMERAL_TYPE:
9054 case BOOLEAN_TYPE:
9055 case POINTER_TYPE:
9056 case REFERENCE_TYPE:
9057 case FIXED_POINT_TYPE:
9058 case REAL_TYPE:
9059 case COMPLEX_TYPE:
9060 case VECTOR_TYPE:
9061 case OFFSET_TYPE:
9062 return true;
9063 default:
9064 return false;
9068 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
9069 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
9071 tree
9072 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
9073 int len)
9075 vec<constructor_elt, va_gc> *elts = NULL;
9076 if (TREE_CODE (type) == ARRAY_TYPE)
9078 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
9079 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
9080 return NULL_TREE;
9082 HOST_WIDE_INT cnt = 0;
9083 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
9085 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
9086 return NULL_TREE;
9087 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
9089 if (eltsz == 0)
9090 cnt = 0;
9091 HOST_WIDE_INT pos = 0;
9092 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
9094 tree v = NULL_TREE;
9095 if (pos >= len || pos + eltsz > len)
9096 return NULL_TREE;
9097 if (can_native_interpret_type_p (TREE_TYPE (type)))
9099 v = native_interpret_expr (TREE_TYPE (type),
9100 ptr + off + pos, eltsz);
9101 if (v == NULL_TREE)
9102 return NULL_TREE;
9104 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9105 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9106 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
9107 eltsz);
9108 if (v == NULL_TREE)
9109 return NULL_TREE;
9110 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9112 return build_constructor (type, elts);
9114 if (TREE_CODE (type) != RECORD_TYPE)
9115 return NULL_TREE;
9116 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9118 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
9119 || is_empty_type (TREE_TYPE (field)))
9120 continue;
9121 tree fld = field;
9122 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9123 int diff = 0;
9124 tree v = NULL_TREE;
9125 if (DECL_BIT_FIELD (field))
9127 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9128 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9130 poly_int64 bitoffset;
9131 poly_uint64 field_offset, fld_offset;
9132 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9133 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9134 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9135 else
9136 bitoffset = 0;
9137 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9138 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9139 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9140 - TYPE_PRECISION (TREE_TYPE (field)));
9141 if (!bitoffset.is_constant (&bitoff)
9142 || bitoff < 0
9143 || bitoff > diff)
9144 return NULL_TREE;
9146 else
9148 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9149 return NULL_TREE;
9150 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9151 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9152 bpos %= BITS_PER_UNIT;
9153 fieldsize += bpos;
9154 fieldsize += BITS_PER_UNIT - 1;
9155 fieldsize /= BITS_PER_UNIT;
9156 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9157 if (repr_type == NULL_TREE)
9158 return NULL_TREE;
9159 sz = int_size_in_bytes (repr_type);
9160 if (sz < 0 || sz > len)
9161 return NULL_TREE;
9162 pos = int_byte_position (field);
9163 if (pos < 0 || pos > len || pos + fieldsize > len)
9164 return NULL_TREE;
9165 HOST_WIDE_INT rpos;
9166 if (pos + sz <= len)
9167 rpos = pos;
9168 else
9170 rpos = len - sz;
9171 gcc_assert (rpos <= pos);
9173 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9174 pos = rpos;
9175 diff = (TYPE_PRECISION (repr_type)
9176 - TYPE_PRECISION (TREE_TYPE (field)));
9177 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9178 if (v == NULL_TREE)
9179 return NULL_TREE;
9180 fld = NULL_TREE;
9184 if (fld)
9186 sz = int_size_in_bytes (TREE_TYPE (fld));
9187 if (sz < 0 || sz > len)
9188 return NULL_TREE;
9189 tree byte_pos = byte_position (fld);
9190 if (!tree_fits_shwi_p (byte_pos))
9191 return NULL_TREE;
9192 pos = tree_to_shwi (byte_pos);
9193 if (pos < 0 || pos > len || pos + sz > len)
9194 return NULL_TREE;
9196 if (fld == NULL_TREE)
9197 /* Already handled above. */;
9198 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9200 v = native_interpret_expr (TREE_TYPE (fld),
9201 ptr + off + pos, sz);
9202 if (v == NULL_TREE)
9203 return NULL_TREE;
9205 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9206 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9207 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9208 if (v == NULL_TREE)
9209 return NULL_TREE;
9210 if (fld != field)
9212 if (TREE_CODE (v) != INTEGER_CST)
9213 return NULL_TREE;
9215 /* FIXME: Figure out how to handle PDP endian bitfields. */
9216 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9217 return NULL_TREE;
9218 if (!BYTES_BIG_ENDIAN)
9219 v = wide_int_to_tree (TREE_TYPE (field),
9220 wi::lrshift (wi::to_wide (v), bitoff));
9221 else
9222 v = wide_int_to_tree (TREE_TYPE (field),
9223 wi::lrshift (wi::to_wide (v),
9224 diff - bitoff));
9226 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9228 return build_constructor (type, elts);
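/* For instance, given a hypothetical little-endian layout with
   BITS_PER_UNIT == 8 and

     struct S { unsigned a : 3; unsigned b : 5; };

   both bitfields share one representative byte.  Interpreting the byte
   0x2b (binary 00101011) reads v = 0x2b through the representative
   type for each field; A is extracted with bitoff = 0 and B by
   shifting the INTEGER_CST right by bitoff = 3, yielding a = 3 and
   b = 5.  */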
9231 /* Routines for manipulating native_encode_expr encoded data when the encoded
9232 or extracted constant positions and/or sizes aren't byte aligned. */
9234 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9235 bits between adjacent elements. AMNT should be within
9236 [0, BITS_PER_UNIT).
9237 Example, AMNT = 2:
9238 00011111|11100000 << 2 = 01111111|10000000
9239 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9241 void
9242 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9243 unsigned int amnt)
9245 if (amnt == 0)
9246 return;
9248 unsigned char carry_over = 0U;
9249 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9250 unsigned char clear_mask = (~0U) << amnt;
9252 for (unsigned int i = 0; i < sz; i++)
9254 unsigned prev_carry_over = carry_over;
9255 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9257 ptr[i] <<= amnt;
9258 if (i != 0)
9260 ptr[i] &= clear_mask;
9261 ptr[i] |= prev_carry_over;
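/* A concrete instance of the example above, assuming BITS_PER_UNIT == 8:

     unsigned char buf[2] = { 0xe0, 0x1f };
     shift_bytes_in_array_left (buf, 2, 2);

   leaves buf[0] == 0x80 and buf[1] == 0x7f; the two high bits shifted
   out of buf[0] are carried into the low bits of buf[1].  */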
9266 /* Like shift_bytes_in_array_left but for big-endian.
9267 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9268 bits between adjacent elements. AMNT should be within
9269 [0, BITS_PER_UNIT).
9270 Example, AMNT = 2:
9271 00011111|11100000 >> 2 = 00000111|11111000
9272 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9274 void
9275 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9276 unsigned int amnt)
9278 if (amnt == 0)
9279 return;
9281 unsigned char carry_over = 0U;
9282 unsigned char carry_mask = ~(~0U << amnt);
9284 for (unsigned int i = 0; i < sz; i++)
9286 unsigned prev_carry_over = carry_over;
9287 carry_over = ptr[i] & carry_mask;
9289 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9290 ptr[i] >>= amnt;
9291 ptr[i] |= prev_carry_over;
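/* Likewise, a concrete instance assuming BITS_PER_UNIT == 8:

     unsigned char buf[2] = { 0x1f, 0xe0 };
     shift_bytes_in_array_right (buf, 2, 2);

   leaves buf[0] == 0x07 and buf[1] == 0xf8; the two low bits shifted
   out of buf[0] are carried into the high bits of buf[1].  */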
9295 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9296 directly on the VECTOR_CST encoding, in a way that works for variable-
9297 length vectors. Return the resulting VECTOR_CST on success or null
9298 on failure. */
9300 static tree
9301 fold_view_convert_vector_encoding (tree type, tree expr)
9303 tree expr_type = TREE_TYPE (expr);
9304 poly_uint64 type_bits, expr_bits;
9305 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9306 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9307 return NULL_TREE;
9309 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9310 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9311 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9312 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9314 /* We can only preserve the semantics of a stepped pattern if the new
9315 vector element is an integer of the same size. */
9316 if (VECTOR_CST_STEPPED_P (expr)
9317 && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits))
9318 return NULL_TREE;
9320 /* The number of bits needed to encode one element from every pattern
9321 of the original vector. */
9322 unsigned int expr_sequence_bits
9323 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9325 /* The number of bits needed to encode one element from every pattern
9326 of the result. */
9327 unsigned int type_sequence_bits
9328 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9330 /* Don't try to read more bytes than are available, which can happen
9331 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9332 The general VIEW_CONVERT handling can cope with that case, so there's
9333 no point complicating things here. */
9334 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9335 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9336 BITS_PER_UNIT);
9337 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9338 if (known_gt (buffer_bits, expr_bits))
9339 return NULL_TREE;
9341 /* Get enough bytes of EXPR to form the new encoding. */
9342 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9343 buffer.quick_grow (buffer_bytes);
9344 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9345 buffer_bits / expr_elt_bits)
9346 != (int) buffer_bytes)
9347 return NULL_TREE;
9349 /* Reencode the bytes as TYPE. */
9350 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9351 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9352 type_npatterns, nelts_per_pattern);
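/* For example, on a little-endian target a variable-length duplication
   of the uint16_t value 1, i.e. { 1, 1, 1, ... }, view-converted to a
   vector of uint8_t re-encodes the two bytes of one element with
   type_npatterns == 2, giving the duplicated byte pair
   { 1, 0, 1, 0, ... }.  */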
9355 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9356 TYPE at compile-time. If we're unable to perform the conversion
9357 return NULL_TREE. */
9359 static tree
9360 fold_view_convert_expr (tree type, tree expr)
9362 unsigned char buffer[128];
9363 unsigned char *buf;
9364 int len;
9365 HOST_WIDE_INT l;
9367 /* Check that the host and target are sane. */
9368 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9369 return NULL_TREE;
9371 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9372 if (tree res = fold_view_convert_vector_encoding (type, expr))
9373 return res;
9375 l = int_size_in_bytes (type);
9376 if (l > (int) sizeof (buffer)
9377 && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9379 buf = XALLOCAVEC (unsigned char, l);
9380 len = l;
9382 else
9384 buf = buffer;
9385 len = sizeof (buffer);
9387 len = native_encode_expr (expr, buf, len);
9388 if (len == 0)
9389 return NULL_TREE;
9391 return native_interpret_expr (type, buf, len);
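/* For instance, assuming a 32-bit int and IEEE single precision,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST 0x3f800000:
   the float is encoded into its target byte representation and the
   bytes are then reinterpreted as an int.  */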
9394 /* Build an expression for the address of T. Folds away INDIRECT_REF
9395 to avoid confusing the gimplify process. */
9397 tree
9398 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9400 /* The size of the object is not relevant when talking about its address. */
9401 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9402 t = TREE_OPERAND (t, 0);
9404 if (INDIRECT_REF_P (t))
9406 t = TREE_OPERAND (t, 0);
9408 if (TREE_TYPE (t) != ptrtype)
9409 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9411 else if (TREE_CODE (t) == MEM_REF
9412 && integer_zerop (TREE_OPERAND (t, 1)))
9414 t = TREE_OPERAND (t, 0);
9416 if (TREE_TYPE (t) != ptrtype)
9417 t = fold_convert_loc (loc, ptrtype, t);
9419 else if (TREE_CODE (t) == MEM_REF
9420 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9421 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9422 TREE_OPERAND (t, 0),
9423 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9424 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9426 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9428 if (TREE_TYPE (t) != ptrtype)
9429 t = fold_convert_loc (loc, ptrtype, t);
9431 else
9432 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9434 return t;
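/* For example, building the address of *p yields p itself (converted
   to PTRTYPE if needed) rather than a new ADDR_EXPR, and likewise
   &MEM_REF (p, 0) folds back to p.  */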
9437 /* Build an expression for the address of T. */
9439 tree
9440 build_fold_addr_expr_loc (location_t loc, tree t)
9442 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9444 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9447 /* Fold a unary expression of code CODE and type TYPE with operand
9448 OP0. Return the folded expression if folding is successful.
9449 Otherwise, return NULL_TREE. */
9451 tree
9452 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9454 tree tem;
9455 tree arg0;
9456 enum tree_code_class kind = TREE_CODE_CLASS (code);
9458 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9459 && TREE_CODE_LENGTH (code) == 1);
9461 arg0 = op0;
9462 if (arg0)
9464 if (CONVERT_EXPR_CODE_P (code)
9465 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9467 /* Don't use STRIP_NOPS, because signedness of argument type
9468 matters. */
9469 STRIP_SIGN_NOPS (arg0);
9471 else
9473 /* Strip any conversions that don't change the mode. This
9474 is safe for every expression, except for a comparison
9475 expression because its signedness is derived from its
9476 operands.
9478 Note that this is done as an internal manipulation within
9479 the constant folder, in order to find the simplest
9480 representation of the arguments so that their form can be
9481 studied. In any case, the appropriate type conversions
9482 should be put back in the tree that will get out of the
9483 constant folder. */
9484 STRIP_NOPS (arg0);
9487 if (CONSTANT_CLASS_P (arg0))
9489 tree tem = const_unop (code, type, arg0);
9490 if (tem)
9492 if (TREE_TYPE (tem) != type)
9493 tem = fold_convert_loc (loc, type, tem);
9494 return tem;
9499 tem = generic_simplify (loc, code, type, op0);
9500 if (tem)
9501 return tem;
9503 if (TREE_CODE_CLASS (code) == tcc_unary)
9505 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9506 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9507 fold_build1_loc (loc, code, type,
9508 fold_convert_loc (loc, TREE_TYPE (op0),
9509 TREE_OPERAND (arg0, 1))));
9510 else if (TREE_CODE (arg0) == COND_EXPR)
9512 tree arg01 = TREE_OPERAND (arg0, 1);
9513 tree arg02 = TREE_OPERAND (arg0, 2);
9514 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9515 arg01 = fold_build1_loc (loc, code, type,
9516 fold_convert_loc (loc,
9517 TREE_TYPE (op0), arg01));
9518 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9519 arg02 = fold_build1_loc (loc, code, type,
9520 fold_convert_loc (loc,
9521 TREE_TYPE (op0), arg02));
9522 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9523 arg01, arg02);
9525 /* If this was a conversion, and all we did was to move it
9526 inside the COND_EXPR, bring it back out. But leave it if
9527 it is a conversion from integer to integer and the
9528 result precision is no wider than a word since such a
9529 conversion is cheap and may be optimized away by combine,
9530 while it couldn't if it were outside the COND_EXPR. Then return
9531 so we don't get into an infinite recursion loop taking the
9532 conversion out and then back in. */
9534 if ((CONVERT_EXPR_CODE_P (code)
9535 || code == NON_LVALUE_EXPR)
9536 && TREE_CODE (tem) == COND_EXPR
9537 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9538 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9539 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9540 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9541 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9542 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9543 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9544 && (INTEGRAL_TYPE_P
9545 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9546 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9547 || flag_syntax_only))
9548 tem = build1_loc (loc, code, type,
9549 build3 (COND_EXPR,
9550 TREE_TYPE (TREE_OPERAND
9551 (TREE_OPERAND (tem, 1), 0)),
9552 TREE_OPERAND (tem, 0),
9553 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9554 TREE_OPERAND (TREE_OPERAND (tem, 2),
9555 0)));
9556 return tem;
9560 switch (code)
9562 case NON_LVALUE_EXPR:
9563 if (!maybe_lvalue_p (op0))
9564 return fold_convert_loc (loc, type, op0);
9565 return NULL_TREE;
9567 CASE_CONVERT:
9568 case FLOAT_EXPR:
9569 case FIX_TRUNC_EXPR:
9570 if (COMPARISON_CLASS_P (op0))
9572 /* If we have (type) (a CMP b) and type is an integral type, return
9573 new expression involving the new type. Canonicalize
9574 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9575 non-integral type.
9576 Do not fold the result as that would not simplify further; also
9577 folding again results in recursion. */
9578 if (TREE_CODE (type) == BOOLEAN_TYPE)
9579 return build2_loc (loc, TREE_CODE (op0), type,
9580 TREE_OPERAND (op0, 0),
9581 TREE_OPERAND (op0, 1));
9582 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9583 && TREE_CODE (type) != VECTOR_TYPE)
9584 return build3_loc (loc, COND_EXPR, type, op0,
9585 constant_boolean_node (true, type),
9586 constant_boolean_node (false, type));
9589 /* Handle (T *)&A.B.C for A being of type T and B and C
9590 living at offset zero. This occurs frequently in
9591 C++ upcasting and then accessing the base. */
9592 if (TREE_CODE (op0) == ADDR_EXPR
9593 && POINTER_TYPE_P (type)
9594 && handled_component_p (TREE_OPERAND (op0, 0)))
9596 poly_int64 bitsize, bitpos;
9597 tree offset;
9598 machine_mode mode;
9599 int unsignedp, reversep, volatilep;
9600 tree base
9601 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9602 &offset, &mode, &unsignedp, &reversep,
9603 &volatilep);
9604 /* If the reference was to a (constant) zero offset, we can use
9605 the address of the base if it has the same base type
9606 as the result type and the pointer type is unqualified. */
9607 if (!offset
9608 && known_eq (bitpos, 0)
9609 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9610 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9611 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9612 return fold_convert_loc (loc, type,
9613 build_fold_addr_expr_loc (loc, base));
9616 if (TREE_CODE (op0) == MODIFY_EXPR
9617 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9618 /* Detect assigning a bitfield. */
9619 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9620 && DECL_BIT_FIELD
9621 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9623 /* Don't leave an assignment inside a conversion
9624 unless assigning a bitfield. */
9625 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9626 /* First do the assignment, then return converted constant. */
9627 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9628 suppress_warning (tem /* What warning? */);
9629 TREE_USED (tem) = 1;
9630 return tem;
9633 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9634 constant (if x has signed type, the sign bit cannot be set
9635 in c). This folds extension into the BIT_AND_EXPR.
9636 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9637 very likely don't have maximal range for their precision and this
9638 transformation effectively doesn't preserve non-maximal ranges. */
9639 if (TREE_CODE (type) == INTEGER_TYPE
9640 && TREE_CODE (op0) == BIT_AND_EXPR
9641 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9643 tree and_expr = op0;
9644 tree and0 = TREE_OPERAND (and_expr, 0);
9645 tree and1 = TREE_OPERAND (and_expr, 1);
9646 int change = 0;
9648 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9649 || (TYPE_PRECISION (type)
9650 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9651 change = 1;
9652 else if (TYPE_PRECISION (TREE_TYPE (and1))
9653 <= HOST_BITS_PER_WIDE_INT
9654 && tree_fits_uhwi_p (and1))
9656 unsigned HOST_WIDE_INT cst;
9658 cst = tree_to_uhwi (and1);
9659 cst &= HOST_WIDE_INT_M1U
9660 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9661 change = (cst == 0);
9662 if (change
9663 && !flag_syntax_only
9664 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9665 == ZERO_EXTEND))
9667 tree uns = unsigned_type_for (TREE_TYPE (and0));
9668 and0 = fold_convert_loc (loc, uns, and0);
9669 and1 = fold_convert_loc (loc, uns, and1);
9672 if (change)
9674 tree and1_type = TREE_TYPE (and1);
9675 unsigned prec = MAX (TYPE_PRECISION (and1_type),
9676 TYPE_PRECISION (type));
9677 tem = force_fit_type (type,
9678 wide_int::from (wi::to_wide (and1), prec,
9679 TYPE_SIGN (and1_type)),
9680 0, TREE_OVERFLOW (and1));
9681 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9682 fold_convert_loc (loc, type, and0), tem);
9686 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9687 cast (T1)X will fold away. We assume that this happens when X itself
9688 is a cast. */
9689 if (POINTER_TYPE_P (type)
9690 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9691 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9693 tree arg00 = TREE_OPERAND (arg0, 0);
9694 tree arg01 = TREE_OPERAND (arg0, 1);
9696 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9697 when the pointed type needs higher alignment than
9698 the p+ first operand's pointed type. */
9699 if (!in_gimple_form
9700 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9701 && (min_align_of_type (TREE_TYPE (type))
9702 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9703 return NULL_TREE;
9705 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9706 when type is a reference type and arg00's type is not,
9707 because arg00 could be validly nullptr and if arg01 doesn't return,
9708 we don't want false positive binding of reference to nullptr. */
9709 if (TREE_CODE (type) == REFERENCE_TYPE
9710 && !in_gimple_form
9711 && sanitize_flags_p (SANITIZE_NULL)
9712 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9713 return NULL_TREE;
9715 arg00 = fold_convert_loc (loc, type, arg00);
9716 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9719 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9720 of the same precision, and X is an integer type not narrower than
9721 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9722 if (INTEGRAL_TYPE_P (type)
9723 && TREE_CODE (op0) == BIT_NOT_EXPR
9724 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9725 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9726 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9728 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9729 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9730 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9731 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9732 fold_convert_loc (loc, type, tem));
9735 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9736 type of X and Y (integer types only). */
9737 if (INTEGRAL_TYPE_P (type)
9738 && TREE_CODE (op0) == MULT_EXPR
9739 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9740 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9741 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9742 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9744 /* Be careful not to introduce new overflows. */
9745 tree mult_type;
9746 if (TYPE_OVERFLOW_WRAPS (type))
9747 mult_type = type;
9748 else
9749 mult_type = unsigned_type_for (type);
9751 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9753 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9754 fold_convert_loc (loc, mult_type,
9755 TREE_OPERAND (op0, 0)),
9756 fold_convert_loc (loc, mult_type,
9757 TREE_OPERAND (op0, 1)));
9758 return fold_convert_loc (loc, type, tem);
9762 return NULL_TREE;
9764 case VIEW_CONVERT_EXPR:
9765 if (TREE_CODE (op0) == MEM_REF)
9767 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9768 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9769 tem = fold_build2_loc (loc, MEM_REF, type,
9770 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9771 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9772 return tem;
9775 return NULL_TREE;
9777 case NEGATE_EXPR:
9778 tem = fold_negate_expr (loc, arg0);
9779 if (tem)
9780 return fold_convert_loc (loc, type, tem);
9781 return NULL_TREE;
9783 case ABS_EXPR:
9784 /* Convert fabs((double)float) into (double)fabsf(float). */
9785 if (TREE_CODE (arg0) == NOP_EXPR
9786 && TREE_CODE (type) == REAL_TYPE)
9788 tree targ0 = strip_float_extensions (arg0);
9789 if (targ0 != arg0)
9790 return fold_convert_loc (loc, type,
9791 fold_build1_loc (loc, ABS_EXPR,
9792 TREE_TYPE (targ0),
9793 targ0));
9795 return NULL_TREE;
9797 case BIT_NOT_EXPR:
9798 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9799 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9800 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9801 fold_convert_loc (loc, type,
9802 TREE_OPERAND (arg0, 0)))))
9803 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9804 fold_convert_loc (loc, type,
9805 TREE_OPERAND (arg0, 1)));
9806 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9807 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9808 fold_convert_loc (loc, type,
9809 TREE_OPERAND (arg0, 1)))))
9810 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9811 fold_convert_loc (loc, type,
9812 TREE_OPERAND (arg0, 0)), tem);
9814 return NULL_TREE;
9816 case TRUTH_NOT_EXPR:
9817 /* Note that the operand of this must be an int
9818 and its values must be 0 or 1.
9819 ("true" is a fixed value perhaps depending on the language,
9820 but we don't handle values other than 1 correctly yet.) */
9821 tem = fold_truth_not_expr (loc, arg0);
9822 if (!tem)
9823 return NULL_TREE;
9824 return fold_convert_loc (loc, type, tem);
9826 case INDIRECT_REF:
9827 /* Fold *&X to X if X is an lvalue. */
9828 if (TREE_CODE (op0) == ADDR_EXPR)
9830 tree op00 = TREE_OPERAND (op0, 0);
9831 if ((VAR_P (op00)
9832 || TREE_CODE (op00) == PARM_DECL
9833 || TREE_CODE (op00) == RESULT_DECL)
9834 && !TREE_READONLY (op00))
9835 return op00;
9837 return NULL_TREE;
9839 default:
9840 return NULL_TREE;
9841 } /* switch (code) */
9845 /* If the operation was a conversion do _not_ mark a resulting constant
9846 with TREE_OVERFLOW if the original constant was not. These conversions
9847 have implementation defined behavior and retaining the TREE_OVERFLOW
9848 flag here would confuse later passes such as VRP. */
9849 tree
9850 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9851 tree type, tree op0)
9853 tree res = fold_unary_loc (loc, code, type, op0);
9854 if (res
9855 && TREE_CODE (res) == INTEGER_CST
9856 && TREE_CODE (op0) == INTEGER_CST
9857 && CONVERT_EXPR_CODE_P (code))
9858 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9860 return res;
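/* For example, if OP0 is an INTEGER_CST without TREE_OVERFLOW and the
   conversion to TYPE wraps, the resulting INTEGER_CST is returned with
   its TREE_OVERFLOW flag forced back to OP0's (clear) flag, so that
   passes such as VRP do not treat it as an overflowed constant.  */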
9863 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9864 operands OP0 and OP1. LOC is the location of the resulting expression.
9865 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
9866 Return the folded expression if folding is successful. Otherwise,
9867 return NULL_TREE. */
9868 static tree
9869 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9870 tree arg0, tree arg1, tree op0, tree op1)
9872 tree tem;
9874 /* We only do these simplifications if we are optimizing. */
9875 if (!optimize)
9876 return NULL_TREE;
9878 /* Check for things like (A || B) && (A || C). We can convert this
9879 to A || (B && C). Note that either operator can be any of the four
9880 truth and/or operations and the transformation will still be
9881 valid. Also note that we only care about order for the
9882 ANDIF and ORIF operators. If B contains side effects, this
9883 might change the truth-value of A. */
9884 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9885 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9886 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9887 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9888 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9889 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9891 tree a00 = TREE_OPERAND (arg0, 0);
9892 tree a01 = TREE_OPERAND (arg0, 1);
9893 tree a10 = TREE_OPERAND (arg1, 0);
9894 tree a11 = TREE_OPERAND (arg1, 1);
9895 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9896 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9897 && (code == TRUTH_AND_EXPR
9898 || code == TRUTH_OR_EXPR));
9900 if (operand_equal_p (a00, a10, 0))
9901 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9902 fold_build2_loc (loc, code, type, a01, a11));
9903 else if (commutative && operand_equal_p (a00, a11, 0))
9904 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9905 fold_build2_loc (loc, code, type, a01, a10));
9906 else if (commutative && operand_equal_p (a01, a10, 0))
9907 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9908 fold_build2_loc (loc, code, type, a00, a11));
9910 /* This case is tricky because we must either have commutative
9911 operators or else A10 must not have side-effects. */
9913 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9914 && operand_equal_p (a01, a11, 0))
9915 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9916 fold_build2_loc (loc, code, type, a00, a10),
9917 a01);
9920 /* See if we can build a range comparison. */
9921 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9922 return tem;
9924 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9925 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9927 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9928 if (tem)
9929 return fold_build2_loc (loc, code, type, tem, arg1);
9932 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9933 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9935 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9936 if (tem)
9937 return fold_build2_loc (loc, code, type, arg0, tem);
9940 /* Check for the possibility of merging component references. If our
9941 lhs is another similar operation, try to merge its rhs with our
9942 rhs. Then try to merge our lhs and rhs. */
9943 if (TREE_CODE (arg0) == code
9944 && (tem = fold_truth_andor_1 (loc, code, type,
9945 TREE_OPERAND (arg0, 1), arg1)) != 0)
9946 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9948 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9949 return tem;
9951 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9952 if (param_logical_op_non_short_circuit != -1)
9953 logical_op_non_short_circuit
9954 = param_logical_op_non_short_circuit;
9955 if (logical_op_non_short_circuit
9956 && !sanitize_coverage_p ()
9957 && (code == TRUTH_AND_EXPR
9958 || code == TRUTH_ANDIF_EXPR
9959 || code == TRUTH_OR_EXPR
9960 || code == TRUTH_ORIF_EXPR))
9962 enum tree_code ncode, icode;
9964 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9965 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9966 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9968 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9969 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
9970 We don't want to pack more than two leaves into a non-IF AND/OR
9971 expression.
9972 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9973 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9974 If the inner right-hand side of the left-hand operand has
9975 side-effects, or isn't simple, then we can't add to it,
9976 as otherwise we might destroy the if-sequence. */
9977 if (TREE_CODE (arg0) == icode
9978 && simple_condition_p (arg1)
9979 /* Needed for sequence points to handle traps and
9980 side-effects. */
9981 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9983 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9984 arg1);
9985 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9986 tem);
9988 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9989 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9990 else if (TREE_CODE (arg1) == icode
9991 && simple_condition_p (arg0)
9992 /* Needed for sequence points to handle traps and
9993 side-effects. */
9994 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9996 tem = fold_build2_loc (loc, ncode, type,
9997 arg0, TREE_OPERAND (arg1, 0));
9998 return fold_build2_loc (loc, icode, type, tem,
9999 TREE_OPERAND (arg1, 1));
10001 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
10002 into (A OR B).
10003 For sequence point consistency, we need to check for traps
10004 and side-effects. */
10005 else if (code == icode && simple_condition_p (arg0)
10006 && simple_condition_p (arg1))
10007 return fold_build2_loc (loc, ncode, type, arg0, arg1);
10010 return NULL_TREE;
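/* For example, (a || b) && (a || c) folds to a || (b && c), and when
   LOGICAL_OP_NON_SHORT_CIRCUIT is in effect (a && b) && c is rebuilt
   as a && (b AND c) with a non-short-circuit inner TRUTH_AND_EXPR,
   provided b and c are simple conditions without side-effects.  */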
10013 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
10014 by changing CODE to reduce the magnitude of constants involved in
10015 ARG0 of the comparison.
10016 Returns a canonicalized comparison tree if a simplification was
10017 possible, otherwise returns NULL_TREE.
10018 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
10019 valid if signed overflow is undefined. */
10021 static tree
10022 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
10023 tree arg0, tree arg1,
10024 bool *strict_overflow_p)
10026 enum tree_code code0 = TREE_CODE (arg0);
10027 tree t, cst0 = NULL_TREE;
10028 int sgn0;
10030 /* Match A +- CST code arg1. We can change this only if overflow
10031 is undefined. */
10032 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10033 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
10034 /* In principle pointers also have undefined overflow behavior,
10035 but that causes problems elsewhere. */
10036 && !POINTER_TYPE_P (TREE_TYPE (arg0))
10037 && (code0 == MINUS_EXPR
10038 || code0 == PLUS_EXPR)
10039 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
10040 return NULL_TREE;
10042 /* Identify the constant in arg0 and its sign. */
10043 cst0 = TREE_OPERAND (arg0, 1);
10044 sgn0 = tree_int_cst_sgn (cst0);
10046 /* Overflowed constants and zero will cause problems. */
10047 if (integer_zerop (cst0)
10048 || TREE_OVERFLOW (cst0))
10049 return NULL_TREE;
10051 /* See if we can reduce the magnitude of the constant in
10052 arg0 by changing the comparison code. */
10053 /* A - CST < arg1 -> A - CST-1 <= arg1. */
10054 if (code == LT_EXPR
10055 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10056 code = LE_EXPR;
10057 /* A + CST > arg1 -> A + CST-1 >= arg1. */
10058 else if (code == GT_EXPR
10059 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10060 code = GE_EXPR;
10061 /* A + CST <= arg1 -> A + CST-1 < arg1. */
10062 else if (code == LE_EXPR
10063 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10064 code = LT_EXPR;
10065 /* A - CST >= arg1 -> A - CST-1 > arg1. */
10066 else if (code == GE_EXPR
10067 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10068 code = GT_EXPR;
10069 else
10070 return NULL_TREE;
10071 *strict_overflow_p = true;
10073 /* Now build the constant reduced in magnitude. But not if that
10074 would produce one outside of its type's range. */
10075 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
10076 && ((sgn0 == 1
10077 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
10078 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
10079 || (sgn0 == -1
10080 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
10081 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
10082 return NULL_TREE;
10084 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
10085 cst0, build_int_cst (TREE_TYPE (cst0), 1));
10086 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
10087 t = fold_convert (TREE_TYPE (arg1), t);
10089 return fold_build2_loc (loc, code, type, t, arg1);
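/* For instance, X + 2 > Y canonicalizes to X + 1 >= Y: the magnitude
   of the constant shrinks by one while the comparison result stays
   the same whenever signed overflow is undefined, which is why
   *STRICT_OVERFLOW_P is set.  */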
10092 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
10093 overflow further. Try to decrease the magnitude of constants involved
10094 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
10095 and put sole constants at the second argument position.
10096 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
10098 static tree
10099 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
10100 tree arg0, tree arg1)
10102 tree t;
10103 bool strict_overflow_p;
10104 const char * const warnmsg = G_("assuming signed overflow does not occur "
10105 "when reducing constant in comparison");
10107 /* Try canonicalization by simplifying arg0. */
10108 strict_overflow_p = false;
10109 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
10110 &strict_overflow_p);
10111 if (t)
10113 if (strict_overflow_p)
10114 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10115 return t;
10118 /* Try canonicalization by simplifying arg1 using the swapped
10119 comparison. */
10120 code = swap_tree_comparison (code);
10121 strict_overflow_p = false;
10122 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
10123 &strict_overflow_p);
10124 if (t && strict_overflow_p)
10125 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10126 return t;
10129 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
10130 space. This is used to avoid issuing overflow warnings for
10131 expressions like &p->x which cannot wrap. */
10133 static bool
10134 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10136 if (!POINTER_TYPE_P (TREE_TYPE (base)))
10137 return true;
10139 if (maybe_lt (bitpos, 0))
10140 return true;
10142 poly_wide_int wi_offset;
10143 int precision = TYPE_PRECISION (TREE_TYPE (base));
10144 if (offset == NULL_TREE)
10145 wi_offset = wi::zero (precision);
10146 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10147 return true;
10148 else
10149 wi_offset = wi::to_poly_wide (offset);
10151 wi::overflow_type overflow;
10152 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10153 precision);
10154 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10155 if (overflow)
10156 return true;
10158 poly_uint64 total_hwi, size;
10159 if (!total.to_uhwi (&total_hwi)
10160 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10161 &size)
10162 || known_eq (size, 0U))
10163 return true;
10165 if (known_le (total_hwi, size))
10166 return false;
10168 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10169 array. */
10170 if (TREE_CODE (base) == ADDR_EXPR
10171 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10172 &size)
10173 && maybe_ne (size, 0U)
10174 && known_le (total_hwi, size))
10175 return false;
10177 return true;
10180 /* Return a positive integer when the symbol DECL is known to have
10181 a nonzero address, zero when it's known not to (e.g., it's a weak
10182 symbol), and a negative integer when the symbol is not yet in the
10183 symbol table and so whether or not its address is zero is unknown.
10184 For function-local objects, always return a positive integer. */
10185 static int
10186 maybe_nonzero_address (tree decl)
10188 /* Normally, don't do anything for variables and functions before symtab is
10189 built; it is quite possible that DECL will be declared weak later.
10190 But if folding_initializer, we need a constant answer now, so create
10191 the symtab entry and prevent later weak declaration. */
10192 if (DECL_P (decl) && decl_in_symtab_p (decl))
10193 if (struct symtab_node *symbol
10194 = (folding_initializer
10195 ? symtab_node::get_create (decl)
10196 : symtab_node::get (decl)))
10197 return symbol->nonzero_address ();
10199 /* Function local objects are never NULL. */
10200 if (DECL_P (decl)
10201 && (DECL_CONTEXT (decl)
10202 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10203 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10204 return 1;
10206 return -1;
10209 /* Subroutine of fold_binary. This routine performs all of the
10210 transformations that are common to the equality/inequality
10211 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10212 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10213 fold_binary should call fold_binary instead of this routine. Fold a comparison with
10214 tree code CODE and type TYPE with operands OP0 and OP1. Return
10215 the folded comparison or NULL_TREE. */
10217 static tree
10218 fold_comparison (location_t loc, enum tree_code code, tree type,
10219 tree op0, tree op1)
10221 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10222 tree arg0, arg1, tem;
10224 arg0 = op0;
10225 arg1 = op1;
10227 STRIP_SIGN_NOPS (arg0);
10228 STRIP_SIGN_NOPS (arg1);
10230 /* For comparisons of pointers we can decompose it to a compile time
10231 comparison of the base objects and the offsets into the object.
10232 This requires at least one operand being an ADDR_EXPR or a
10233 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10234 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10235 && (TREE_CODE (arg0) == ADDR_EXPR
10236 || TREE_CODE (arg1) == ADDR_EXPR
10237 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10238 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10240 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10241 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10242 machine_mode mode;
10243 int volatilep, reversep, unsignedp;
10244 bool indirect_base0 = false, indirect_base1 = false;
10246 /* Get base and offset for the access. Strip ADDR_EXPR for
10247 get_inner_reference, but put it back by stripping INDIRECT_REF
10248 off the base object if possible. indirect_baseN will be true
10249 if baseN is not an address but refers to the object itself. */
10250 base0 = arg0;
10251 if (TREE_CODE (arg0) == ADDR_EXPR)
10253 base0
10254 = get_inner_reference (TREE_OPERAND (arg0, 0),
10255 &bitsize, &bitpos0, &offset0, &mode,
10256 &unsignedp, &reversep, &volatilep);
10257 if (INDIRECT_REF_P (base0))
10258 base0 = TREE_OPERAND (base0, 0);
10259 else
10260 indirect_base0 = true;
10262 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10264 base0 = TREE_OPERAND (arg0, 0);
10265 STRIP_SIGN_NOPS (base0);
10266 if (TREE_CODE (base0) == ADDR_EXPR)
10268 base0
10269 = get_inner_reference (TREE_OPERAND (base0, 0),
10270 &bitsize, &bitpos0, &offset0, &mode,
10271 &unsignedp, &reversep, &volatilep);
10272 if (INDIRECT_REF_P (base0))
10273 base0 = TREE_OPERAND (base0, 0);
10274 else
10275 indirect_base0 = true;
10277 if (offset0 == NULL_TREE || integer_zerop (offset0))
10278 offset0 = TREE_OPERAND (arg0, 1);
10279 else
10280 offset0 = size_binop (PLUS_EXPR, offset0,
10281 TREE_OPERAND (arg0, 1));
10282 if (poly_int_tree_p (offset0))
10284 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10285 TYPE_PRECISION (sizetype));
10286 tem <<= LOG2_BITS_PER_UNIT;
10287 tem += bitpos0;
10288 if (tem.to_shwi (&bitpos0))
10289 offset0 = NULL_TREE;
10293 base1 = arg1;
10294 if (TREE_CODE (arg1) == ADDR_EXPR)
10296 base1
10297 = get_inner_reference (TREE_OPERAND (arg1, 0),
10298 &bitsize, &bitpos1, &offset1, &mode,
10299 &unsignedp, &reversep, &volatilep);
10300 if (INDIRECT_REF_P (base1))
10301 base1 = TREE_OPERAND (base1, 0);
10302 else
10303 indirect_base1 = true;
10305 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10307 base1 = TREE_OPERAND (arg1, 0);
10308 STRIP_SIGN_NOPS (base1);
10309 if (TREE_CODE (base1) == ADDR_EXPR)
10311 base1
10312 = get_inner_reference (TREE_OPERAND (base1, 0),
10313 &bitsize, &bitpos1, &offset1, &mode,
10314 &unsignedp, &reversep, &volatilep);
10315 if (INDIRECT_REF_P (base1))
10316 base1 = TREE_OPERAND (base1, 0);
10317 else
10318 indirect_base1 = true;
10320 if (offset1 == NULL_TREE || integer_zerop (offset1))
10321 offset1 = TREE_OPERAND (arg1, 1);
10322 else
10323 offset1 = size_binop (PLUS_EXPR, offset1,
10324 TREE_OPERAND (arg1, 1));
10325 if (poly_int_tree_p (offset1))
10327 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10328 TYPE_PRECISION (sizetype));
10329 tem <<= LOG2_BITS_PER_UNIT;
10330 tem += bitpos1;
10331 if (tem.to_shwi (&bitpos1))
10332 offset1 = NULL_TREE;
10336 /* If we have equivalent bases we might be able to simplify. */
10337 if (indirect_base0 == indirect_base1
10338 && operand_equal_p (base0, base1,
10339 indirect_base0 ? OEP_ADDRESS_OF : 0))
10341 /* We can fold this expression to a constant if the non-constant
10342 offset parts are equal. */
10343 if ((offset0 == offset1
10344 || (offset0 && offset1
10345 && operand_equal_p (offset0, offset1, 0)))
10346 && (equality_code
10347 || (indirect_base0
10348 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10349 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10351 if (!equality_code
10352 && maybe_ne (bitpos0, bitpos1)
10353 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10354 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10355 fold_overflow_warning (("assuming pointer wraparound does not "
10356 "occur when comparing P +- C1 with "
10357 "P +- C2"),
10358 WARN_STRICT_OVERFLOW_CONDITIONAL);
10360 switch (code)
10362 case EQ_EXPR:
10363 if (known_eq (bitpos0, bitpos1))
10364 return constant_boolean_node (true, type);
10365 if (known_ne (bitpos0, bitpos1))
10366 return constant_boolean_node (false, type);
10367 break;
10368 case NE_EXPR:
10369 if (known_ne (bitpos0, bitpos1))
10370 return constant_boolean_node (true, type);
10371 if (known_eq (bitpos0, bitpos1))
10372 return constant_boolean_node (false, type);
10373 break;
10374 case LT_EXPR:
10375 if (known_lt (bitpos0, bitpos1))
10376 return constant_boolean_node (true, type);
10377 if (known_ge (bitpos0, bitpos1))
10378 return constant_boolean_node (false, type);
10379 break;
10380 case LE_EXPR:
10381 if (known_le (bitpos0, bitpos1))
10382 return constant_boolean_node (true, type);
10383 if (known_gt (bitpos0, bitpos1))
10384 return constant_boolean_node (false, type);
10385 break;
10386 case GE_EXPR:
10387 if (known_ge (bitpos0, bitpos1))
10388 return constant_boolean_node (true, type);
10389 if (known_lt (bitpos0, bitpos1))
10390 return constant_boolean_node (false, type);
10391 break;
10392 case GT_EXPR:
10393 if (known_gt (bitpos0, bitpos1))
10394 return constant_boolean_node (true, type);
10395 if (known_le (bitpos0, bitpos1))
10396 return constant_boolean_node (false, type);
10397 break;
10398 default:;
10401 /* We can simplify the comparison to a comparison of the variable
10402 offset parts if the constant offset parts are equal.
10403 Be careful to use signed sizetype here because otherwise we
10404 mess with array offsets in the wrong way. This is possible
10405 because pointer arithmetic is restricted to remain within an
10406 object and overflow on pointer differences is undefined as of
10407 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10408 else if (known_eq (bitpos0, bitpos1)
10409 && (equality_code
10410 || (indirect_base0
10411 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10412 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10414 /* By converting to signed sizetype we cover middle-end pointer
10415 arithmetic which operates on unsigned pointer types of size
10416 type size and ARRAY_REF offsets which are properly sign or
10417 zero extended from their type in case it is narrower than
10418 sizetype. */
10419 if (offset0 == NULL_TREE)
10420 offset0 = build_int_cst (ssizetype, 0);
10421 else
10422 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10423 if (offset1 == NULL_TREE)
10424 offset1 = build_int_cst (ssizetype, 0);
10425 else
10426 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10428 if (!equality_code
10429 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10430 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10431 fold_overflow_warning (("assuming pointer wraparound does not "
10432 "occur when comparing P +- C1 with "
10433 "P +- C2"),
10434 WARN_STRICT_OVERFLOW_COMPARISON);
10436 return fold_build2_loc (loc, code, type, offset0, offset1);
10439 /* For equal offsets we can simplify to a comparison of the
10440 base addresses. */
10441 else if (known_eq (bitpos0, bitpos1)
10442 && (indirect_base0
10443 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10444 && (indirect_base1
10445 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10446 && ((offset0 == offset1)
10447 || (offset0 && offset1
10448 && operand_equal_p (offset0, offset1, 0))))
10450 if (indirect_base0)
10451 base0 = build_fold_addr_expr_loc (loc, base0);
10452 if (indirect_base1)
10453 base1 = build_fold_addr_expr_loc (loc, base1);
10454 return fold_build2_loc (loc, code, type, base0, base1);
10456 /* Comparison between an ordinary (non-weak) symbol and a null
10457 pointer can be eliminated since such symbols must have a non
10458 null address. In C, relational expressions between pointers
10459 to objects and null pointers are undefined. The results
10460 below follow the C++ rules with the additional property that
10461 every object pointer compares greater than a null pointer. */
10463 else if (((DECL_P (base0)
10464 && maybe_nonzero_address (base0) > 0
10465 /* Avoid folding references to struct members at offset 0 to
10466 prevent tests like '&ptr->firstmember == 0' from getting
10467 eliminated. When ptr is null, although the -> expression
10468 is strictly speaking invalid, GCC retains it as a matter
10469 of QoI. See PR c/44555. */
10470 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10471 || CONSTANT_CLASS_P (base0))
10472 && indirect_base0
10473 /* The caller guarantees that when one of the arguments is
10474 constant (i.e., null in this case) it is second. */
10475 && integer_zerop (arg1))
10477 switch (code)
10479 case EQ_EXPR:
10480 case LE_EXPR:
10481 case LT_EXPR:
10482 return constant_boolean_node (false, type);
10483 case GE_EXPR:
10484 case GT_EXPR:
10485 case NE_EXPR:
10486 return constant_boolean_node (true, type);
10487 default:
10488 gcc_unreachable ();
10493 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10494 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10495 the resulting offset is smaller in absolute value than the
10496 original one and has the same sign. */
10497 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10498 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10499 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10500 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10501 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10502 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10503 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10504 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10506 tree const1 = TREE_OPERAND (arg0, 1);
10507 tree const2 = TREE_OPERAND (arg1, 1);
10508 tree variable1 = TREE_OPERAND (arg0, 0);
10509 tree variable2 = TREE_OPERAND (arg1, 0);
10510 tree cst;
10511 const char * const warnmsg = G_("assuming signed overflow does not "
10512 "occur when combining constants around "
10513 "a comparison");
10515 /* Put the constant on the side where it doesn't overflow and is
10516 of lower absolute value and of the same sign as before. */
10517 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10518 ? MINUS_EXPR : PLUS_EXPR,
10519 const2, const1);
10520 if (!TREE_OVERFLOW (cst)
10521 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10522 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10524 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10525 return fold_build2_loc (loc, code, type,
10526 variable1,
10527 fold_build2_loc (loc, TREE_CODE (arg1),
10528 TREE_TYPE (arg1),
10529 variable2, cst));
10532 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10533 ? MINUS_EXPR : PLUS_EXPR,
10534 const1, const2);
10535 if (!TREE_OVERFLOW (cst)
10536 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10537 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10539 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10540 return fold_build2_loc (loc, code, type,
10541 fold_build2_loc (loc, TREE_CODE (arg0),
10542 TREE_TYPE (arg0),
10543 variable1, cst),
10544 variable2);
10548 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10549 if (tem)
10550 return tem;
10552 /* If we are comparing an expression that just has comparisons
10553 of two integer values, arithmetic expressions of those comparisons,
10554 and constants, we can simplify it. There are only three cases
10555 to check: the two values can either be equal, the first can be
10556 greater, or the second can be greater. Fold the expression for
10557 those three values. Since each value must be 0 or 1, we have
10558 eight possibilities, each of which corresponds to the constant 0
10559 or 1 or one of the six possible comparisons.
10561 This handles common cases like (a > b) == 0 but also handles
10562 expressions like ((x > y) - (y > x)) > 0, which supposedly
10563 occur in macroized code. */
10565 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10567 tree cval1 = 0, cval2 = 0;
10569 if (twoval_comparison_p (arg0, &cval1, &cval2)
10570 /* Don't handle degenerate cases here; they should already
10571 have been handled anyway. */
10572 && cval1 != 0 && cval2 != 0
10573 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10574 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10575 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10576 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10577 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10578 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10579 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10581 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10582 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10584 /* We can't just pass T to eval_subst in case cval1 or cval2
10585 was the same as ARG1. */
10587 tree high_result
10588 = fold_build2_loc (loc, code, type,
10589 eval_subst (loc, arg0, cval1, maxval,
10590 cval2, minval),
10591 arg1);
10592 tree equal_result
10593 = fold_build2_loc (loc, code, type,
10594 eval_subst (loc, arg0, cval1, maxval,
10595 cval2, maxval),
10596 arg1);
10597 tree low_result
10598 = fold_build2_loc (loc, code, type,
10599 eval_subst (loc, arg0, cval1, minval,
10600 cval2, maxval),
10601 arg1);
10603 /* All three of these results should be 0 or 1. Confirm they are.
10604 Then use those values to select the proper code to use. */
10606 if (TREE_CODE (high_result) == INTEGER_CST
10607 && TREE_CODE (equal_result) == INTEGER_CST
10608 && TREE_CODE (low_result) == INTEGER_CST)
10610 /* Make a 3-bit mask with the high-order bit being the
10611 value for `>', the next for '=', and the low for '<'. */
10612 switch ((integer_onep (high_result) * 4)
10613 + (integer_onep (equal_result) * 2)
10614 + integer_onep (low_result))
10616 case 0:
10617 /* Always false. */
10618 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10619 case 1:
10620 code = LT_EXPR;
10621 break;
10622 case 2:
10623 code = EQ_EXPR;
10624 break;
10625 case 3:
10626 code = LE_EXPR;
10627 break;
10628 case 4:
10629 code = GT_EXPR;
10630 break;
10631 case 5:
10632 code = NE_EXPR;
10633 break;
10634 case 6:
10635 code = GE_EXPR;
10636 break;
10637 case 7:
10638 /* Always true. */
10639 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10642 return fold_build2_loc (loc, code, type, cval1, cval2);
10647 return NULL_TREE;
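/* As an instance of the last transformation above, folding
   (a > b) == 0 substitutes the three possible orderings of a and b,
   giving high_result = 0, equal_result = 1 and low_result = 1; the
   resulting mask 0*4 + 1*2 + 1 == 3 selects LE_EXPR, so the whole
   expression becomes a <= b.  */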
10651 /* Subroutine of fold_binary. Optimize complex multiplications of the
10652 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10653 argument EXPR represents the expression "z" of type TYPE. */
10655 static tree
10656 fold_mult_zconjz (location_t loc, tree type, tree expr)
10658 tree itype = TREE_TYPE (type);
10659 tree rpart, ipart, tem;
10661 if (TREE_CODE (expr) == COMPLEX_EXPR)
10663 rpart = TREE_OPERAND (expr, 0);
10664 ipart = TREE_OPERAND (expr, 1);
10666 else if (TREE_CODE (expr) == COMPLEX_CST)
10668 rpart = TREE_REALPART (expr);
10669 ipart = TREE_IMAGPART (expr);
10671 else
10673 expr = save_expr (expr);
10674 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10675 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10678 rpart = save_expr (rpart);
10679 ipart = save_expr (ipart);
10680 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10681 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10682 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10683 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10684 build_zero_cst (itype));
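/* For example, with z = 3 + 4i, z * conj(z) folds to
   (3*3 + 4*4) + 0i = 25 + 0i; the imaginary part is always zero
   because the cross terms a*(-b) and b*a cancel.  */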
10688 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10689 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10690 true if successful. */
10692 static bool
10693 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10695 unsigned HOST_WIDE_INT i, nunits;
10697 if (TREE_CODE (arg) == VECTOR_CST
10698 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10700 for (i = 0; i < nunits; ++i)
10701 elts[i] = VECTOR_CST_ELT (arg, i);
10703 else if (TREE_CODE (arg) == CONSTRUCTOR)
10705 constructor_elt *elt;
10707 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10708 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10709 return false;
10710 else
10711 elts[i] = elt->value;
10713 else
10714 return false;
10715 for (; i < nelts; i++)
10716 elts[i]
10717 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10718 return true;
10721 /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10722 mask for VLA vec_perm folding.
10723 REASON, if specified, will contain the reason why SEL is not suitable.
10724 Used only for debugging and unit-testing. */
10726 static bool
10727 valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10728 const vec_perm_indices &sel,
10729 const char **reason = NULL)
10731 unsigned sel_npatterns = sel.encoding ().npatterns ();
10732 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10734 if (!(pow2p_hwi (sel_npatterns)
10735 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10736 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10738 if (reason)
10739 *reason = "npatterns is not power of 2";
10740 return false;
10743 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10744 E.g., sel.length = 2 + 2x and sel_npatterns = 4. */
10745 poly_uint64 esel;
10746 if (!multiple_p (sel.length (), sel_npatterns, &esel))
10748 if (reason)
10749 *reason = "sel.length is not multiple of sel_npatterns";
10750 return false;
10753 if (sel_nelts_per_pattern < 3)
10754 return true;
10756 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10758 poly_uint64 a1 = sel[pattern + sel_npatterns];
10759 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10760 HOST_WIDE_INT step;
10761 if (!poly_int64 (a2 - a1).is_constant (&step))
10763 if (reason)
10764 *reason = "step is not constant";
10765 return false;
10767 // FIXME: Punt on step < 0 for now, revisit later.
10768 if (step < 0)
10769 return false;
10770 if (step == 0)
10771 continue;
10773 if (!pow2p_hwi (step))
10775 if (reason)
10776 *reason = "step is not power of 2";
10777 return false;
10780 /* Ensure that stepped sequence of the pattern selects elements
10781 only from the same input vector. */
10782 uint64_t q1, qe;
10783 poly_uint64 r1, re;
10784 poly_uint64 ae = a1 + (esel - 2) * step;
10785 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10787 if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10788 && can_div_trunc_p (ae, arg_len, &qe, &re)
10789 && q1 == qe))
10791 if (reason)
10792 *reason = "crossed input vectors";
10793 return false;
10796 /* Ensure that the stepped sequence always selects from the same
10797 input pattern. */
10798 tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10799 unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10801 if (!multiple_p (step, arg_npatterns))
10803 if (reason)
10804 *reason = "step is not multiple of npatterns";
10805 return false;
10808 /* If a1 chooses base element from arg, ensure that it's a natural
10809 stepped sequence, i.e., (arg[2] - arg[1]) == (arg[1] - arg[0])
10810 to preserve arg's encoding. */
10812 if (maybe_lt (r1, arg_npatterns))
10814 unsigned HOST_WIDE_INT index;
10815 if (!r1.is_constant (&index))
10816 return false;
10818 tree arg_elem0 = vector_cst_elt (arg, index);
10819 tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10820 tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10822 tree step1, step2;
10823 if (!(step1 = const_binop (MINUS_EXPR, arg_elem1, arg_elem0))
10824 || !(step2 = const_binop (MINUS_EXPR, arg_elem2, arg_elem1))
10825 || !operand_equal_p (step1, step2, 0))
10827 if (reason)
10828 *reason = "not a natural stepped sequence";
10829 return false;
10834 return true;
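/* For instance, with arg_len == 4 + 4x, the stepped selector
   { 0, 1, 2, ... } always selects from ARG0 and is accepted, whereas
   { 0, 2, 4, ... } reaches element 6 + 8x, whose truncating division
   by 4 + 4x has quotient 1 rather than 0, so the mask is rejected
   with reason "crossed input vectors".  */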
10837 /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10838 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10839 REASON has the same purpose as described in
10840 valid_mask_for_fold_vec_perm_cst_p. */
10842 static tree
10843 fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10844 const char **reason = NULL)
10846 unsigned res_npatterns, res_nelts_per_pattern;
10847 unsigned HOST_WIDE_INT res_nelts;
10849 /* First try to implement the fold in a VLA-friendly way.
10851 (1) If the selector is simply a duplication of N elements, the
10852 result is likewise a duplication of N elements.
10854 (2) If the selector is N elements followed by a duplication
10855 of N elements, the result is too.
10857 (3) If the selector is N elements followed by an interleaving
10858 of N linear series, the situation is more complex.
10860 valid_mask_for_fold_vec_perm_cst_p detects whether we
10861 can handle this case. If we can, then each of the N linear
10862 series either (a) selects the same element each time or
10863 (b) selects a linear series from one of the input patterns.
10865 If (b) holds for one of the linear series, the result
10866 will contain a linear series, and so the result will have
10867 the same shape as the selector. If (a) holds for all of
10868 the linear series, the result will be the same as (2) above.
10870 (b) can only hold if one of the input patterns has a
10871 stepped encoding. */
10873 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10875 res_npatterns = sel.encoding ().npatterns ();
10876 res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10877 if (res_nelts_per_pattern == 3
10878 && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10879 && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10880 res_nelts_per_pattern = 2;
10881 res_nelts = res_npatterns * res_nelts_per_pattern;
10883 else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10885 res_npatterns = res_nelts;
10886 res_nelts_per_pattern = 1;
10888 else
10889 return NULL_TREE;
10891 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10892 for (unsigned i = 0; i < res_nelts; i++)
10894 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10895 uint64_t q;
10896 poly_uint64 r;
10897 unsigned HOST_WIDE_INT index;
10899 /* Punt if sel[i] / len (truncating division) cannot be determined,
10900 because the input vector to be chosen would depend on the
10901 runtime vector length.
10902 For example, if len == 4 + 4x and sel[i] == 4:
10903 if len at runtime equals 4, we choose arg1[0];
10904 for any other runtime value of len > 4, we choose arg0[4],
10905 which makes the element choice dependent on the runtime length. */
10906 if (!can_div_trunc_p (sel[i], len, &q, &r))
10908 if (reason)
10909 *reason = "cannot divide selector element by arg len";
10910 return NULL_TREE;
10913 /* sel[i] % len will give the index of element in the chosen input
10914 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10915 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10916 if (!r.is_constant (&index))
10918 if (reason)
10919 *reason = "remainder is not constant";
10920 return NULL_TREE;
10923 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10924 tree elem = vector_cst_elt (arg, index);
10925 out_elts.quick_push (elem);
10928 return out_elts.build ();
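/* Editorial sketch (hypothetical, not part of GCC): how q and r above
   select the input vector and the element within it.  With len == 4
   and sel[i] == 5, the quotient 1 is odd, so the element comes from
   arg1, and the remainder 1 is the index: the result is arg1[1].  */
#if 0
static unsigned
selector_decompose (unsigned sel_i, unsigned len)
{
  unsigned q = sel_i / len;    /* parity picks the input: even -> arg0 */
  unsigned r = sel_i % len;    /* element index within the chosen input */
  return (q & 1) * len + r;    /* position in the arg0|arg1 concatenation */
}
#endif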
10931 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10932 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10933 NULL_TREE otherwise. */
10935 tree
10936 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10938 unsigned int i;
10939 unsigned HOST_WIDE_INT nelts;
10941 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10942 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10943 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10945 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10946 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10947 return NULL_TREE;
10949 if (TREE_CODE (arg0) == VECTOR_CST
10950 && TREE_CODE (arg1) == VECTOR_CST)
10951 return fold_vec_perm_cst (type, arg0, arg1, sel);
10953 /* For the fallback case, we want to ensure we have VLS vectors
10954 of equal length. */
10955 if (!sel.length ().is_constant (&nelts))
10956 return NULL_TREE;
10958 gcc_assert (known_eq (sel.length (),
10959 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10960 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10961 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10962 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10963 return NULL_TREE;
10965 vec<constructor_elt, va_gc> *v;
10966 vec_alloc (v, nelts);
10967 for (i = 0; i < nelts; i++)
10969 HOST_WIDE_INT index;
10970 if (!sel[i].is_constant (&index))
10971 return NULL_TREE;
10972 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10974 return build_constructor (type, v);
10977 /* Try to fold a pointer difference of type TYPE between two address
10978 expressions of array references AREF0 and AREF1 using location LOC.
10979 Return a simplified expression for the difference or NULL_TREE. */
10981 static tree
10982 fold_addr_of_array_ref_difference (location_t loc, tree type,
10983 tree aref0, tree aref1,
10984 bool use_pointer_diff)
10986 tree base0 = TREE_OPERAND (aref0, 0);
10987 tree base1 = TREE_OPERAND (aref1, 0);
10988 tree base_offset = build_int_cst (type, 0);
10990 /* If the bases are array references as well, recurse. If the bases
10991 are pointer indirections compute the difference of the pointers.
10992 If the bases are equal, we are set. */
10993 if ((TREE_CODE (base0) == ARRAY_REF
10994 && TREE_CODE (base1) == ARRAY_REF
10995 && (base_offset
10996 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10997 use_pointer_diff)))
10998 || (INDIRECT_REF_P (base0)
10999 && INDIRECT_REF_P (base1)
11000 && (base_offset
11001 = use_pointer_diff
11002 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
11003 TREE_OPERAND (base0, 0),
11004 TREE_OPERAND (base1, 0))
11005 : fold_binary_loc (loc, MINUS_EXPR, type,
11006 fold_convert (type,
11007 TREE_OPERAND (base0, 0)),
11008 fold_convert (type,
11009 TREE_OPERAND (base1, 0)))))
11010 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
11012 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
11013 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
11014 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
11015 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
11016 return fold_build2_loc (loc, PLUS_EXPR, type,
11017 base_offset,
11018 fold_build2_loc (loc, MULT_EXPR, type,
11019 diff, esz));
11021 return NULL_TREE;
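/* Editorial sketch (hypothetical, not part of GCC): the source-level
   shape of the fold above.  The address difference needs no memory
   access; it reduces to arithmetic on the indices scaled by the
   element size.  */
#if 0
static long
addr_of_array_ref_difference (int *a, long i, long j)
{
  return &a[i] - &a[j];   /* folds to i - j */
}
#endif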
11024 /* If the real or vector real constant CST of type TYPE has an exact
11025 inverse, return it, else return NULL. */
11027 tree
11028 exact_inverse (tree type, tree cst)
11030 REAL_VALUE_TYPE r;
11031 tree unit_type;
11032 machine_mode mode;
11034 switch (TREE_CODE (cst))
11036 case REAL_CST:
11037 r = TREE_REAL_CST (cst);
11039 if (exact_real_inverse (TYPE_MODE (type), &r))
11040 return build_real (type, r);
11042 return NULL_TREE;
11044 case VECTOR_CST:
11046 unit_type = TREE_TYPE (type);
11047 mode = TYPE_MODE (unit_type);
11049 tree_vector_builder elts;
11050 if (!elts.new_unary_operation (type, cst, false))
11051 return NULL_TREE;
11052 unsigned int count = elts.encoded_nelts ();
11053 for (unsigned int i = 0; i < count; ++i)
11055 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
11056 if (!exact_real_inverse (mode, &r))
11057 return NULL_TREE;
11058 elts.quick_push (build_real (unit_type, r));
11061 return elts.build ();
11064 default:
11065 return NULL_TREE;
11069 /* Mask out the tz least significant bits of X of type TYPE where
11070 tz is the number of trailing zeroes in Y. */
11071 static wide_int
11072 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
11074 int tz = wi::ctz (y);
11075 if (tz > 0)
11076 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
11077 return x;
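/* Editorial sketch (hypothetical, not part of GCC): mask_with_tz on
   concrete values.  y == 24 has 3 trailing zeroes, so the 3 low bits
   of x are cleared: x == 0b10111 yields 0b10000.  */
#if 0
static unsigned
mask_with_tz_example (unsigned x)
{
  unsigned y = 24;                              /* ctz (24) == 3 */
  return x & ~((1u << __builtin_ctz (y)) - 1);  /* clear low 3 bits */
}
#endif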
11080 /* Return true when T is an address and is known to be nonzero.
11081 For floating point we further ensure that T is not denormal.
11082 Similar logic is present in nonzero_address in rtlanal.h.
11084 If the return value is based on the assumption that signed overflow
11085 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
11086 change *STRICT_OVERFLOW_P. */
11088 static bool
11089 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
11091 tree type = TREE_TYPE (t);
11092 enum tree_code code;
11094 /* Doing something useful for floating point would need more work. */
11095 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11096 return false;
11098 code = TREE_CODE (t);
11099 switch (TREE_CODE_CLASS (code))
11101 case tcc_unary:
11102 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11103 strict_overflow_p);
11104 case tcc_binary:
11105 case tcc_comparison:
11106 return tree_binary_nonzero_warnv_p (code, type,
11107 TREE_OPERAND (t, 0),
11108 TREE_OPERAND (t, 1),
11109 strict_overflow_p);
11110 case tcc_constant:
11111 case tcc_declaration:
11112 case tcc_reference:
11113 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11115 default:
11116 break;
11119 switch (code)
11121 case TRUTH_NOT_EXPR:
11122 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11123 strict_overflow_p);
11125 case TRUTH_AND_EXPR:
11126 case TRUTH_OR_EXPR:
11127 case TRUTH_XOR_EXPR:
11128 return tree_binary_nonzero_warnv_p (code, type,
11129 TREE_OPERAND (t, 0),
11130 TREE_OPERAND (t, 1),
11131 strict_overflow_p);
11133 case COND_EXPR:
11134 case CONSTRUCTOR:
11135 case OBJ_TYPE_REF:
11136 case ADDR_EXPR:
11137 case WITH_SIZE_EXPR:
11138 case SSA_NAME:
11139 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11141 case COMPOUND_EXPR:
11142 case MODIFY_EXPR:
11143 case BIND_EXPR:
11144 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
11145 strict_overflow_p);
11147 case SAVE_EXPR:
11148 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
11149 strict_overflow_p);
11151 case CALL_EXPR:
11153 tree fndecl = get_callee_fndecl (t);
11154 if (!fndecl) return false;
11155 if (flag_delete_null_pointer_checks && !flag_check_new
11156 && DECL_IS_OPERATOR_NEW_P (fndecl)
11157 && !TREE_NOTHROW (fndecl))
11158 return true;
11159 if (flag_delete_null_pointer_checks
11160 && lookup_attribute ("returns_nonnull",
11161 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
11162 return true;
11163 return alloca_call_p (t);
11166 default:
11167 break;
11169 return false;
11172 /* Return true when T is an address and is known to be nonzero.
11173 Handle warnings about undefined signed overflow. */
11175 bool
11176 tree_expr_nonzero_p (tree t)
11178 bool ret, strict_overflow_p;
11180 strict_overflow_p = false;
11181 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
11182 if (strict_overflow_p)
11183 fold_overflow_warning (("assuming signed overflow does not occur when "
11184 "determining that expression is always "
11185 "non-zero"),
11186 WARN_STRICT_OVERFLOW_MISC);
11187 return ret;
11190 /* Return true if T is known not to be equal to an integer W. */
11192 bool
11193 expr_not_equal_to (tree t, const wide_int &w)
11195 int_range_max vr;
11196 switch (TREE_CODE (t))
11198 case INTEGER_CST:
11199 return wi::to_wide (t) != w;
11201 case SSA_NAME:
11202 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11203 return false;
11205 get_range_query (cfun)->range_of_expr (vr, t);
11206 if (!vr.undefined_p () && !vr.contains_p (w))
11207 return true;
11208 /* If T has some known zero bits and W has any of those bits set,
11209 then T is known not to be equal to W. */
11210 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
11211 TYPE_PRECISION (TREE_TYPE (t))), 0))
11212 return true;
11213 return false;
11215 default:
11216 return false;
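/* Editorial sketch (hypothetical, not part of GCC): the nonzero-bits
   test above on a concrete case.  If T is known to be even (its low
   bit is a known zero bit), T cannot equal any odd W such as 5.  */
#if 0
static bool
even_is_never_five (unsigned n)
{
  unsigned t = n << 1;   /* low bit known zero */
  return t != 5;         /* always true: 5 has the low bit set */
}
#endif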
11220 /* Fold a binary expression of code CODE and type TYPE with operands
11221 OP0 and OP1. LOC is the location of the resulting expression.
11222 Return the folded expression if folding is successful. Otherwise,
11223 return NULL_TREE. */
11225 tree
11226 fold_binary_loc (location_t loc, enum tree_code code, tree type,
11227 tree op0, tree op1)
11229 enum tree_code_class kind = TREE_CODE_CLASS (code);
11230 tree arg0, arg1, tem;
11231 tree t1 = NULL_TREE;
11232 bool strict_overflow_p;
11233 unsigned int prec;
11235 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11236 && TREE_CODE_LENGTH (code) == 2
11237 && op0 != NULL_TREE
11238 && op1 != NULL_TREE);
11240 arg0 = op0;
11241 arg1 = op1;
11243 /* Strip any conversions that don't change the mode. This is
11244 safe for every expression, except for a comparison expression
11245 because its signedness is derived from its operands. So, in
11246 the latter case, only strip conversions that don't change the
11247 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11248 preserved.
11250 Note that this is done as an internal manipulation within the
11251 constant folder, in order to find the simplest representation
11252 of the arguments so that their form can be studied. In any
11253 case, the appropriate type conversions should be put back in
11254 the tree that will get out of the constant folder. */
11256 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11258 STRIP_SIGN_NOPS (arg0);
11259 STRIP_SIGN_NOPS (arg1);
11261 else
11263 STRIP_NOPS (arg0);
11264 STRIP_NOPS (arg1);
11267 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11268 constant but we can't do arithmetic on them. */
11269 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11271 tem = const_binop (code, type, arg0, arg1);
11272 if (tem != NULL_TREE)
11274 if (TREE_TYPE (tem) != type)
11275 tem = fold_convert_loc (loc, type, tem);
11276 return tem;
11280 /* If this is a commutative operation, and ARG0 is a constant, move it
11281 to ARG1 to reduce the number of tests below. */
11282 if (commutative_tree_code (code)
11283 && tree_swap_operands_p (arg0, arg1))
11284 return fold_build2_loc (loc, code, type, op1, op0);
11286 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11287 to ARG1 to reduce the number of tests below. */
11288 if (kind == tcc_comparison
11289 && tree_swap_operands_p (arg0, arg1))
11290 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11292 tem = generic_simplify (loc, code, type, op0, op1);
11293 if (tem)
11294 return tem;
11296 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11298 First check for cases where an arithmetic operation is applied to a
11299 compound, conditional, or comparison operation. Push the arithmetic
11300 operation inside the compound or conditional to see if any folding
11301 can then be done. Convert comparison to conditional for this purpose.
11302 This also optimizes non-constant cases that used to be done in
11303 expand_expr.
11305 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
11306 one of the operands is a comparison and the other is a comparison, a
11307 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11308 code below would make the expression more complex. Change it to a
11309 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11310 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11312 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11313 || code == EQ_EXPR || code == NE_EXPR)
11314 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11315 && ((truth_value_p (TREE_CODE (arg0))
11316 && (truth_value_p (TREE_CODE (arg1))
11317 || (TREE_CODE (arg1) == BIT_AND_EXPR
11318 && integer_onep (TREE_OPERAND (arg1, 1)))))
11319 || (truth_value_p (TREE_CODE (arg1))
11320 && (truth_value_p (TREE_CODE (arg0))
11321 || (TREE_CODE (arg0) == BIT_AND_EXPR
11322 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11324 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11325 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11326 : TRUTH_XOR_EXPR,
11327 boolean_type_node,
11328 fold_convert_loc (loc, boolean_type_node, arg0),
11329 fold_convert_loc (loc, boolean_type_node, arg1));
11331 if (code == EQ_EXPR)
11332 tem = invert_truthvalue_loc (loc, tem);
11334 return fold_convert_loc (loc, type, tem);
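/* Editorial sketch (hypothetical, not part of GCC): the rewrite above
   at source level.  For truth-valued operands, (a < b) & (c < d)
   becomes (a < b) && (c < d), and (a < b) == (c < d) becomes
   !((a < b) ^ (c < d)).  */
#if 0
static bool
bitop_to_truthop (int a, int b, int c, int d)
{
  return ((a < b) & (c < d)) == ((a < b) && (c < d));  /* always true */
}
#endif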
11337 if (TREE_CODE_CLASS (code) == tcc_binary
11338 || TREE_CODE_CLASS (code) == tcc_comparison)
11340 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11342 tem = fold_build2_loc (loc, code, type,
11343 fold_convert_loc (loc, TREE_TYPE (op0),
11344 TREE_OPERAND (arg0, 1)), op1);
11345 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11346 tem);
11348 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11350 tem = fold_build2_loc (loc, code, type, op0,
11351 fold_convert_loc (loc, TREE_TYPE (op1),
11352 TREE_OPERAND (arg1, 1)));
11353 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11354 tem);
11357 if (TREE_CODE (arg0) == COND_EXPR
11358 || TREE_CODE (arg0) == VEC_COND_EXPR
11359 || COMPARISON_CLASS_P (arg0))
11361 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11362 arg0, arg1,
11363 /*cond_first_p=*/1);
11364 if (tem != NULL_TREE)
11365 return tem;
11368 if (TREE_CODE (arg1) == COND_EXPR
11369 || TREE_CODE (arg1) == VEC_COND_EXPR
11370 || COMPARISON_CLASS_P (arg1))
11372 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11373 arg1, arg0,
11374 /*cond_first_p=*/0);
11375 if (tem != NULL_TREE)
11376 return tem;
11380 switch (code)
11382 case MEM_REF:
11383 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11384 if (TREE_CODE (arg0) == ADDR_EXPR
11385 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11387 tree iref = TREE_OPERAND (arg0, 0);
11388 return fold_build2 (MEM_REF, type,
11389 TREE_OPERAND (iref, 0),
11390 int_const_binop (PLUS_EXPR, arg1,
11391 TREE_OPERAND (iref, 1)));
11394 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11395 if (TREE_CODE (arg0) == ADDR_EXPR
11396 && handled_component_p (TREE_OPERAND (arg0, 0)))
11398 tree base;
11399 poly_int64 coffset;
11400 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11401 &coffset);
11402 if (!base)
11403 return NULL_TREE;
11404 return fold_build2 (MEM_REF, type,
11405 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11406 int_const_binop (PLUS_EXPR, arg1,
11407 size_int (coffset)));
11410 return NULL_TREE;
11412 case POINTER_PLUS_EXPR:
11413 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11414 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11415 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11416 return fold_convert_loc (loc, type,
11417 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11418 fold_convert_loc (loc, sizetype,
11419 arg1),
11420 fold_convert_loc (loc, sizetype,
11421 arg0)));
11423 return NULL_TREE;
11425 case PLUS_EXPR:
11426 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11428 /* X + (X / CST) * -CST is X % CST. */
11429 if (TREE_CODE (arg1) == MULT_EXPR
11430 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11431 && operand_equal_p (arg0,
11432 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11434 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11435 tree cst1 = TREE_OPERAND (arg1, 1);
11436 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11437 cst1, cst0);
11438 if (sum && integer_zerop (sum))
11439 return fold_convert_loc (loc, type,
11440 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11441 TREE_TYPE (arg0), arg0,
11442 cst0));
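/* Editorial sketch (hypothetical, not part of GCC): the identity
   behind the fold above, with C's truncating division:
   x + (x / CST) * -CST == x % CST for any nonzero CST, here 16.  */
#if 0
static bool
plus_div_mult_is_mod (int x)
{
  return x + (x / 16) * -16 == x % 16;   /* always true */
}
#endif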
11446 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11447 one. Make sure the type is not saturating and has the signedness of
11448 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11449 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11450 if ((TREE_CODE (arg0) == MULT_EXPR
11451 || TREE_CODE (arg1) == MULT_EXPR)
11452 && !TYPE_SATURATING (type)
11453 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11454 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11455 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11457 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11458 if (tem)
11459 return tem;
11462 if (! FLOAT_TYPE_P (type))
11464 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11465 (plus (plus (mult) (mult)) (foo)) so that we can
11466 take advantage of the factoring cases below. */
11467 if (ANY_INTEGRAL_TYPE_P (type)
11468 && TYPE_OVERFLOW_WRAPS (type)
11469 && (((TREE_CODE (arg0) == PLUS_EXPR
11470 || TREE_CODE (arg0) == MINUS_EXPR)
11471 && TREE_CODE (arg1) == MULT_EXPR)
11472 || ((TREE_CODE (arg1) == PLUS_EXPR
11473 || TREE_CODE (arg1) == MINUS_EXPR)
11474 && TREE_CODE (arg0) == MULT_EXPR)))
11476 tree parg0, parg1, parg, marg;
11477 enum tree_code pcode;
11479 if (TREE_CODE (arg1) == MULT_EXPR)
11480 parg = arg0, marg = arg1;
11481 else
11482 parg = arg1, marg = arg0;
11483 pcode = TREE_CODE (parg);
11484 parg0 = TREE_OPERAND (parg, 0);
11485 parg1 = TREE_OPERAND (parg, 1);
11486 STRIP_NOPS (parg0);
11487 STRIP_NOPS (parg1);
11489 if (TREE_CODE (parg0) == MULT_EXPR
11490 && TREE_CODE (parg1) != MULT_EXPR)
11491 return fold_build2_loc (loc, pcode, type,
11492 fold_build2_loc (loc, PLUS_EXPR, type,
11493 fold_convert_loc (loc, type,
11494 parg0),
11495 fold_convert_loc (loc, type,
11496 marg)),
11497 fold_convert_loc (loc, type, parg1));
11498 if (TREE_CODE (parg0) != MULT_EXPR
11499 && TREE_CODE (parg1) == MULT_EXPR)
11500 return
11501 fold_build2_loc (loc, PLUS_EXPR, type,
11502 fold_convert_loc (loc, type, parg0),
11503 fold_build2_loc (loc, pcode, type,
11504 fold_convert_loc (loc, type, marg),
11505 fold_convert_loc (loc, type,
11506 parg1)));
11509 else
11511 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11512 to __complex__ ( x, y ). This is not the same for SNaNs or
11513 if signed zeros are involved. */
11514 if (!HONOR_SNANS (arg0)
11515 && !HONOR_SIGNED_ZEROS (arg0)
11516 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11518 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11519 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11520 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11521 bool arg0rz = false, arg0iz = false;
11522 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11523 || (arg0i && (arg0iz = real_zerop (arg0i))))
11525 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11526 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11527 if (arg0rz && arg1i && real_zerop (arg1i))
11529 tree rp = arg1r ? arg1r
11530 : build1 (REALPART_EXPR, rtype, arg1);
11531 tree ip = arg0i ? arg0i
11532 : build1 (IMAGPART_EXPR, rtype, arg0);
11533 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11535 else if (arg0iz && arg1r && real_zerop (arg1r))
11537 tree rp = arg0r ? arg0r
11538 : build1 (REALPART_EXPR, rtype, arg0);
11539 tree ip = arg1i ? arg1i
11540 : build1 (IMAGPART_EXPR, rtype, arg1);
11541 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11546 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11547 We associate floats only if the user has specified
11548 -fassociative-math. */
11549 if (flag_associative_math
11550 && TREE_CODE (arg1) == PLUS_EXPR
11551 && TREE_CODE (arg0) != MULT_EXPR)
11553 tree tree10 = TREE_OPERAND (arg1, 0);
11554 tree tree11 = TREE_OPERAND (arg1, 1);
11555 if (TREE_CODE (tree11) == MULT_EXPR
11556 && TREE_CODE (tree10) == MULT_EXPR)
11558 tree tree0;
11559 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11560 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11563 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11564 We associate floats only if the user has specified
11565 -fassociative-math. */
11566 if (flag_associative_math
11567 && TREE_CODE (arg0) == PLUS_EXPR
11568 && TREE_CODE (arg1) != MULT_EXPR)
11570 tree tree00 = TREE_OPERAND (arg0, 0);
11571 tree tree01 = TREE_OPERAND (arg0, 1);
11572 if (TREE_CODE (tree01) == MULT_EXPR
11573 && TREE_CODE (tree00) == MULT_EXPR)
11575 tree tree0;
11576 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11577 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11582 bit_rotate:
11583 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11584 is a rotate of A by C1 bits. */
11585 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11586 is a rotate of A by B bits.
11587 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11588 though in this case CODE must be | and not + or ^, otherwise
11589 it doesn't return A when B is 0. */
11591 enum tree_code code0, code1;
11592 tree rtype;
11593 code0 = TREE_CODE (arg0);
11594 code1 = TREE_CODE (arg1);
11595 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11596 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11597 && operand_equal_p (TREE_OPERAND (arg0, 0),
11598 TREE_OPERAND (arg1, 0), 0)
11599 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11600 TYPE_UNSIGNED (rtype))
11601 /* Only create rotates in complete modes. Other cases are not
11602 expanded properly. */
11603 && (element_precision (rtype)
11604 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11606 tree tree01, tree11;
11607 tree orig_tree01, orig_tree11;
11608 enum tree_code code01, code11;
11610 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11611 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11612 STRIP_NOPS (tree01);
11613 STRIP_NOPS (tree11);
11614 code01 = TREE_CODE (tree01);
11615 code11 = TREE_CODE (tree11);
11616 if (code11 != MINUS_EXPR
11617 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11619 std::swap (code0, code1);
11620 std::swap (code01, code11);
11621 std::swap (tree01, tree11);
11622 std::swap (orig_tree01, orig_tree11);
11624 if (code01 == INTEGER_CST
11625 && code11 == INTEGER_CST
11626 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11627 == element_precision (rtype)))
11629 tem = build2_loc (loc, LROTATE_EXPR,
11630 rtype, TREE_OPERAND (arg0, 0),
11631 code0 == LSHIFT_EXPR
11632 ? orig_tree01 : orig_tree11);
11633 return fold_convert_loc (loc, type, tem);
11635 else if (code11 == MINUS_EXPR)
11637 tree tree110, tree111;
11638 tree110 = TREE_OPERAND (tree11, 0);
11639 tree111 = TREE_OPERAND (tree11, 1);
11640 STRIP_NOPS (tree110);
11641 STRIP_NOPS (tree111);
11642 if (TREE_CODE (tree110) == INTEGER_CST
11643 && compare_tree_int (tree110,
11644 element_precision (rtype)) == 0
11645 && operand_equal_p (tree01, tree111, 0))
11647 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11648 ? LROTATE_EXPR : RROTATE_EXPR),
11649 rtype, TREE_OPERAND (arg0, 0),
11650 orig_tree01);
11651 return fold_convert_loc (loc, type, tem);
11654 else if (code == BIT_IOR_EXPR
11655 && code11 == BIT_AND_EXPR
11656 && pow2p_hwi (element_precision (rtype)))
11658 tree tree110, tree111;
11659 tree110 = TREE_OPERAND (tree11, 0);
11660 tree111 = TREE_OPERAND (tree11, 1);
11661 STRIP_NOPS (tree110);
11662 STRIP_NOPS (tree111);
11663 if (TREE_CODE (tree110) == NEGATE_EXPR
11664 && TREE_CODE (tree111) == INTEGER_CST
11665 && compare_tree_int (tree111,
11666 element_precision (rtype) - 1) == 0
11667 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11669 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11670 ? LROTATE_EXPR : RROTATE_EXPR),
11671 rtype, TREE_OPERAND (arg0, 0),
11672 orig_tree01);
11673 return fold_convert_loc (loc, type, tem);
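/* Editorial sketch (hypothetical, not part of GCC; assumes a 32-bit
   unsigned int): the rotate forms recognized above.
   (A << B) + (A >> (32 - B)) is a left-rotate by B for B in [1, 31];
   (A << B) | (A >> (-B & 31)) is a left-rotate for any B in [0, 31],
   which is why that last form is only recognized under |.  */
#if 0
static unsigned int
rotate_left (unsigned int a, unsigned int b)
{
  return (a << b) | (a >> (-b & 31));   /* well-defined even for b == 0 */
}
#endif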
11679 associate:
11680 /* In most languages, can't associate operations on floats through
11681 parentheses. Rather than remember where the parentheses were, we
11682 don't associate floats at all, unless the user has specified
11683 -fassociative-math.
11684 And, we need to make sure type is not saturating. */
11686 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11687 && !TYPE_SATURATING (type)
11688 && !TYPE_OVERFLOW_SANITIZED (type))
11690 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11691 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11692 tree atype = type;
11693 bool ok = true;
11695 /* Split both trees into variables, constants, and literals. Then
11696 associate each group together, the constants with literals,
11697 then the result with variables. This increases the chances of
11698 literals being recombined later and of generating relocatable
11699 expressions for the sum of a constant and literal. */
11700 var0 = split_tree (arg0, type, code,
11701 &minus_var0, &con0, &minus_con0,
11702 &lit0, &minus_lit0, 0);
11703 var1 = split_tree (arg1, type, code,
11704 &minus_var1, &con1, &minus_con1,
11705 &lit1, &minus_lit1, code == MINUS_EXPR);
11707 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11708 if (code == MINUS_EXPR)
11709 code = PLUS_EXPR;
11711 /* With undefined overflow prefer doing association in a type
11712 which wraps on overflow, if that is one of the operand types. */
11713 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11714 && !TYPE_OVERFLOW_WRAPS (type))
11716 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11717 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11718 atype = TREE_TYPE (arg0);
11719 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11720 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11721 atype = TREE_TYPE (arg1);
11722 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11725 /* With undefined overflow we can only associate constants with one
11726 variable, and constants whose association doesn't overflow. */
11727 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11728 && !TYPE_OVERFLOW_WRAPS (atype))
11730 if ((var0 && var1) || (minus_var0 && minus_var1))
11732 /* ??? If split_tree would handle NEGATE_EXPR we could
11733 simply reject these cases and the allowed cases would
11734 be the var0/minus_var1 ones. */
11735 tree tmp0 = var0 ? var0 : minus_var0;
11736 tree tmp1 = var1 ? var1 : minus_var1;
11737 bool one_neg = false;
11739 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11741 tmp0 = TREE_OPERAND (tmp0, 0);
11742 one_neg = !one_neg;
11744 if (CONVERT_EXPR_P (tmp0)
11745 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11746 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11747 <= TYPE_PRECISION (atype)))
11748 tmp0 = TREE_OPERAND (tmp0, 0);
11749 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11751 tmp1 = TREE_OPERAND (tmp1, 0);
11752 one_neg = !one_neg;
11754 if (CONVERT_EXPR_P (tmp1)
11755 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11756 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11757 <= TYPE_PRECISION (atype)))
11758 tmp1 = TREE_OPERAND (tmp1, 0);
11759 /* The only case we can still associate with two variables
11760 is if they cancel out. */
11761 if (!one_neg
11762 || !operand_equal_p (tmp0, tmp1, 0))
11763 ok = false;
11765 else if ((var0 && minus_var1
11766 && ! operand_equal_p (var0, minus_var1, 0))
11767 || (minus_var0 && var1
11768 && ! operand_equal_p (minus_var0, var1, 0)))
11769 ok = false;
11772 /* Only do something if we found more than two objects. Otherwise,
11773 nothing has changed and we risk infinite recursion. */
11774 if (ok
11775 && ((var0 != 0) + (var1 != 0)
11776 + (minus_var0 != 0) + (minus_var1 != 0)
11777 + (con0 != 0) + (con1 != 0)
11778 + (minus_con0 != 0) + (minus_con1 != 0)
11779 + (lit0 != 0) + (lit1 != 0)
11780 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11782 int var0_origin = (var0 != 0) + 2 * (var1 != 0);
11783 int minus_var0_origin
11784 = (minus_var0 != 0) + 2 * (minus_var1 != 0);
11785 int con0_origin = (con0 != 0) + 2 * (con1 != 0);
11786 int minus_con0_origin
11787 = (minus_con0 != 0) + 2 * (minus_con1 != 0);
11788 int lit0_origin = (lit0 != 0) + 2 * (lit1 != 0);
11789 int minus_lit0_origin
11790 = (minus_lit0 != 0) + 2 * (minus_lit1 != 0);
11791 var0 = associate_trees (loc, var0, var1, code, atype);
11792 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11793 code, atype);
11794 con0 = associate_trees (loc, con0, con1, code, atype);
11795 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11796 code, atype);
11797 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11798 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11799 code, atype);
11801 if (minus_var0 && var0)
11803 var0_origin |= minus_var0_origin;
11804 var0 = associate_trees (loc, var0, minus_var0,
11805 MINUS_EXPR, atype);
11806 minus_var0 = 0;
11807 minus_var0_origin = 0;
11809 if (minus_con0 && con0)
11811 con0_origin |= minus_con0_origin;
11812 con0 = associate_trees (loc, con0, minus_con0,
11813 MINUS_EXPR, atype);
11814 minus_con0 = 0;
11815 minus_con0_origin = 0;
11818 /* Preserve the MINUS_EXPR if the negative part of the literal is
11819 greater than the positive part. Otherwise, the multiplicative
11820 folding code (i.e. extract_muldiv) may be fooled when
11821 unsigned constants are subtracted, as in the following
11822 example: ((X*2 + 4) - 8U)/2. */
11823 if (minus_lit0 && lit0)
11825 if (TREE_CODE (lit0) == INTEGER_CST
11826 && TREE_CODE (minus_lit0) == INTEGER_CST
11827 && tree_int_cst_lt (lit0, minus_lit0)
11828 /* But avoid ending up with only negated parts. */
11829 && (var0 || con0))
11831 minus_lit0_origin |= lit0_origin;
11832 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11833 MINUS_EXPR, atype);
11834 lit0 = 0;
11835 lit0_origin = 0;
11837 else
11839 lit0_origin |= minus_lit0_origin;
11840 lit0 = associate_trees (loc, lit0, minus_lit0,
11841 MINUS_EXPR, atype);
11842 minus_lit0 = 0;
11843 minus_lit0_origin = 0;
11847 /* Don't introduce overflows through reassociation. */
11848 if ((lit0 && TREE_OVERFLOW_P (lit0))
11849 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11850 return NULL_TREE;
11852 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11853 con0_origin |= lit0_origin;
11854 con0 = associate_trees (loc, con0, lit0, code, atype);
11855 minus_con0_origin |= minus_lit0_origin;
11856 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11857 code, atype);
11859 /* Eliminate minus_con0. */
11860 if (minus_con0)
11862 if (con0)
11864 con0_origin |= minus_con0_origin;
11865 con0 = associate_trees (loc, con0, minus_con0,
11866 MINUS_EXPR, atype);
11868 else if (var0)
11870 var0_origin |= minus_con0_origin;
11871 var0 = associate_trees (loc, var0, minus_con0,
11872 MINUS_EXPR, atype);
11874 else
11875 gcc_unreachable ();
11878 /* Eliminate minus_var0. */
11879 if (minus_var0)
11881 if (con0)
11883 con0_origin |= minus_var0_origin;
11884 con0 = associate_trees (loc, con0, minus_var0,
11885 MINUS_EXPR, atype);
11887 else
11888 gcc_unreachable ();
11891 /* Reassociate only if there has been any actual association
11892 between subtrees from op0 and subtrees from op1 in at
11893 least one of the operands, otherwise we risk infinite
11894 recursion. See PR114084. */
11895 if (var0_origin != 3 && con0_origin != 3)
11896 return NULL_TREE;
11898 return
11899 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11900 code, atype));
11904 return NULL_TREE;
11906 case POINTER_DIFF_EXPR:
11907 case MINUS_EXPR:
11908 /* Fold &a[i] - &a[j] to i-j. */
11909 if (TREE_CODE (arg0) == ADDR_EXPR
11910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11911 && TREE_CODE (arg1) == ADDR_EXPR
11912 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11914 tree tem = fold_addr_of_array_ref_difference (loc, type,
11915 TREE_OPERAND (arg0, 0),
11916 TREE_OPERAND (arg1, 0),
11917 code
11918 == POINTER_DIFF_EXPR);
11919 if (tem)
11920 return tem;
11923 /* Further transformations are not for pointers. */
11924 if (code == POINTER_DIFF_EXPR)
11925 return NULL_TREE;
11927 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11928 if (TREE_CODE (arg0) == NEGATE_EXPR
11929 && negate_expr_p (op1)
11930 /* If arg0 is e.g. unsigned int and type is int, then this could
11931 introduce UB, because if A is INT_MIN at runtime, the original
11932 expression can be well defined while the latter is not.
11933 See PR83269. */
11934 && !(ANY_INTEGRAL_TYPE_P (type)
11935 && TYPE_OVERFLOW_UNDEFINED (type)
11936 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11937 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11938 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11939 fold_convert_loc (loc, type,
11940 TREE_OPERAND (arg0, 0)));
11942 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11943 __complex__ ( x, -y ). This is not the same for SNaNs or if
11944 signed zeros are involved. */
11945 if (!HONOR_SNANS (arg0)
11946 && !HONOR_SIGNED_ZEROS (arg0)
11947 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11949 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11950 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11951 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11952 bool arg0rz = false, arg0iz = false;
11953 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11954 || (arg0i && (arg0iz = real_zerop (arg0i))))
11956 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11957 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11958 if (arg0rz && arg1i && real_zerop (arg1i))
11960 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11961 arg1r ? arg1r
11962 : build1 (REALPART_EXPR, rtype, arg1));
11963 tree ip = arg0i ? arg0i
11964 : build1 (IMAGPART_EXPR, rtype, arg0);
11965 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11967 else if (arg0iz && arg1r && real_zerop (arg1r))
11969 tree rp = arg0r ? arg0r
11970 : build1 (REALPART_EXPR, rtype, arg0);
11971 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11972 arg1i ? arg1i
11973 : build1 (IMAGPART_EXPR, rtype, arg1));
11974 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11979 /* A - B -> A + (-B) if B is easily negatable. */
11980 if (negate_expr_p (op1)
11981 && ! TYPE_OVERFLOW_SANITIZED (type)
11982 && ((FLOAT_TYPE_P (type)
11983 /* Avoid this transformation if B is a positive REAL_CST. */
11984 && (TREE_CODE (op1) != REAL_CST
11985 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11986 || INTEGRAL_TYPE_P (type)))
11987 return fold_build2_loc (loc, PLUS_EXPR, type,
11988 fold_convert_loc (loc, type, arg0),
11989 negate_expr (op1));
11991 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11992 one. Make sure the type is not saturating and has the signedness of
11993 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11994 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11995 if ((TREE_CODE (arg0) == MULT_EXPR
11996 || TREE_CODE (arg1) == MULT_EXPR)
11997 && !TYPE_SATURATING (type)
11998 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11999 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
12000 && (!FLOAT_TYPE_P (type) || flag_associative_math))
12002 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
12003 if (tem)
12004 return tem;
12007 goto associate;
12009 case MULT_EXPR:
12010 if (! FLOAT_TYPE_P (type))
12012 /* Transform x * -C into -x * C if x is easily negatable. */
12013 if (TREE_CODE (op1) == INTEGER_CST
12014 && tree_int_cst_sgn (op1) == -1
12015 && negate_expr_p (op0)
12016 && negate_expr_p (op1)
12017 && (tem = negate_expr (op1)) != op1
12018 && ! TREE_OVERFLOW (tem))
12019 return fold_build2_loc (loc, MULT_EXPR, type,
12020 fold_convert_loc (loc, type,
12021 negate_expr (op0)), tem);
12023 strict_overflow_p = false;
12024 if (TREE_CODE (arg1) == INTEGER_CST
12025 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12026 &strict_overflow_p)) != 0)
12028 if (strict_overflow_p)
12029 fold_overflow_warning (("assuming signed overflow does not "
12030 "occur when simplifying "
12031 "multiplication"),
12032 WARN_STRICT_OVERFLOW_MISC);
12033 return fold_convert_loc (loc, type, tem);
12036 /* Optimize z * conj(z) for integer complex numbers. */
12037 if (TREE_CODE (arg0) == CONJ_EXPR
12038 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12039 return fold_mult_zconjz (loc, type, arg1);
12040 if (TREE_CODE (arg1) == CONJ_EXPR
12041 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12042 return fold_mult_zconjz (loc, type, arg0);
12044 else
12046 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
12047 This is not the same for NaNs or if signed zeros are
12048 involved. */
12049 if (!HONOR_NANS (arg0)
12050 && !HONOR_SIGNED_ZEROS (arg0)
12051 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12052 && TREE_CODE (arg1) == COMPLEX_CST
12053 && real_zerop (TREE_REALPART (arg1)))
12055 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
12056 if (real_onep (TREE_IMAGPART (arg1)))
12057 return
12058 fold_build2_loc (loc, COMPLEX_EXPR, type,
12059 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
12060 rtype, arg0)),
12061 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
12062 else if (real_minus_onep (TREE_IMAGPART (arg1)))
12063 return
12064 fold_build2_loc (loc, COMPLEX_EXPR, type,
12065 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
12066 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
12067 rtype, arg0)));
12070 /* Optimize z * conj(z) for floating point complex numbers.
12071 Guarded by flag_unsafe_math_optimizations as non-finite
12072 imaginary components don't produce scalar results. */
12073 if (flag_unsafe_math_optimizations
12074 && TREE_CODE (arg0) == CONJ_EXPR
12075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12076 return fold_mult_zconjz (loc, type, arg1);
12077 if (flag_unsafe_math_optimizations
12078 && TREE_CODE (arg1) == CONJ_EXPR
12079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12080 return fold_mult_zconjz (loc, type, arg0);
12082 goto associate;
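/* Editorial sketch (hypothetical, not part of GCC): fold_mult_zconjz
   above relies on the identity z * conj (z) == re (z)^2 + im (z)^2,
   a complex value with zero imaginary part.  */
#if 0
static int
z_times_conj_z_real (int re, int im)
{
  return re * re + im * im;   /* real part of z * conj (z) */
}
#endif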
12084 case BIT_IOR_EXPR:
12085 /* Canonicalize (X & C1) | C2. */
12086 if (TREE_CODE (arg0) == BIT_AND_EXPR
12087 && TREE_CODE (arg1) == INTEGER_CST
12088 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12090 int width = TYPE_PRECISION (type), w;
12091 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
12092 wide_int c2 = wi::to_wide (arg1);
12094 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
12095 if ((c1 & c2) == c1)
12096 return omit_one_operand_loc (loc, type, arg1,
12097 TREE_OPERAND (arg0, 0));
12099 wide_int msk = wi::mask (width, false,
12100 TYPE_PRECISION (TREE_TYPE (arg1)));
12102 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
12103 if (wi::bit_and_not (msk, c1 | c2) == 0)
12105 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12106 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12109 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
12110 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
12111 mode which allows further optimizations. */
12112 c1 &= msk;
12113 c2 &= msk;
12114 wide_int c3 = wi::bit_and_not (c1, c2);
12115 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
12117 wide_int mask = wi::mask (w, false,
12118 TYPE_PRECISION (type));
12119 if (((c1 | c2) & mask) == mask
12120 && wi::bit_and_not (c1, mask) == 0)
12122 c3 = mask;
12123 break;
12127 if (c3 != c1)
12129 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12130 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
12131 wide_int_to_tree (type, c3));
12132 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12136 /* See if this can be simplified into a rotate first. If that
12137 is unsuccessful continue in the association code. */
12138 goto bit_rotate;
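/* Editorial sketch (hypothetical, not part of GCC): the (X & C1) | C2
   canonicalizations above on 32-bit constants.  If (C1 & C2) == C1,
   X is irrelevant: (x & 0x0f) | 0xff folds to 0xff.  If
   (C1 | C2) == ~0, (x & 0xfffffff0) | 0x0f folds to x | 0x0f.
   Otherwise C1 is shrunk to C1 & ~C2.  */
#if 0
static unsigned
and_then_or (unsigned x)
{
  return (x & 0xfffffff0u) | 0x0fu;   /* same value as x | 0x0f */
}
#endif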
12140 case BIT_XOR_EXPR:
12141 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
12142 if (TREE_CODE (arg0) == BIT_AND_EXPR
12143 && INTEGRAL_TYPE_P (type)
12144 && integer_onep (TREE_OPERAND (arg0, 1))
12145 && integer_onep (arg1))
12146 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
12147 build_zero_cst (TREE_TYPE (arg0)));
12149 /* See if this can be simplified into a rotate first. If that
12150 is unsuccessful continue in the association code. */
12151 goto bit_rotate;
12153 case BIT_AND_EXPR:
12154 /* Fold !X & 1 as X == 0. */
12155 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12156 && integer_onep (arg1))
12158 tem = TREE_OPERAND (arg0, 0);
12159 return fold_build2_loc (loc, EQ_EXPR, type, tem,
12160 build_zero_cst (TREE_TYPE (tem)));
12163 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
12164 multiple of 1 << CST. */
12165 if (TREE_CODE (arg1) == INTEGER_CST)
12167 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12168 wide_int ncst1 = -cst1;
12169 if ((cst1 & ncst1) == ncst1
12170 && multiple_of_p (type, arg0,
12171 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
12172 return fold_convert_loc (loc, type, arg0);
12175 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12176 bits from CST2. */
12177 if (TREE_CODE (arg1) == INTEGER_CST
12178 && TREE_CODE (arg0) == MULT_EXPR
12179 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12181 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
12182 wide_int masked
12183 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
12185 if (masked == 0)
12186 return omit_two_operands_loc (loc, type, build_zero_cst (type),
12187 arg0, arg1);
12188 else if (masked != warg1)
12190 /* Avoid the transform if arg1 is a mask of some
12191 mode which allows further optimizations. */
12192 int pop = wi::popcount (warg1);
12193 if (!(pop >= BITS_PER_UNIT
12194 && pow2p_hwi (pop)
12195 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
12196 return fold_build2_loc (loc, code, type, op0,
12197 wide_int_to_tree (type, masked));
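/* Editorial sketch (hypothetical, not part of GCC): the two outcomes
   above on concrete constants.  x * 4 always has its two low bits
   clear, so (x * 4) & 3 folds to 0, and (x * 4) & 7 drops the
   known-zero bits and becomes (x * 4) & 4.  */
#if 0
static bool
mult_mask_known_zero (int x)
{
  return ((x * 4) & 3) == 0;   /* always true */
}
#endif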
12201 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12202 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12203 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12205 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12207 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
12208 if (mask == -1)
12209 return
12210 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12213 goto associate;
12215 case RDIV_EXPR:
12216 /* Don't touch a floating-point divide by zero unless the mode
12217 of the constant can represent infinity. */
12218 if (TREE_CODE (arg1) == REAL_CST
12219 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12220 && real_zerop (arg1))
12221 return NULL_TREE;
12223 /* (-A) / (-B) -> A / B */
12224 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12225 return fold_build2_loc (loc, RDIV_EXPR, type,
12226 TREE_OPERAND (arg0, 0),
12227 negate_expr (arg1));
12228 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12229 return fold_build2_loc (loc, RDIV_EXPR, type,
12230 negate_expr (arg0),
12231 TREE_OPERAND (arg1, 0));
12232 return NULL_TREE;
12234 case TRUNC_DIV_EXPR:
12235 /* Fall through */
12237 case FLOOR_DIV_EXPR:
12238 /* Simplify A / (B << N) where A and B are positive and B is
12239 a power of 2, to A >> (N + log2(B)). */
12240 strict_overflow_p = false;
12241 if (TREE_CODE (arg1) == LSHIFT_EXPR
12242 && (TYPE_UNSIGNED (type)
12243 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12245 tree sval = TREE_OPERAND (arg1, 0);
12246 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12248 tree sh_cnt = TREE_OPERAND (arg1, 1);
12249 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12250 wi::exact_log2 (wi::to_wide (sval)));
12252 if (strict_overflow_p)
12253 fold_overflow_warning (("assuming signed overflow does not "
12254 "occur when simplifying A / (B << N)"),
12255 WARN_STRICT_OVERFLOW_MISC);
12257 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12258 sh_cnt, pow2);
12259 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12260 fold_convert_loc (loc, type, arg0), sh_cnt);
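/* Editorial sketch (hypothetical, not part of GCC): the rewrite above
   for unsigned a, B == 8 and shift count n: a / (8u << n) equals
   a >> (n + 3), since log2 (8) == 3.  */
#if 0
static unsigned
div_by_shifted_pow2 (unsigned a, unsigned n)
{
  return a >> (n + 3);   /* == a / (8u << n) while 8u << n fits */
}
#endif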
12264 /* Fall through */
12266 case ROUND_DIV_EXPR:
12267 case CEIL_DIV_EXPR:
12268 case EXACT_DIV_EXPR:
12269 if (integer_zerop (arg1))
12270 return NULL_TREE;
12272 /* Convert -A / -B to A / B when the type is signed and overflow is
12273 undefined. */
12274 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12275 && TREE_CODE (op0) == NEGATE_EXPR
12276 && negate_expr_p (op1))
12278 if (ANY_INTEGRAL_TYPE_P (type))
12279 fold_overflow_warning (("assuming signed overflow does not occur "
12280 "when distributing negation across "
12281 "division"),
12282 WARN_STRICT_OVERFLOW_MISC);
12283 return fold_build2_loc (loc, code, type,
12284 fold_convert_loc (loc, type,
12285 TREE_OPERAND (arg0, 0)),
12286 negate_expr (op1));
12288 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12289 && TREE_CODE (arg1) == NEGATE_EXPR
12290 && negate_expr_p (op0))
12292 if (ANY_INTEGRAL_TYPE_P (type))
12293 fold_overflow_warning (("assuming signed overflow does not occur "
12294 "when distributing negation across "
12295 "division"),
12296 WARN_STRICT_OVERFLOW_MISC);
12297 return fold_build2_loc (loc, code, type,
12298 negate_expr (op0),
12299 fold_convert_loc (loc, type,
12300 TREE_OPERAND (arg1, 0)));
12303 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12304 operation, EXACT_DIV_EXPR.
12306 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12307 At one time others generated faster code, but it's not clear whether they
12308 do after the last round of changes to the DIV code in expmed.cc. */
12309 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12310 && multiple_of_p (type, arg0, arg1))
12311 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12312 fold_convert (type, arg0),
12313 fold_convert (type, arg1));
12315 strict_overflow_p = false;
12316 if (TREE_CODE (arg1) == INTEGER_CST
12317 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12318 &strict_overflow_p)) != 0)
12320 if (strict_overflow_p)
12321 fold_overflow_warning (("assuming signed overflow does not occur "
12322 "when simplifying division"),
12323 WARN_STRICT_OVERFLOW_MISC);
12324 return fold_convert_loc (loc, type, tem);
12327 return NULL_TREE;
12329 case CEIL_MOD_EXPR:
12330 case FLOOR_MOD_EXPR:
12331 case ROUND_MOD_EXPR:
12332 case TRUNC_MOD_EXPR:
12333 strict_overflow_p = false;
12334 if (TREE_CODE (arg1) == INTEGER_CST
12335 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12336 &strict_overflow_p)) != 0)
12338 if (strict_overflow_p)
12339 fold_overflow_warning (("assuming signed overflow does not occur "
12340 "when simplifying modulus"),
12341 WARN_STRICT_OVERFLOW_MISC);
12342 return fold_convert_loc (loc, type, tem);
12345 return NULL_TREE;
12347 case LROTATE_EXPR:
12348 case RROTATE_EXPR:
12349 case RSHIFT_EXPR:
12350 case LSHIFT_EXPR:
12351 /* Since a negative shift count is not well-defined,
12352 don't try to compute it in the compiler. */
12353 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12354 return NULL_TREE;
12356 prec = element_precision (type);
12358 /* If we have a rotate of a bit operation with the rotate count and
12359 the second operand of the bit operation both constant,
12360 permute the two operations. */
12361 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12362 && (TREE_CODE (arg0) == BIT_AND_EXPR
12363 || TREE_CODE (arg0) == BIT_IOR_EXPR
12364 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12365 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12367 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12368 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12369 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12370 fold_build2_loc (loc, code, type,
12371 arg00, arg1),
12372 fold_build2_loc (loc, code, type,
12373 arg01, arg1));
12376 /* Two consecutive rotates adding up to some integer
12377 multiple of the precision of the type can be ignored. */
12378 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12379 && TREE_CODE (arg0) == RROTATE_EXPR
12380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12381 && wi::umod_trunc (wi::to_wide (arg1)
12382 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12383 prec) == 0)
12384 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12386 return NULL_TREE;
12388 case MIN_EXPR:
12389 case MAX_EXPR:
12390 goto associate;
12392 case TRUTH_ANDIF_EXPR:
12393 /* Note that the operands of this must be ints
12394 and their values must be 0 or 1.
12395 ("true" is a fixed value perhaps depending on the language.) */
12396 /* If first arg is constant zero, return it. */
12397 if (integer_zerop (arg0))
12398 return fold_convert_loc (loc, type, arg0);
12399 /* FALLTHRU */
12400 case TRUTH_AND_EXPR:
12401 /* If either arg is constant true, drop it. */
12402 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12403 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12404 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12405 /* Preserve sequence points. */
12406 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12407 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12408 /* If second arg is constant zero, result is zero, but first arg
12409 must be evaluated. */
12410 if (integer_zerop (arg1))
12411 return omit_one_operand_loc (loc, type, arg1, arg0);
12412 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12413 case will be handled here. */
12414 if (integer_zerop (arg0))
12415 return omit_one_operand_loc (loc, type, arg0, arg1);
12417 /* !X && X is always false. */
12418 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12420 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12421 /* X && !X is always false. */
12422 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12423 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12424 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12426 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12427 means A >= Y && A != MAX, but in this case we know that
12428 A < X <= MAX. */
12430 if (!TREE_SIDE_EFFECTS (arg0)
12431 && !TREE_SIDE_EFFECTS (arg1))
12433 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12434 if (tem && !operand_equal_p (tem, arg0, 0))
12435 return fold_convert (type,
12436 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12437 tem, arg1));
12439 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12440 if (tem && !operand_equal_p (tem, arg1, 0))
12441 return fold_convert (type,
12442 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12443 arg0, tem));
12446 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12447 != NULL_TREE)
12448 return tem;
12450 return NULL_TREE;
12452 case TRUTH_ORIF_EXPR:
12453 /* Note that the operands of this must be ints
12454 and their values must be 0 or 1.
12455 ("true" is a fixed value perhaps depending on the language.) */
12456 /* If first arg is constant true, return it. */
12457 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12458 return fold_convert_loc (loc, type, arg0);
12459 /* FALLTHRU */
12460 case TRUTH_OR_EXPR:
12461 /* If either arg is constant zero, drop it. */
12462 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12463 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12464 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12465 /* Preserve sequence points. */
12466 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12467 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12468 /* If second arg is constant true, result is true, but we must
12469 evaluate first arg. */
12470 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12471 return omit_one_operand_loc (loc, type, arg1, arg0);
12472 /* Likewise for first arg, but note this only occurs here for
12473 TRUTH_OR_EXPR. */
12474 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12475 return omit_one_operand_loc (loc, type, arg0, arg1);
12477 /* !X || X is always true. */
12478 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12480 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12481 /* X || !X is always true. */
12482 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12483 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12484 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12486 /* (X && !Y) || (!X && Y) is X ^ Y */
12487 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12488 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12490 tree a0, a1, l0, l1, n0, n1;
12492 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12493 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12495 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12496 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12498 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12499 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12501 if ((operand_equal_p (n0, a0, 0)
12502 && operand_equal_p (n1, a1, 0))
12503 || (operand_equal_p (n0, a1, 0)
12504 && operand_equal_p (n1, a0, 0)))
12505 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12508 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12509 != NULL_TREE)
12510 return tem;
12512 return NULL_TREE;
12514 case TRUTH_XOR_EXPR:
12515 /* If the second arg is constant zero, drop it. */
12516 if (integer_zerop (arg1))
12517 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12518 /* If the second arg is constant true, this is a logical inversion. */
12519 if (integer_onep (arg1))
12521 tem = invert_truthvalue_loc (loc, arg0);
12522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12524 /* Identical arguments cancel to zero. */
12525 if (operand_equal_p (arg0, arg1, 0))
12526 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12528 /* !X ^ X is always true. */
12529 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12530 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12531 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12533 /* X ^ !X is always true. */
12534 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12535 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12536 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12538 return NULL_TREE;
12540 case EQ_EXPR:
12541 case NE_EXPR:
12542 STRIP_NOPS (arg0);
12543 STRIP_NOPS (arg1);
12545 tem = fold_comparison (loc, code, type, op0, op1);
12546 if (tem != NULL_TREE)
12547 return tem;
12549 /* bool_var != 1 becomes !bool_var. */
12550 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12551 && code == NE_EXPR)
12552 return fold_convert_loc (loc, type,
12553 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12554 TREE_TYPE (arg0), arg0));
12556 /* bool_var == 0 becomes !bool_var. */
12557 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12558 && code == EQ_EXPR)
12559 return fold_convert_loc (loc, type,
12560 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12561 TREE_TYPE (arg0), arg0));
12563 /* !exp != 0 becomes !exp */
12564 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12565 && code == NE_EXPR)
12566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12568 /* If this is an EQ or NE comparison with zero and ARG0 is
12569 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12570 two operations, but the latter can be done in one less insn
12571 on machines that have only two-operand insns or on which a
12572 constant cannot be the first operand. */
12573 if (TREE_CODE (arg0) == BIT_AND_EXPR
12574 && integer_zerop (arg1))
12576 tree arg00 = TREE_OPERAND (arg0, 0);
12577 tree arg01 = TREE_OPERAND (arg0, 1);
12578 if (TREE_CODE (arg00) == LSHIFT_EXPR
12579 && integer_onep (TREE_OPERAND (arg00, 0)))
12581 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12582 arg01, TREE_OPERAND (arg00, 1));
12583 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12584 build_one_cst (TREE_TYPE (arg0)));
12585 return fold_build2_loc (loc, code, type,
12586 fold_convert_loc (loc, TREE_TYPE (arg1),
12587 tem), arg1);
12589 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12590 && integer_onep (TREE_OPERAND (arg01, 0)))
12592 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12593 arg00, TREE_OPERAND (arg01, 1));
12594 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12595 build_one_cst (TREE_TYPE (arg0)));
12596 return fold_build2_loc (loc, code, type,
12597 fold_convert_loc (loc, TREE_TYPE (arg1),
12598 tem), arg1);
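/* Worked example (illustrative only): for bar = 0b1010 and foo = 3,
     ((1 << foo) & bar) != 0  and  ((bar >> foo) & 1) != 0
   both yield 1; for foo = 2 both yield 0. The rewritten form avoids
   a constant as the first operand of the shift.  */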
12602 /* If this is a comparison of a field, we may be able to simplify it. */
12603 if ((TREE_CODE (arg0) == COMPONENT_REF
12604 || TREE_CODE (arg0) == BIT_FIELD_REF)
12605 /* Handle the constant case even without -O
12606 to make sure the warnings are given. */
12607 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12609 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12610 if (t1)
12611 return t1;
12614 /* Optimize comparisons of strlen vs zero to a compare of the
12615 first character of the string vs zero. To wit,
12616 strlen(ptr) == 0 => *ptr == 0
12617 strlen(ptr) != 0 => *ptr != 0
12618 Other cases should reduce to one of these two (or a constant)
12619 due to the return value of strlen being unsigned. */
12620 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12622 tree fndecl = get_callee_fndecl (arg0);
12624 if (fndecl
12625 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12626 && call_expr_nargs (arg0) == 1
12627 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12628 == POINTER_TYPE))
12630 tree ptrtype
12631 = build_pointer_type (build_qualified_type (char_type_node,
12632 TYPE_QUAL_CONST));
12633 tree ptr = fold_convert_loc (loc, ptrtype,
12634 CALL_EXPR_ARG (arg0, 0));
12635 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12636 return fold_build2_loc (loc, code, type, iref,
12637 build_int_cst (TREE_TYPE (iref), 0));
12641 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12642 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12643 if (TREE_CODE (arg0) == RSHIFT_EXPR
12644 && integer_zerop (arg1)
12645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12647 tree arg00 = TREE_OPERAND (arg0, 0);
12648 tree arg01 = TREE_OPERAND (arg0, 1);
12649 tree itype = TREE_TYPE (arg00);
12650 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12652 if (TYPE_UNSIGNED (itype))
12654 itype = signed_type_for (itype);
12655 arg00 = fold_convert_loc (loc, itype, arg00);
12657 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12658 type, arg00, build_zero_cst (itype));
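/* Illustrative example, assuming a 32-bit signed x and C = 31:
     (x >> 31) != 0  ==>  x < 0
     (x >> 31) == 0  ==>  x >= 0
   an arithmetic shift by precision-1 leaves only the sign bit.  */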
12662 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12663 (X & C) == 0 when C is a single bit. */
12664 if (TREE_CODE (arg0) == BIT_AND_EXPR
12665 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12666 && integer_zerop (arg1)
12667 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12669 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12670 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12671 TREE_OPERAND (arg0, 1));
12672 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12673 type, tem,
12674 fold_convert_loc (loc, TREE_TYPE (arg0),
12675 arg1));
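/* Illustrative example with the single bit C = 8:
     (~x & 8) == 0  ==>  (x & 8) != 0
   bit 3 of ~x is clear exactly when bit 3 of x is set, which saves
   the BIT_NOT_EXPR.  */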
12678 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12679 constant C is a power of two, i.e. a single bit. */
12680 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12681 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12682 && integer_zerop (arg1)
12683 && integer_pow2p (TREE_OPERAND (arg0, 1))
12684 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12685 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12687 tree arg00 = TREE_OPERAND (arg0, 0);
12688 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12689 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12692 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12693 when C is a power of two, i.e. a single bit. */
12694 if (TREE_CODE (arg0) == BIT_AND_EXPR
12695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12696 && integer_zerop (arg1)
12697 && integer_pow2p (TREE_OPERAND (arg0, 1))
12698 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12699 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12701 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12702 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12703 arg000, TREE_OPERAND (arg0, 1));
12704 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12705 tem, build_int_cst (TREE_TYPE (tem), 0));
12708 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12709 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12711 tree arg00 = TREE_OPERAND (arg0, 0);
12712 tree arg01 = TREE_OPERAND (arg0, 1);
12713 tree arg10 = TREE_OPERAND (arg1, 0);
12714 tree arg11 = TREE_OPERAND (arg1, 1);
12715 tree itype = TREE_TYPE (arg0);
12717 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12718 operand_equal_p guarantees no side-effects so we don't need
12719 to use omit_one_operand on Z. */
12720 if (operand_equal_p (arg01, arg11, 0))
12721 return fold_build2_loc (loc, code, type, arg00,
12722 fold_convert_loc (loc, TREE_TYPE (arg00),
12723 arg10));
12724 if (operand_equal_p (arg01, arg10, 0))
12725 return fold_build2_loc (loc, code, type, arg00,
12726 fold_convert_loc (loc, TREE_TYPE (arg00),
12727 arg11));
12728 if (operand_equal_p (arg00, arg11, 0))
12729 return fold_build2_loc (loc, code, type, arg01,
12730 fold_convert_loc (loc, TREE_TYPE (arg01),
12731 arg10));
12732 if (operand_equal_p (arg00, arg10, 0))
12733 return fold_build2_loc (loc, code, type, arg01,
12734 fold_convert_loc (loc, TREE_TYPE (arg01),
12735 arg11));
12737 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12738 if (TREE_CODE (arg01) == INTEGER_CST
12739 && TREE_CODE (arg11) == INTEGER_CST)
12741 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12742 fold_convert_loc (loc, itype, arg11));
12743 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12744 return fold_build2_loc (loc, code, type, tem,
12745 fold_convert_loc (loc, itype, arg10));
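/* Illustrative example with C1 = 5 and C2 = 3:
     (x ^ 5) == (y ^ 3)  ==>  (x ^ 6) == y
   since 5 ^ 3 == 6 and XOR by a constant is invertible.  */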
12749 /* Attempt to simplify equality/inequality comparisons of complex
12750 values. Only lower the comparison if the result is known or
12751 can be simplified to a single scalar comparison. */
12752 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12753 || TREE_CODE (arg0) == COMPLEX_CST)
12754 && (TREE_CODE (arg1) == COMPLEX_EXPR
12755 || TREE_CODE (arg1) == COMPLEX_CST))
12757 tree real0, imag0, real1, imag1;
12758 tree rcond, icond;
12760 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12762 real0 = TREE_OPERAND (arg0, 0);
12763 imag0 = TREE_OPERAND (arg0, 1);
12765 else
12767 real0 = TREE_REALPART (arg0);
12768 imag0 = TREE_IMAGPART (arg0);
12771 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12773 real1 = TREE_OPERAND (arg1, 0);
12774 imag1 = TREE_OPERAND (arg1, 1);
12776 else
12778 real1 = TREE_REALPART (arg1);
12779 imag1 = TREE_IMAGPART (arg1);
12782 rcond = fold_binary_loc (loc, code, type, real0, real1);
12783 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12785 if (integer_zerop (rcond))
12787 if (code == EQ_EXPR)
12788 return omit_two_operands_loc (loc, type, boolean_false_node,
12789 imag0, imag1);
12790 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12792 else
12794 if (code == NE_EXPR)
12795 return omit_two_operands_loc (loc, type, boolean_true_node,
12796 imag0, imag1);
12797 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12801 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12802 if (icond && TREE_CODE (icond) == INTEGER_CST)
12804 if (integer_zerop (icond))
12806 if (code == EQ_EXPR)
12807 return omit_two_operands_loc (loc, type, boolean_false_node,
12808 real0, real1);
12809 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12811 else
12813 if (code == NE_EXPR)
12814 return omit_two_operands_loc (loc, type, boolean_true_node,
12815 real0, real1);
12816 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
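/* Illustrative example: (2 + b*i) == (2 + d*i) reduces to b == d,
   while (2 + b*i) == (3 + d*i) folds directly to false, because a
   complex equality holds only if both the real and the imaginary
   comparisons hold.  */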
12821 return NULL_TREE;
12823 case LT_EXPR:
12824 case GT_EXPR:
12825 case LE_EXPR:
12826 case GE_EXPR:
12827 tem = fold_comparison (loc, code, type, op0, op1);
12828 if (tem != NULL_TREE)
12829 return tem;
12831 /* Transform comparisons of the form X +- C CMP X. */
12832 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12833 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12835 && !HONOR_SNANS (arg0))
12837 tree arg01 = TREE_OPERAND (arg0, 1);
12838 enum tree_code code0 = TREE_CODE (arg0);
12839 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12841 /* (X - c) > X becomes false. */
12842 if (code == GT_EXPR
12843 && ((code0 == MINUS_EXPR && is_positive >= 0)
12844 || (code0 == PLUS_EXPR && is_positive <= 0)))
12845 return constant_boolean_node (0, type);
12847 /* Likewise (X + c) < X becomes false. */
12848 if (code == LT_EXPR
12849 && ((code0 == PLUS_EXPR && is_positive >= 0)
12850 || (code0 == MINUS_EXPR && is_positive <= 0)))
12851 return constant_boolean_node (0, type);
12853 /* Convert (X - c) <= X to true. */
12854 if (!HONOR_NANS (arg1)
12855 && code == LE_EXPR
12856 && ((code0 == MINUS_EXPR && is_positive >= 0)
12857 || (code0 == PLUS_EXPR && is_positive <= 0)))
12858 return constant_boolean_node (1, type);
12860 /* Convert (X + c) >= X to true. */
12861 if (!HONOR_NANS (arg1)
12862 && code == GE_EXPR
12863 && ((code0 == PLUS_EXPR && is_positive >= 0)
12864 || (code0 == MINUS_EXPR && is_positive <= 0)))
12865 return constant_boolean_node (1, type);
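/* Illustrative note: (x - 1.0) > x may fold to false even when NaNs
   are honored, since a NaN x makes the original comparison false as
   well; but (x + 1.0) >= x folds to true only without NaNs, because
   for x = NaN the original comparison is false.  */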
12868 /* If we are comparing an ABS_EXPR with a constant, we can
12869 convert all the cases into explicit comparisons, but they may
12870 well not be faster than doing the ABS and one comparison.
12871 But ABS (X) <= C is a range comparison, which becomes a subtraction
12872 and a comparison, and is probably faster. */
12873 if (code == LE_EXPR
12874 && TREE_CODE (arg1) == INTEGER_CST
12875 && TREE_CODE (arg0) == ABS_EXPR
12876 && ! TREE_SIDE_EFFECTS (arg0)
12877 && (tem = negate_expr (arg1)) != 0
12878 && TREE_CODE (tem) == INTEGER_CST
12879 && !TREE_OVERFLOW (tem))
12880 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12881 build2 (GE_EXPR, type,
12882 TREE_OPERAND (arg0, 0), tem),
12883 build2 (LE_EXPR, type,
12884 TREE_OPERAND (arg0, 0), arg1));
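/* Illustrative example: abs (x) <= 5 becomes x >= -5 && x <= 5, a
   range test that later phases can implement as a single unsigned
   comparison such as (unsigned) (x + 5) <= 10.  */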
12886 /* Convert ABS_EXPR<x> >= 0 to true. */
12887 strict_overflow_p = false;
12888 if (code == GE_EXPR
12889 && (integer_zerop (arg1)
12890 || (! HONOR_NANS (arg0)
12891 && real_zerop (arg1)))
12892 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12894 if (strict_overflow_p)
12895 fold_overflow_warning (("assuming signed overflow does not occur "
12896 "when simplifying comparison of "
12897 "absolute value and zero"),
12898 WARN_STRICT_OVERFLOW_CONDITIONAL);
12899 return omit_one_operand_loc (loc, type,
12900 constant_boolean_node (true, type),
12901 arg0);
12904 /* Convert ABS_EXPR<x> < 0 to false. */
12905 strict_overflow_p = false;
12906 if (code == LT_EXPR
12907 && (integer_zerop (arg1) || real_zerop (arg1))
12908 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12910 if (strict_overflow_p)
12911 fold_overflow_warning (("assuming signed overflow does not occur "
12912 "when simplifying comparison of "
12913 "absolute value and zero"),
12914 WARN_STRICT_OVERFLOW_CONDITIONAL);
12915 return omit_one_operand_loc (loc, type,
12916 constant_boolean_node (false, type),
12917 arg0);
12920 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12921 and similarly for >= into !=. */
12922 if ((code == LT_EXPR || code == GE_EXPR)
12923 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12924 && TREE_CODE (arg1) == LSHIFT_EXPR
12925 && integer_onep (TREE_OPERAND (arg1, 0)))
12926 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12927 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12928 TREE_OPERAND (arg1, 1)),
12929 build_zero_cst (TREE_TYPE (arg0)));
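/* Illustrative example, for unsigned x and an in-range shift count y:
     x < (1u << y)   ==>  (x >> y) == 0
     x >= (1u << y)  ==>  (x >> y) != 0
   x < 2**y exactly when no bit at position y or above is set.  */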
12931 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
12932 otherwise Y might be >= # of bits in X's type and thus e.g.
12933 (unsigned char) (1 << Y) for Y == 15 might be 0.
12934 If the cast is widening, then 1 << Y should have unsigned type,
12935 otherwise if Y is number of bits in the signed shift type minus 1,
12936 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
12937 Y == 31 might be 0xffffffff80000000. */
12938 if ((code == LT_EXPR || code == GE_EXPR)
12939 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12940 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12941 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12942 && CONVERT_EXPR_P (arg1)
12943 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12944 && (element_precision (TREE_TYPE (arg1))
12945 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12946 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12947 || (element_precision (TREE_TYPE (arg1))
12948 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12949 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12951 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12952 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12953 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12954 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12955 build_zero_cst (TREE_TYPE (arg0)));
12958 return NULL_TREE;
12960 case UNORDERED_EXPR:
12961 case ORDERED_EXPR:
12962 case UNLT_EXPR:
12963 case UNLE_EXPR:
12964 case UNGT_EXPR:
12965 case UNGE_EXPR:
12966 case UNEQ_EXPR:
12967 case LTGT_EXPR:
12968 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12970 tree targ0 = strip_float_extensions (arg0);
12971 tree targ1 = strip_float_extensions (arg1);
12972 tree newtype = TREE_TYPE (targ0);
12974 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12975 newtype = TREE_TYPE (targ1);
12977 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
12978 && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
12979 return fold_build2_loc (loc, code, type,
12980 fold_convert_loc (loc, newtype, targ0),
12981 fold_convert_loc (loc, newtype, targ1));
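/* Illustrative example: (double) f1 < (double) f2 for float f1, f2
   folds to f1 < f2; the float-to-double extension is exact and
   order-preserving, so the narrower comparison gives the same
   result.  */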
12984 return NULL_TREE;
12986 case COMPOUND_EXPR:
12987 /* When pedantic, a compound expression can be neither an lvalue
12988 nor an integer constant expression. */
12989 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12990 return NULL_TREE;
12991 /* Don't let (0, 0) be a null pointer constant. */
12992 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12993 : fold_convert_loc (loc, type, arg1);
12994 return tem;
12996 default:
12997 return NULL_TREE;
12998 } /* switch (code) */
13001 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
13002 ((A & N) + B) & M -> (A + B) & M
13003 Similarly if (N & M) == 0,
13004 ((A | N) + B) & M -> (A + B) & M
13005 and for - instead of + (or unary - instead of +)
13006 and/or ^ instead of |.
13007 If B is constant and (B & M) == 0, fold into A & M.
13009 This function is a helper for match.pd patterns. Return the non-NULL
13010 type in which the simplified operation should be performed, but only
13011 if some optimization is possible; otherwise return NULL_TREE.
13013 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
13014 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
13015 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
13016 +/-. */
13017 tree
13018 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
13019 tree arg00, enum tree_code code00, tree arg000, tree arg001,
13020 tree arg01, enum tree_code code01, tree arg010, tree arg011,
13021 tree *pmop)
13023 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
13024 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
13025 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
13026 if (~cst1 == 0
13027 || (cst1 & (cst1 + 1)) != 0
13028 || !INTEGRAL_TYPE_P (type)
13029 || (!TYPE_OVERFLOW_WRAPS (type)
13030 && TREE_CODE (type) != INTEGER_TYPE)
13031 || (wi::max_value (type) & cst1) != cst1)
13032 return NULL_TREE;
13034 enum tree_code codes[2] = { code00, code01 };
13035 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
13036 int which = 0;
13037 wide_int cst0;
13039 /* Now we know that arg0 is (C + D) or (C - D) or -C and
13040 arg1 (M) is == (1LL << cst) - 1.
13041 Store C into PMOP[0] and D into PMOP[1]. */
13042 pmop[0] = arg00;
13043 pmop[1] = arg01;
13044 which = code != NEGATE_EXPR;
13046 for (; which >= 0; which--)
13047 switch (codes[which])
13049 case BIT_AND_EXPR:
13050 case BIT_IOR_EXPR:
13051 case BIT_XOR_EXPR:
13052 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
13053 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
13054 if (codes[which] == BIT_AND_EXPR)
13056 if (cst0 != cst1)
13057 break;
13059 else if (cst0 != 0)
13060 break;
13061 /* If C or D is of the form (A & N) where
13062 (N & M) == M, or of the form (A | N) or
13063 (A ^ N) where (N & M) == 0, replace it with A. */
13064 pmop[which] = arg0xx[2 * which];
13065 break;
13066 case ERROR_MARK:
13067 if (TREE_CODE (pmop[which]) != INTEGER_CST)
13068 break;
13069 /* If C or D is a constant N where (N & M) == 0, it can be
13070 omitted (replaced with 0). */
13071 if ((code == PLUS_EXPR
13072 || (code == MINUS_EXPR && which == 0))
13073 && (cst1 & wi::to_wide (pmop[which])) == 0)
13074 pmop[which] = build_int_cst (type, 0);
13075 /* Similarly, with C - N where (-N & M) == 0. */
13076 if (code == MINUS_EXPR
13077 && which == 1
13078 && (cst1 & -wi::to_wide (pmop[which])) == 0)
13079 pmop[which] = build_int_cst (type, 0);
13080 break;
13081 default:
13082 gcc_unreachable ();
13085 /* Only build anything new if we optimized one or both arguments above. */
13086 if (pmop[0] == arg00 && pmop[1] == arg01)
13087 return NULL_TREE;
13089 if (TYPE_OVERFLOW_WRAPS (type))
13090 return type;
13091 else
13092 return unsigned_type_for (type);
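/* Illustrative example, with M = 0xff so that M == (1 << 8) - 1:
     ((a & 0x1ff) + b) & 0xff  ==>  (a + b) & 0xff  as (0x1ff & M) == M
     ((a | 0x100) + b) & 0xff  ==>  (a + b) & 0xff  as (0x100 & M) == 0
   bits at or above bit 8 cannot influence the low 8 bits of the sum.  */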
13095 /* Used by contains_label_p and contains_label_1. */
13097 struct contains_label_data
13099 hash_set<tree> *pset;
13100 bool inside_switch_p;
13103 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13104 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
13105 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13107 static tree
13108 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
13110 contains_label_data *d = (contains_label_data *) data;
13111 switch (TREE_CODE (*tp))
13113 case LABEL_EXPR:
13114 return *tp;
13116 case CASE_LABEL_EXPR:
13117 if (!d->inside_switch_p)
13118 return *tp;
13119 return NULL_TREE;
13121 case SWITCH_EXPR:
13122 if (!d->inside_switch_p)
13124 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
13125 return *tp;
13126 d->inside_switch_p = true;
13127 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
13128 return *tp;
13129 d->inside_switch_p = false;
13130 *walk_subtrees = 0;
13132 return NULL_TREE;
13134 case GOTO_EXPR:
13135 *walk_subtrees = 0;
13136 return NULL_TREE;
13138 default:
13139 return NULL_TREE;
13143 /* Return whether the sub-tree ST contains a label which is accessible from
13144 outside the sub-tree. */
13146 static bool
13147 contains_label_p (tree st)
13149 hash_set<tree> pset;
13150 contains_label_data data = { &pset, false };
13151 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
13154 /* Fold a ternary expression of code CODE and type TYPE with operands
13155 OP0, OP1, and OP2. Return the folded expression if folding is
13156 successful. Otherwise, return NULL_TREE. */
13158 tree
13159 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13160 tree op0, tree op1, tree op2)
13162 tree tem;
13163 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13164 enum tree_code_class kind = TREE_CODE_CLASS (code);
13166 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13167 && TREE_CODE_LENGTH (code) == 3);
13169 /* If this is a commutative operation, and OP0 is a constant, move it
13170 to OP1 to reduce the number of tests below. */
13171 if (commutative_ternary_tree_code (code)
13172 && tree_swap_operands_p (op0, op1))
13173 return fold_build3_loc (loc, code, type, op1, op0, op2);
13175 tem = generic_simplify (loc, code, type, op0, op1, op2);
13176 if (tem)
13177 return tem;
13179 /* Strip any conversions that don't change the mode. This is safe
13180 for every expression, except for a comparison expression because
13181 its signedness is derived from its operands. So, in the latter
13182 case, only strip conversions that don't change the signedness.
13184 Note that this is done as an internal manipulation within the
13185 constant folder, in order to find the simplest representation of
13186 the arguments so that their form can be studied. In any cases,
13187 the appropriate type conversions should be put back in the tree
13188 that will get out of the constant folder. */
13189 if (op0)
13191 arg0 = op0;
13192 STRIP_NOPS (arg0);
13195 if (op1)
13197 arg1 = op1;
13198 STRIP_NOPS (arg1);
13201 if (op2)
13203 arg2 = op2;
13204 STRIP_NOPS (arg2);
13207 switch (code)
13209 case COMPONENT_REF:
13210 if (TREE_CODE (arg0) == CONSTRUCTOR
13211 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13213 unsigned HOST_WIDE_INT idx;
13214 tree field, value;
13215 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13216 if (field == arg1)
13217 return value;
13219 return NULL_TREE;
13221 case COND_EXPR:
13222 case VEC_COND_EXPR:
13223 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13224 so all simple results must be passed through pedantic_non_lvalue. */
13225 if (TREE_CODE (arg0) == INTEGER_CST)
13227 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13228 tem = integer_zerop (arg0) ? op2 : op1;
13229 /* Only optimize constant conditions when the selected branch
13230 has the same type as the COND_EXPR. This avoids optimizing
13231 away "c ? x : throw", where the throw has a void type.
13232 Also avoid throwing away an operand that contains a label. */
13233 if ((!TREE_SIDE_EFFECTS (unused_op)
13234 || !contains_label_p (unused_op))
13235 && (! VOID_TYPE_P (TREE_TYPE (tem))
13236 || VOID_TYPE_P (type)))
13237 return protected_set_expr_location_unshare (tem, loc);
13238 return NULL_TREE;
13240 else if (TREE_CODE (arg0) == VECTOR_CST)
13242 unsigned HOST_WIDE_INT nelts;
13243 if ((TREE_CODE (arg1) == VECTOR_CST
13244 || TREE_CODE (arg1) == CONSTRUCTOR)
13245 && (TREE_CODE (arg2) == VECTOR_CST
13246 || TREE_CODE (arg2) == CONSTRUCTOR)
13247 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13249 vec_perm_builder sel (nelts, nelts, 1);
13250 for (unsigned int i = 0; i < nelts; i++)
13252 tree val = VECTOR_CST_ELT (arg0, i);
13253 if (integer_all_onesp (val))
13254 sel.quick_push (i);
13255 else if (integer_zerop (val))
13256 sel.quick_push (nelts + i);
13257 else /* Currently unreachable. */
13258 return NULL_TREE;
13260 vec_perm_indices indices (sel, 2, nelts);
13261 tree t = fold_vec_perm (type, arg1, arg2, indices);
13262 if (t != NULL_TREE)
13263 return t;
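/* Illustrative example: a VEC_COND_EXPR whose constant mask is
   { -1, 0, 0, -1 }, selecting between constant vectors a and b, is
   folded as the permutation { a[0], b[1], b[2], a[3] }.  */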
13267 /* If we have A op B ? A : C, we may be able to convert this to a
13268 simpler expression, depending on the operation and the values
13269 of B and C. Signed zeros prevent all of these transformations,
13270 for reasons given above each one.
13272 Also try swapping the arguments and inverting the conditional. */
13273 if (COMPARISON_CLASS_P (arg0)
13274 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13275 && !HONOR_SIGNED_ZEROS (op1))
13277 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13278 TREE_OPERAND (arg0, 0),
13279 TREE_OPERAND (arg0, 1),
13280 op1, op2);
13281 if (tem)
13282 return tem;
13285 if (COMPARISON_CLASS_P (arg0)
13286 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13287 && !HONOR_SIGNED_ZEROS (op2))
13289 enum tree_code comp_code = TREE_CODE (arg0);
13290 tree arg00 = TREE_OPERAND (arg0, 0);
13291 tree arg01 = TREE_OPERAND (arg0, 1);
13292 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13293 if (comp_code != ERROR_MARK)
13294 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13295 arg00,
13296 arg01,
13297 op2, op1);
13298 if (tem)
13299 return tem;
13302 /* If the second operand is simpler than the third, swap them
13303 since that produces better jump optimization results. */
13304 if (truth_value_p (TREE_CODE (arg0))
13305 && tree_swap_operands_p (op1, op2))
13307 location_t loc0 = expr_location_or (arg0, loc);
13308 /* See if this can be inverted. If it can't, possibly because
13309 it was a floating-point inequality comparison, don't do
13310 anything. */
13311 tem = fold_invert_truthvalue (loc0, arg0);
13312 if (tem)
13313 return fold_build3_loc (loc, code, type, tem, op2, op1);
13316 /* Convert A ? 1 : 0 to simply A. */
13317 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13318 : (integer_onep (op1)
13319 && !VECTOR_TYPE_P (type)))
13320 && integer_zerop (op2)
13321 /* If we try to convert OP0 to our type, the
13322 call to fold will try to move the conversion inside
13323 a COND, which will recurse. In that case, the COND_EXPR
13324 is probably the best choice, so leave it alone. */
13325 && type == TREE_TYPE (arg0))
13326 return protected_set_expr_location_unshare (arg0, loc);
13328 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13329 over COND_EXPR in cases such as floating point comparisons. */
13330 if (integer_zerop (op1)
13331 && code == COND_EXPR
13332 && integer_onep (op2)
13333 && !VECTOR_TYPE_P (type)
13334 && truth_value_p (TREE_CODE (arg0)))
13335 return fold_convert_loc (loc, type,
13336 invert_truthvalue_loc (loc, arg0));
13338 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13339 if (TREE_CODE (arg0) == LT_EXPR
13340 && integer_zerop (TREE_OPERAND (arg0, 1))
13341 && integer_zerop (op2)
13342 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13344 /* sign_bit_p looks through both zero and sign extensions,
13345 but for this optimization only sign extensions are
13346 usable. */
13347 tree tem2 = TREE_OPERAND (arg0, 0);
13348 while (tem != tem2)
13350 if (TREE_CODE (tem2) != NOP_EXPR
13351 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13353 tem = NULL_TREE;
13354 break;
13356 tem2 = TREE_OPERAND (tem2, 0);
13358 /* sign_bit_p only checks ARG1 bits within A's precision.
13359 If <sign bit of A> has wider type than A, bits outside
13360 of A's precision in <sign bit of A> need to be checked.
13361 If they are all 0, this optimization needs to be done
13362 in unsigned A's type, if they are all 1 in signed A's type,
13363 otherwise this can't be done. */
13364 if (tem
13365 && TYPE_PRECISION (TREE_TYPE (tem))
13366 < TYPE_PRECISION (TREE_TYPE (arg1))
13367 && TYPE_PRECISION (TREE_TYPE (tem))
13368 < TYPE_PRECISION (type))
13370 int inner_width, outer_width;
13371 tree tem_type;
13373 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13374 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13375 if (outer_width > TYPE_PRECISION (type))
13376 outer_width = TYPE_PRECISION (type);
13378 wide_int mask = wi::shifted_mask
13379 (inner_width, outer_width - inner_width, false,
13380 TYPE_PRECISION (TREE_TYPE (arg1)));
13382 wide_int common = mask & wi::to_wide (arg1);
13383 if (common == mask)
13385 tem_type = signed_type_for (TREE_TYPE (tem));
13386 tem = fold_convert_loc (loc, tem_type, tem);
13388 else if (common == 0)
13390 tem_type = unsigned_type_for (TREE_TYPE (tem));
13391 tem = fold_convert_loc (loc, tem_type, tem);
13393 else
13394 tem = NULL;
13397 if (tem)
13398 return
13399 fold_convert_loc (loc, type,
13400 fold_build2_loc (loc, BIT_AND_EXPR,
13401 TREE_TYPE (tem), tem,
13402 fold_convert_loc (loc,
13403 TREE_TYPE (tem),
13404 arg1)));
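/* Illustrative example, for 32-bit int x:
     x < 0 ? INT_MIN : 0  ==>  x & INT_MIN
   masking out everything but the sign bit reproduces the
   conditional's two possible values.  */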
13407 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13408 already handled above. */
13409 if (TREE_CODE (arg0) == BIT_AND_EXPR
13410 && integer_onep (TREE_OPERAND (arg0, 1))
13411 && integer_zerop (op2)
13412 && integer_pow2p (arg1))
13414 tree tem = TREE_OPERAND (arg0, 0);
13415 STRIP_NOPS (tem);
13416 if (TREE_CODE (tem) == RSHIFT_EXPR
13417 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13418 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13419 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13420 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13421 fold_convert_loc (loc, type,
13422 TREE_OPERAND (tem, 0)),
13423 op1);
13426 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13427 is probably obsolete because the first operand should be a
13428 truth value (that's why we have the two cases above), but let's
13429 leave it in until we can confirm this for all front-ends. */
13430 if (integer_zerop (op2)
13431 && TREE_CODE (arg0) == NE_EXPR
13432 && integer_zerop (TREE_OPERAND (arg0, 1))
13433 && integer_pow2p (arg1)
13434 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13435 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13436 arg1, OEP_ONLY_CONST)
13437 /* operand_equal_p compares just the value, not the precision, so e.g.
13438 arg1 could be 8-bit -128 and be a power of two, while the
13439 BIT_AND_EXPR's second operand is 32-bit -128, which is not a
13440 power of two (or vice versa). */
13441 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13442 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13444 /* Disable the transformations below for vectors, since
13445 fold_binary_op_with_conditional_arg may undo them immediately,
13446 yielding an infinite loop. */
13447 if (code == VEC_COND_EXPR)
13448 return NULL_TREE;
13450 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13451 if (integer_zerop (op2)
13452 && truth_value_p (TREE_CODE (arg0))
13453 && truth_value_p (TREE_CODE (arg1))
13454 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13455 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13456 : TRUTH_ANDIF_EXPR,
13457 type, fold_convert_loc (loc, type, arg0), op1);
13459 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13460 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13461 && truth_value_p (TREE_CODE (arg0))
13462 && truth_value_p (TREE_CODE (arg1))
13463 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13465 location_t loc0 = expr_location_or (arg0, loc);
13466 /* Only perform transformation if ARG0 is easily inverted. */
13467 tem = fold_invert_truthvalue (loc0, arg0);
13468 if (tem)
13469 return fold_build2_loc (loc, code == VEC_COND_EXPR
13470 ? BIT_IOR_EXPR
13471 : TRUTH_ORIF_EXPR,
13472 type, fold_convert_loc (loc, type, tem),
13473 op1);
13476 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13477 if (integer_zerop (arg1)
13478 && truth_value_p (TREE_CODE (arg0))
13479 && truth_value_p (TREE_CODE (op2))
13480 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13482 location_t loc0 = expr_location_or (arg0, loc);
13483 /* Only perform transformation if ARG0 is easily inverted. */
13484 tem = fold_invert_truthvalue (loc0, arg0);
13485 if (tem)
13486 return fold_build2_loc (loc, code == VEC_COND_EXPR
13487 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13488 type, fold_convert_loc (loc, type, tem),
13489 op2);
13492 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13493 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13494 && truth_value_p (TREE_CODE (arg0))
13495 && truth_value_p (TREE_CODE (op2))
13496 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13497 return fold_build2_loc (loc, code == VEC_COND_EXPR
13498 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13499 type, fold_convert_loc (loc, type, arg0), op2);
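/* Illustrative summary of the four truth-value conversions above:
     a ? b : 0  ==>  a && b
     a ? b : 1  ==>  !a || b
     a ? 0 : b  ==>  !a && b
     a ? 1 : b  ==>  a || b
   (with bitwise AND/IOR instead for VEC_COND_EXPR).  */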
13501 return NULL_TREE;
13503 case CALL_EXPR:
13504 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13505 of fold_ternary on them. */
13506 gcc_unreachable ();
13508 case BIT_FIELD_REF:
13509 if (TREE_CODE (arg0) == VECTOR_CST
13510 && (type == TREE_TYPE (TREE_TYPE (arg0))
13511 || (VECTOR_TYPE_P (type)
13512 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13513 && tree_fits_uhwi_p (op1)
13514 && tree_fits_uhwi_p (op2))
13516 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13517 unsigned HOST_WIDE_INT width
13518 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13519 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13520 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13521 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13523 if (n != 0
13524 && (idx % width) == 0
13525 && (n % width) == 0
13526 && known_le ((idx + n) / width,
13527 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13529 idx = idx / width;
13530 n = n / width;
13532 if (TREE_CODE (arg0) == VECTOR_CST)
13534 if (n == 1)
13536 tem = VECTOR_CST_ELT (arg0, idx);
13537 if (VECTOR_TYPE_P (type))
13538 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13539 return tem;
13542 tree_vector_builder vals (type, n, 1);
13543 for (unsigned i = 0; i < n; ++i)
13544 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13545 return vals.build ();
13550 /* On constants we can use native encode/interpret to constant
13551 fold (nearly) all BIT_FIELD_REFs. */
13552 if (CONSTANT_CLASS_P (arg0)
13553 && can_native_interpret_type_p (type)
13554 && BITS_PER_UNIT == 8
13555 && tree_fits_uhwi_p (op1)
13556 && tree_fits_uhwi_p (op2))
13558 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13559 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13560 /* Limit us to a reasonable amount of work. To relax the
13561 other limitations we need bit-shifting of the buffer
13562 and rounding up the size. */
13563 if (bitpos % BITS_PER_UNIT == 0
13564 && bitsize % BITS_PER_UNIT == 0
13565 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13567 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13568 unsigned HOST_WIDE_INT len
13569 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13570 bitpos / BITS_PER_UNIT);
13571 if (len > 0
13572 && len * BITS_PER_UNIT >= bitsize)
13574 tree v = native_interpret_expr (type, b,
13575 bitsize / BITS_PER_UNIT);
13576 if (v)
13577 return v;
13582 return NULL_TREE;
13584 case VEC_PERM_EXPR:
13585 /* Perform constant folding of VEC_PERM_EXPR. */
13586 if (TREE_CODE (arg2) == VECTOR_CST
13587 && TREE_CODE (op0) == VECTOR_CST
13588 && TREE_CODE (op1) == VECTOR_CST)
13590 /* Build a vector of integers from the tree mask. */
13591 vec_perm_builder builder;
13592 if (!tree_to_vec_perm_builder (&builder, arg2))
13593 return NULL_TREE;
13595 /* Create a vec_perm_indices for the integer vector. */
13596 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13597 bool single_arg = (op0 == op1);
13598 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13599 return fold_vec_perm (type, op0, op1, sel);
13601 return NULL_TREE;
13603 case BIT_INSERT_EXPR:
13604 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13605 if (TREE_CODE (arg0) == INTEGER_CST
13606 && TREE_CODE (arg1) == INTEGER_CST)
13608 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13609 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13610 wide_int tem = (wi::to_wide (arg0)
13611 & wi::shifted_mask (bitpos, bitsize, true,
13612 TYPE_PRECISION (type)));
13613 wide_int tem2
13614 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13615 bitsize), bitpos);
13616 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
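/* Worked example (illustrative): inserting the 8-bit value 0xab into
   the 32-bit constant 0x1234 at bit position 8 computes
   (0x1234 & ~0xff00) | (0xab << 8), i.e. 0x34 | 0xab00 == 0xab34.  */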
13618 else if (TREE_CODE (arg0) == VECTOR_CST
13619 && CONSTANT_CLASS_P (arg1)
13620 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13621 TREE_TYPE (arg1)))
13623 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13624 unsigned HOST_WIDE_INT elsize
13625 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13626 if (bitpos % elsize == 0)
13628 unsigned k = bitpos / elsize;
13629 unsigned HOST_WIDE_INT nelts;
13630 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13631 return arg0;
13632 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13634 tree_vector_builder elts (type, nelts, 1);
13635 elts.quick_grow (nelts);
13636 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13637 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13638 return elts.build ();
13642 return NULL_TREE;
13644 default:
13645 return NULL_TREE;
13646 } /* switch (code) */
13649 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13650 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13651 constructor element index of the value returned. If the element is
13652 not found NULL_TREE is returned and *CTOR_IDX is updated to
13653 the index of the element after the ACCESS_INDEX position (which
13654 may be outside of the CTOR array). */
13656 tree
13657 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13658 unsigned *ctor_idx)
13660 tree index_type = NULL_TREE;
13661 signop index_sgn = UNSIGNED;
13662 offset_int low_bound = 0;
13664 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13666 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13667 if (domain_type && TYPE_MIN_VALUE (domain_type))
13669 /* Static constructors for variably sized objects make no sense. */
13670 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13671 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13672 /* ??? When it is obvious that the range is signed, treat it so. */
13673 if (TYPE_UNSIGNED (index_type)
13674 && TYPE_MAX_VALUE (domain_type)
13675 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13676 TYPE_MIN_VALUE (domain_type)))
13678 index_sgn = SIGNED;
13679 low_bound
13680 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13681 SIGNED);
13683 else
13685 index_sgn = TYPE_SIGN (index_type);
13686 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13691 if (index_type)
13692 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13693 index_sgn);
13695 offset_int index = low_bound;
13696 if (index_type)
13697 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13699 offset_int max_index = index;
13700 unsigned cnt;
13701 tree cfield, cval;
13702 bool first_p = true;
13704 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13706 /* An array constructor might explicitly set the index, specify a range,
13707 or leave the index NULL, meaning that it is the next index after the
13708 previous one. */
13709 if (cfield)
13711 if (TREE_CODE (cfield) == INTEGER_CST)
13712 max_index = index
13713 = offset_int::from (wi::to_wide (cfield), index_sgn);
13714 else
13716 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13717 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13718 index_sgn);
13719 max_index
13720 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13721 index_sgn);
13722 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13725 else if (!first_p)
13727 index = max_index + 1;
13728 if (index_type)
13729 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13730 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13731 max_index = index;
13733 else
13734 first_p = false;
13736 /* Do we have a match? */
13737 if (wi::cmp (access_index, index, index_sgn) >= 0)
13739 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13741 if (ctor_idx)
13742 *ctor_idx = cnt;
13743 return cval;
13746 else if (in_gimple_form)
13747 /* We're past the element we search for. Note that during parsing
13748 the elements might not be sorted.
13749 ??? We should use a binary search and a flag on the
13750 CONSTRUCTOR as to whether elements are sorted in declaration
13751 order. */
13752 break;
13754 if (ctor_idx)
13755 *ctor_idx = cnt;
13756 return NULL_TREE;
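/* Illustrative example: for the constructor { [0] = 10, [2 ... 5] = 7 },
   an ACCESS_INDEX of 3 falls inside the RANGE_EXPR and returns 7,
   while an ACCESS_INDEX of 1 matches no element and yields NULL_TREE.  */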
13759 /* Perform constant folding and related simplification of EXPR.
13760 The related simplifications include x*1 => x, x*0 => 0, etc.,
13761 and application of the associative law.
13762 NOP_EXPR conversions may be removed freely (as long as we
13763 are careful not to change the type of the overall expression).
13764 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13765 but we can constant-fold them if they have constant operands. */
13767 #ifdef ENABLE_FOLD_CHECKING
13768 # define fold(x) fold_1 (x)
13769 static tree fold_1 (tree);
13770 static
13771 #endif
13772 tree
13773 fold (tree expr)
13775 const tree t = expr;
13776 enum tree_code code = TREE_CODE (t);
13777 enum tree_code_class kind = TREE_CODE_CLASS (code);
13778 tree tem;
13779 location_t loc = EXPR_LOCATION (expr);
13781 /* Return right away if a constant. */
13782 if (kind == tcc_constant)
13783 return t;
13785 /* CALL_EXPR-like objects with variable numbers of operands are
13786 treated specially. */
13787 if (kind == tcc_vl_exp)
13789 if (code == CALL_EXPR)
13791 tem = fold_call_expr (loc, expr, false);
13792 return tem ? tem : expr;
13794 return expr;
13797 if (IS_EXPR_CODE_CLASS (kind))
13799 tree type = TREE_TYPE (t);
13800 tree op0, op1, op2;
13802 switch (TREE_CODE_LENGTH (code))
13804 case 1:
13805 op0 = TREE_OPERAND (t, 0);
13806 tem = fold_unary_loc (loc, code, type, op0);
13807 return tem ? tem : expr;
13808 case 2:
13809 op0 = TREE_OPERAND (t, 0);
13810 op1 = TREE_OPERAND (t, 1);
13811 tem = fold_binary_loc (loc, code, type, op0, op1);
13812 return tem ? tem : expr;
13813 case 3:
13814 op0 = TREE_OPERAND (t, 0);
13815 op1 = TREE_OPERAND (t, 1);
13816 op2 = TREE_OPERAND (t, 2);
13817 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13818 return tem ? tem : expr;
13819 default:
13820 break;
13824 switch (code)
13826 case ARRAY_REF:
13828 tree op0 = TREE_OPERAND (t, 0);
13829 tree op1 = TREE_OPERAND (t, 1);
13831 if (TREE_CODE (op1) == INTEGER_CST
13832 && TREE_CODE (op0) == CONSTRUCTOR
13833 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13835 tree val = get_array_ctor_element_at_index (op0,
13836 wi::to_offset (op1));
13837 if (val)
13838 return val;
13841 return t;
13844 /* Return a VECTOR_CST if possible. */
13845 case CONSTRUCTOR:
13847 tree type = TREE_TYPE (t);
13848 if (TREE_CODE (type) != VECTOR_TYPE)
13849 return t;
13851 unsigned i;
13852 tree val;
13853 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13854 if (! CONSTANT_CLASS_P (val))
13855 return t;
13857 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13860 case CONST_DECL:
13861 return fold (DECL_INITIAL (t));
13863 default:
13864 return t;
13865 } /* switch (code) */
13868 #ifdef ENABLE_FOLD_CHECKING
13869 #undef fold
13871 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13872 hash_table<nofree_ptr_hash<const tree_node> > *);
13873 static void fold_check_failed (const_tree, const_tree);
13874 void print_fold_checksum (const_tree);
13876 /* When --enable-checking=fold, compute a digest of expr before
13877 and after the actual fold call to verify that fold did not
13878 accidentally change the original expr. */
13880 tree
13881 fold (tree expr)
13883 tree ret;
13884 struct md5_ctx ctx;
13885 unsigned char checksum_before[16], checksum_after[16];
13886 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13888 md5_init_ctx (&ctx);
13889 fold_checksum_tree (expr, &ctx, &ht);
13890 md5_finish_ctx (&ctx, checksum_before);
13891 ht.empty ();
13893 ret = fold_1 (expr);
13895 md5_init_ctx (&ctx);
13896 fold_checksum_tree (expr, &ctx, &ht);
13897 md5_finish_ctx (&ctx, checksum_after);
13899 if (memcmp (checksum_before, checksum_after, 16))
13900 fold_check_failed (expr, ret);
13902 return ret;
13905 void
13906 print_fold_checksum (const_tree expr)
13908 struct md5_ctx ctx;
13909 unsigned char checksum[16], cnt;
13910 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13912 md5_init_ctx (&ctx);
13913 fold_checksum_tree (expr, &ctx, &ht);
13914 md5_finish_ctx (&ctx, checksum);
13915 for (cnt = 0; cnt < 16; ++cnt)
13916 fprintf (stderr, "%02x", checksum[cnt]);
13917 putc ('\n', stderr);
13920 static void
13921 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13923 internal_error ("fold check: original tree changed by fold");
13926 static void
13927 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13928 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13930 const tree_node **slot;
13931 enum tree_code code;
13932 union tree_node *buf;
13933 int i, len;
13935 recursive_label:
13936 if (expr == NULL)
13937 return;
13938 slot = ht->find_slot (expr, INSERT);
13939 if (*slot != NULL)
13940 return;
13941 *slot = expr;
13942 code = TREE_CODE (expr);
13943 if (TREE_CODE_CLASS (code) == tcc_declaration
13944 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13946 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13947 size_t sz = tree_size (expr);
13948 buf = XALLOCAVAR (union tree_node, sz);
13949 memcpy ((char *) buf, expr, sz);
13950 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13951 buf->decl_with_vis.symtab_node = NULL;
13952 buf->base.nowarning_flag = 0;
13953 expr = (tree) buf;
13955 else if (TREE_CODE_CLASS (code) == tcc_type
13956 && (TYPE_POINTER_TO (expr)
13957 || TYPE_REFERENCE_TO (expr)
13958 || TYPE_CACHED_VALUES_P (expr)
13959 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13960 || TYPE_NEXT_VARIANT (expr)
13961 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13963 /* Allow these fields to be modified. */
13964 tree tmp;
13965 size_t sz = tree_size (expr);
13966 buf = XALLOCAVAR (union tree_node, sz);
13967 memcpy ((char *) buf, expr, sz);
13968 expr = tmp = (tree) buf;
13969 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13970 TYPE_POINTER_TO (tmp) = NULL;
13971 TYPE_REFERENCE_TO (tmp) = NULL;
13972 TYPE_NEXT_VARIANT (tmp) = NULL;
13973 TYPE_ALIAS_SET (tmp) = -1;
13974 if (TYPE_CACHED_VALUES_P (tmp))
13976 TYPE_CACHED_VALUES_P (tmp) = 0;
13977 TYPE_CACHED_VALUES (tmp) = NULL;
13980 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13982 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13983 that and change builtins.cc etc. instead - see PR89543. */
13984 size_t sz = tree_size (expr);
13985 buf = XALLOCAVAR (union tree_node, sz);
13986 memcpy ((char *) buf, expr, sz);
13987 buf->base.nowarning_flag = 0;
13988 expr = (tree) buf;
13990 md5_process_bytes (expr, tree_size (expr), ctx);
13991 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13992 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13993 if (TREE_CODE_CLASS (code) != tcc_type
13994 && TREE_CODE_CLASS (code) != tcc_declaration
13995 && code != TREE_LIST
13996 && code != SSA_NAME
13997 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13998 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13999 switch (TREE_CODE_CLASS (code))
14001 case tcc_constant:
14002 switch (code)
14004 case STRING_CST:
14005 md5_process_bytes (TREE_STRING_POINTER (expr),
14006 TREE_STRING_LENGTH (expr), ctx);
14007 break;
14008 case COMPLEX_CST:
14009 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14010 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14011 break;
14012 case VECTOR_CST:
14013 len = vector_cst_encoded_nelts (expr);
14014 for (i = 0; i < len; ++i)
14015 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
14016 break;
14017 default:
14018 break;
14020 break;
14021 case tcc_exceptional:
14022 switch (code)
14024 case TREE_LIST:
14025 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14026 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14027 expr = TREE_CHAIN (expr);
14028 goto recursive_label;
14029 break;
14030 case TREE_VEC:
14031 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14032 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14033 break;
14034 default:
14035 break;
14037 break;
14038 case tcc_expression:
14039 case tcc_reference:
14040 case tcc_comparison:
14041 case tcc_unary:
14042 case tcc_binary:
14043 case tcc_statement:
14044 case tcc_vl_exp:
14045 len = TREE_OPERAND_LENGTH (expr);
14046 for (i = 0; i < len; ++i)
14047 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14048 break;
14049 case tcc_declaration:
14050 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14051 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14052 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14054 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14055 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14056 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14057 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14058 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14061 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14063 if (TREE_CODE (expr) == FUNCTION_DECL)
14065 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14066 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14068 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14070 break;
14071 case tcc_type:
14072 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14073 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14074 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14075 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14076 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14077 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14078 if (INTEGRAL_TYPE_P (expr)
14079 || SCALAR_FLOAT_TYPE_P (expr))
14081 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14082 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14084 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14085 if (RECORD_OR_UNION_TYPE_P (expr))
14086 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14087 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14088 break;
14089 default:
14090 break;
14094 /* Helper function for outputting the checksum of a tree T. When
14095 debugging with gdb, you can "define mynext" to be "next" followed
14096 by "call debug_fold_checksum (op0)", then just trace down till the
14097 outputs differ. */
14099 DEBUG_FUNCTION void
14100 debug_fold_checksum (const_tree t)
14102 int i;
14103 unsigned char checksum[16];
14104 struct md5_ctx ctx;
14105 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14107 md5_init_ctx (&ctx);
14108 fold_checksum_tree (t, &ctx, &ht);
14109 md5_finish_ctx (&ctx, checksum);
14110 ht.empty ();
14112 for (i = 0; i < 16; i++)
14113 fprintf (stderr, "%d ", checksum[i]);
14115 fprintf (stderr, "\n");
14118 #endif
14120 /* Fold a unary tree expression with code CODE of type TYPE with an
14121 operand OP0. LOC is the location of the resulting expression.
14122 Return a folded expression if successful. Otherwise, return a tree
14123 expression with code CODE of type TYPE with an operand OP0. */
14125 tree
14126 fold_build1_loc (location_t loc,
14127 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14129 tree tem;
14130 #ifdef ENABLE_FOLD_CHECKING
14131 unsigned char checksum_before[16], checksum_after[16];
14132 struct md5_ctx ctx;
14133 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14135 md5_init_ctx (&ctx);
14136 fold_checksum_tree (op0, &ctx, &ht);
14137 md5_finish_ctx (&ctx, checksum_before);
14138 ht.empty ();
14139 #endif
14141 tem = fold_unary_loc (loc, code, type, op0);
14142 if (!tem)
14143 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
14145 #ifdef ENABLE_FOLD_CHECKING
14146 md5_init_ctx (&ctx);
14147 fold_checksum_tree (op0, &ctx, &ht);
14148 md5_finish_ctx (&ctx, checksum_after);
14150 if (memcmp (checksum_before, checksum_after, 16))
14151 fold_check_failed (op0, tem);
14152 #endif
14153 return tem;
14156 /* Fold a binary tree expression with code CODE of type TYPE with
14157 operands OP0 and OP1. LOC is the location of the resulting
14158 expression. Return a folded expression if successful. Otherwise,
14159 return a tree expression with code CODE of type TYPE with operands
14160 OP0 and OP1. */
14162 tree
14163 fold_build2_loc (location_t loc,
14164 enum tree_code code, tree type, tree op0, tree op1
14165 MEM_STAT_DECL)
14167 tree tem;
14168 #ifdef ENABLE_FOLD_CHECKING
14169 unsigned char checksum_before_op0[16],
14170 checksum_before_op1[16],
14171 checksum_after_op0[16],
14172 checksum_after_op1[16];
14173 struct md5_ctx ctx;
14174 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14176 md5_init_ctx (&ctx);
14177 fold_checksum_tree (op0, &ctx, &ht);
14178 md5_finish_ctx (&ctx, checksum_before_op0);
14179 ht.empty ();
14181 md5_init_ctx (&ctx);
14182 fold_checksum_tree (op1, &ctx, &ht);
14183 md5_finish_ctx (&ctx, checksum_before_op1);
14184 ht.empty ();
14185 #endif
14187 tem = fold_binary_loc (loc, code, type, op0, op1);
14188 if (!tem)
14189 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14191 #ifdef ENABLE_FOLD_CHECKING
14192 md5_init_ctx (&ctx);
14193 fold_checksum_tree (op0, &ctx, &ht);
14194 md5_finish_ctx (&ctx, checksum_after_op0);
14195 ht.empty ();
14197 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14198 fold_check_failed (op0, tem);
14200 md5_init_ctx (&ctx);
14201 fold_checksum_tree (op1, &ctx, &ht);
14202 md5_finish_ctx (&ctx, checksum_after_op1);
14204 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14205 fold_check_failed (op1, tem);
14206 #endif
14207 return tem;
14210 /* Fold a ternary tree expression with code CODE of type TYPE with
14211 operands OP0, OP1, and OP2. Return a folded expression if
14212 successful. Otherwise, return a tree expression with code CODE of
14213 type TYPE with operands OP0, OP1, and OP2. */
14215 tree
14216 fold_build3_loc (location_t loc, enum tree_code code, tree type,
14217 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14219 tree tem;
14220 #ifdef ENABLE_FOLD_CHECKING
14221 unsigned char checksum_before_op0[16],
14222 checksum_before_op1[16],
14223 checksum_before_op2[16],
14224 checksum_after_op0[16],
14225 checksum_after_op1[16],
14226 checksum_after_op2[16];
14227 struct md5_ctx ctx;
14228 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14230 md5_init_ctx (&ctx);
14231 fold_checksum_tree (op0, &ctx, &ht);
14232 md5_finish_ctx (&ctx, checksum_before_op0);
14233 ht.empty ();
14235 md5_init_ctx (&ctx);
14236 fold_checksum_tree (op1, &ctx, &ht);
14237 md5_finish_ctx (&ctx, checksum_before_op1);
14238 ht.empty ();
14240 md5_init_ctx (&ctx);
14241 fold_checksum_tree (op2, &ctx, &ht);
14242 md5_finish_ctx (&ctx, checksum_before_op2);
14243 ht.empty ();
14244 #endif
14246 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14247 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14248 if (!tem)
14249 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14251 #ifdef ENABLE_FOLD_CHECKING
14252 md5_init_ctx (&ctx);
14253 fold_checksum_tree (op0, &ctx, &ht);
14254 md5_finish_ctx (&ctx, checksum_after_op0);
14255 ht.empty ();
14257 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14258 fold_check_failed (op0, tem);
14260 md5_init_ctx (&ctx);
14261 fold_checksum_tree (op1, &ctx, &ht);
14262 md5_finish_ctx (&ctx, checksum_after_op1);
14263 ht.empty ();
14265 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14266 fold_check_failed (op1, tem);
14268 md5_init_ctx (&ctx);
14269 fold_checksum_tree (op2, &ctx, &ht);
14270 md5_finish_ctx (&ctx, checksum_after_op2);
14272 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14273 fold_check_failed (op2, tem);
14274 #endif
14275 return tem;
14278 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14279 arguments in ARGARRAY, and a null static chain.
14280 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14281 of type TYPE from the given operands as constructed by build_call_array. */
14283 tree
14284 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14285 int nargs, tree *argarray)
14287 tree tem;
14288 #ifdef ENABLE_FOLD_CHECKING
14289 unsigned char checksum_before_fn[16],
14290 checksum_before_arglist[16],
14291 checksum_after_fn[16],
14292 checksum_after_arglist[16];
14293 struct md5_ctx ctx;
14294 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14295 int i;
14297 md5_init_ctx (&ctx);
14298 fold_checksum_tree (fn, &ctx, &ht);
14299 md5_finish_ctx (&ctx, checksum_before_fn);
14300 ht.empty ();
14302 md5_init_ctx (&ctx);
14303 for (i = 0; i < nargs; i++)
14304 fold_checksum_tree (argarray[i], &ctx, &ht);
14305 md5_finish_ctx (&ctx, checksum_before_arglist);
14306 ht.empty ();
14307 #endif
14309 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14310 if (!tem)
14311 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14313 #ifdef ENABLE_FOLD_CHECKING
14314 md5_init_ctx (&ctx);
14315 fold_checksum_tree (fn, &ctx, &ht);
14316 md5_finish_ctx (&ctx, checksum_after_fn);
14317 ht.empty ();
14319 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14320 fold_check_failed (fn, tem);
14322 md5_init_ctx (&ctx);
14323 for (i = 0; i < nargs; i++)
14324 fold_checksum_tree (argarray[i], &ctx, &ht);
14325 md5_finish_ctx (&ctx, checksum_after_arglist);
14327 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14328 fold_check_failed (NULL_TREE, tem);
14329 #endif
14330 return tem;
14333 /* Perform constant folding and related simplification of initializer
14334 expression EXPR. These behave identically to "fold_buildN" but ignore
14335 potential run-time traps and exceptions that fold must preserve. */
14337 #define START_FOLD_INIT \
14338 int saved_signaling_nans = flag_signaling_nans;\
14339 int saved_trapping_math = flag_trapping_math;\
14340 int saved_rounding_math = flag_rounding_math;\
14341 int saved_trapv = flag_trapv;\
14342 int saved_folding_initializer = folding_initializer;\
14343 flag_signaling_nans = 0;\
14344 flag_trapping_math = 0;\
14345 flag_rounding_math = 0;\
14346 flag_trapv = 0;\
14347 folding_initializer = 1;
14349 #define END_FOLD_INIT \
14350 flag_signaling_nans = saved_signaling_nans;\
14351 flag_trapping_math = saved_trapping_math;\
14352 flag_rounding_math = saved_rounding_math;\
14353 flag_trapv = saved_trapv;\
14354 folding_initializer = saved_folding_initializer;
14356 tree
14357 fold_init (tree expr)
14359 tree result;
14360 START_FOLD_INIT;
14362 result = fold (expr);
14364 END_FOLD_INIT;
14365 return result;
14368 tree
14369 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14370 tree type, tree op)
14372 tree result;
14373 START_FOLD_INIT;
14375 result = fold_build1_loc (loc, code, type, op);
14377 END_FOLD_INIT;
14378 return result;
14381 tree
14382 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14383 tree type, tree op0, tree op1)
14385 tree result;
14386 START_FOLD_INIT;
14388 result = fold_build2_loc (loc, code, type, op0, op1);
14390 END_FOLD_INIT;
14391 return result;
14394 tree
14395 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14396 int nargs, tree *argarray)
14398 tree result;
14399 START_FOLD_INIT;
14401 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14403 END_FOLD_INIT;
14404 return result;
14407 tree
14408 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14409 tree lhs, tree rhs)
14411 tree result;
14412 START_FOLD_INIT;
14414 result = fold_binary_loc (loc, code, type, lhs, rhs);
14416 END_FOLD_INIT;
14417 return result;
14420 #undef START_FOLD_INIT
14421 #undef END_FOLD_INIT
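/* Illustrative example: for a static initializer such as

     static double d = 1.0 / 3.0;

   a front end can call fold_build2_initializer_loc (loc, RDIV_EXPR,
   double_type_node, one, third) (ONE and THIRD being REAL_CSTs, named here
   only for the sketch) and obtain a folded REAL_CST even under
   -frounding-math, because the wrappers above temporarily clear
   flag_rounding_math, flag_trapping_math and friends around the fold.  */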
14423 /* Determine if first argument is a multiple of second argument. Return
14424 false if it is not, or we cannot easily determine it to be.
14426 An example of the sort of thing we care about (at this point; this routine
14427 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14428 fold cases do now) is discovering that
14430 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14432 is a multiple of
14434 SAVE_EXPR (J * 8)
14436 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14438 This code also handles discovering that
14440 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14442 is a multiple of 8 so we don't have to worry about dealing with a
14443 possible remainder.
14445 Note that we *look* inside a SAVE_EXPR only to determine how it was
14446 calculated; it is not safe for fold to do much of anything else with the
14447 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14448 at run time. For example, the latter example above *cannot* be implemented
14449 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14450 evaluation time of the original SAVE_EXPR is not necessarily the same at
14451 the time the new expression is evaluated. The only optimization of this
14452 sort that would be valid is changing
14454 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14456 divided by 8 to
14458 SAVE_EXPR (I) * SAVE_EXPR (J)
14460 (where the same SAVE_EXPR (J) is used in the original and the
14461 transformed version).
14463 NOWRAP specifies whether all outer operations in TYPE should
14464 be considered not wrapping. Any type conversion within TOP acts
14465 as a barrier and we will fall back to NOWRAP being false.
14466 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14467 as not wrapping even though they are generally using unsigned arithmetic. */
14469 bool
14470 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14472 gimple *stmt;
14473 tree op1, op2;
14475 if (operand_equal_p (top, bottom, 0))
14476 return true;
14478 if (TREE_CODE (type) != INTEGER_TYPE)
14479 return false;
14481 switch (TREE_CODE (top))
14483 case BIT_AND_EXPR:
14484 /* Bitwise and provides a power of two multiple. If the mask is
14485 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14486 if (!integer_pow2p (bottom))
14487 return false;
14488 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14489 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14491 case MULT_EXPR:
14492 /* If the multiplication can wrap we cannot recurse further unless
14493 the bottom is a power of two which is where wrapping does not
14494 matter. */
14495 if (!nowrap
14496 && !TYPE_OVERFLOW_UNDEFINED (type)
14497 && !integer_pow2p (bottom))
14498 return false;
14499 if (TREE_CODE (bottom) == INTEGER_CST)
14501 op1 = TREE_OPERAND (top, 0);
14502 op2 = TREE_OPERAND (top, 1);
14503 if (TREE_CODE (op1) == INTEGER_CST)
14504 std::swap (op1, op2);
14505 if (TREE_CODE (op2) == INTEGER_CST)
14507 if (multiple_of_p (type, op2, bottom, nowrap))
14508 return true;
14509 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14510 if (multiple_of_p (type, bottom, op2, nowrap))
14512 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14513 wi::to_widest (op2));
14514 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14516 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14517 return multiple_of_p (type, op1, op2, nowrap);
14520 return multiple_of_p (type, op1, bottom, nowrap);
14523 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14524 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14526 case LSHIFT_EXPR:
14527 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14528 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14530 op1 = TREE_OPERAND (top, 1);
14531 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14533 wide_int mul_op
14534 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14535 return multiple_of_p (type,
14536 wide_int_to_tree (type, mul_op), bottom,
14537 nowrap);
14540 return false;
14542 case MINUS_EXPR:
14543 case PLUS_EXPR:
14544 /* If the addition or subtraction can wrap we cannot recurse further
14545 unless bottom is a power of two which is where wrapping does not
14546 matter. */
14547 if (!nowrap
14548 && !TYPE_OVERFLOW_UNDEFINED (type)
14549 && !integer_pow2p (bottom))
14550 return false;
14552 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14553 unsigned type. For example, (X / 3) * 3 + 0xfffffffd is multiple of 3,
14554 but 0xfffffffd is not. */
14555 op1 = TREE_OPERAND (top, 1);
14556 if (TREE_CODE (top) == PLUS_EXPR
14557 && nowrap
14558 && TYPE_UNSIGNED (type)
14559 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14560 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14562 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14563 precisely, so be conservative here checking if both op0 and op1
14564 are multiple of bottom. Note we check the second operand first
14565 since it's usually simpler. */
14566 return (multiple_of_p (type, op1, bottom, nowrap)
14567 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14569 CASE_CONVERT:
14570 /* Can't handle conversions from non-integral or wider integral type. */
14571 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14572 || (TYPE_PRECISION (type)
14573 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14574 return false;
14575 /* NOWRAP only extends to operations in the outermost type so
14576 make sure to strip it off here. */
14577 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14578 TREE_OPERAND (top, 0), bottom, false);
14580 case SAVE_EXPR:
14581 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14583 case COND_EXPR:
14584 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14585 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14587 case INTEGER_CST:
14588 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14589 return false;
14590 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14591 SIGNED);
14593 case SSA_NAME:
14594 if (TREE_CODE (bottom) == INTEGER_CST
14595 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14596 && gimple_code (stmt) == GIMPLE_ASSIGN)
14598 enum tree_code code = gimple_assign_rhs_code (stmt);
14600 /* Check for special cases to see if top is defined as multiple
14601 of bottom:
14603 top = (X & ~(bottom - 1)) ; bottom is power of 2
14607 Y = X % bottom
14608 top = X - Y. */
14609 if (code == BIT_AND_EXPR
14610 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14611 && TREE_CODE (op2) == INTEGER_CST
14612 && integer_pow2p (bottom)
14613 && wi::multiple_of_p (wi::to_widest (op2),
14614 wi::to_widest (bottom), SIGNED))
14615 return true;
14617 op1 = gimple_assign_rhs1 (stmt);
14618 if (code == MINUS_EXPR
14619 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14620 && TREE_CODE (op2) == SSA_NAME
14621 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14622 && gimple_code (stmt) == GIMPLE_ASSIGN
14623 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14624 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14625 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14626 return true;
14629 /* fall through */
14631 default:
14632 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14633 return multiple_p (wi::to_poly_widest (top),
14634 wi::to_poly_widest (bottom));
14636 return false;
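/* Usage sketch (J stands for an arbitrary sizetype tree):

     tree top = size_binop (MULT_EXPR, j, size_int (8));
     bool ok = multiple_of_p (sizetype, top, size_int (4), true);

   OK is true: the MULT_EXPR case finds the INTEGER_CST operand 8, which is
   itself a multiple of 4, so nothing needs to be known about J.  */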
14640 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14641 This function returns true for integer expressions, and returns
14642 false if uncertain. */
14644 bool
14645 tree_expr_finite_p (const_tree x)
14647 machine_mode mode = element_mode (x);
14648 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14649 return true;
14650 switch (TREE_CODE (x))
14652 case REAL_CST:
14653 return real_isfinite (TREE_REAL_CST_PTR (x));
14654 case COMPLEX_CST:
14655 return tree_expr_finite_p (TREE_REALPART (x))
14656 && tree_expr_finite_p (TREE_IMAGPART (x));
14657 case FLOAT_EXPR:
14658 return true;
14659 case ABS_EXPR:
14660 case CONVERT_EXPR:
14661 case NON_LVALUE_EXPR:
14662 case NEGATE_EXPR:
14663 case SAVE_EXPR:
14664 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14665 case MIN_EXPR:
14666 case MAX_EXPR:
14667 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14668 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14669 case COND_EXPR:
14670 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14671 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14672 case CALL_EXPR:
14673 switch (get_call_combined_fn (x))
14675 CASE_CFN_FABS:
14676 CASE_CFN_FABS_FN:
14677 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14678 CASE_CFN_FMAX:
14679 CASE_CFN_FMAX_FN:
14680 CASE_CFN_FMIN:
14681 CASE_CFN_FMIN_FN:
14682 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14683 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14684 default:
14685 return false;
14688 default:
14689 return false;
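/* Example (sketch): tree_expr_finite_p is true for the REAL_CST 1.0, and
   for fabs (x) exactly when it is true for X, but false for a bare
   SSA_NAME of type double, since nothing rules out a NaN or infinity.
   Under -ffinite-math-only the HONOR_NANS/HONOR_INFINITIES test at the
   top makes it true for any double-typed expression.  */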
14693 /* Return true if expression X evaluates to an infinity.
14694 This function returns false for integer expressions. */
14696 bool
14697 tree_expr_infinite_p (const_tree x)
14699 if (!HONOR_INFINITIES (x))
14700 return false;
14701 switch (TREE_CODE (x))
14703 case REAL_CST:
14704 return real_isinf (TREE_REAL_CST_PTR (x));
14705 case ABS_EXPR:
14706 case NEGATE_EXPR:
14707 case NON_LVALUE_EXPR:
14708 case SAVE_EXPR:
14709 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14710 case COND_EXPR:
14711 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14712 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14713 default:
14714 return false;
14718 /* Return true if expression X could evaluate to an infinity.
14719 This function returns false for integer expressions, and returns
14720 true if uncertain. */
14722 bool
14723 tree_expr_maybe_infinite_p (const_tree x)
14725 if (!HONOR_INFINITIES (x))
14726 return false;
14727 switch (TREE_CODE (x))
14729 case REAL_CST:
14730 return real_isinf (TREE_REAL_CST_PTR (x));
14731 case FLOAT_EXPR:
14732 return false;
14733 case ABS_EXPR:
14734 case NEGATE_EXPR:
14735 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14736 case COND_EXPR:
14737 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14738 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14739 default:
14740 return true;
14744 /* Return true if expression X evaluates to a signaling NaN.
14745 This function returns false for integer expressions. */
14747 bool
14748 tree_expr_signaling_nan_p (const_tree x)
14750 if (!HONOR_SNANS (x))
14751 return false;
14752 switch (TREE_CODE (x))
14754 case REAL_CST:
14755 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14756 case NON_LVALUE_EXPR:
14757 case SAVE_EXPR:
14758 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14759 case COND_EXPR:
14760 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14761 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14762 default:
14763 return false;
14767 /* Return true if expression X could evaluate to a signaling NaN.
14768 This function returns false for integer expressions, and returns
14769 true if uncertain. */
14771 bool
14772 tree_expr_maybe_signaling_nan_p (const_tree x)
14774 if (!HONOR_SNANS (x))
14775 return false;
14776 switch (TREE_CODE (x))
14778 case REAL_CST:
14779 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14780 case FLOAT_EXPR:
14781 return false;
14782 case ABS_EXPR:
14783 case CONVERT_EXPR:
14784 case NEGATE_EXPR:
14785 case NON_LVALUE_EXPR:
14786 case SAVE_EXPR:
14787 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14788 case MIN_EXPR:
14789 case MAX_EXPR:
14790 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14791 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14792 case COND_EXPR:
14793 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14794 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14795 case CALL_EXPR:
14796 switch (get_call_combined_fn (x))
14798 CASE_CFN_FABS:
14799 CASE_CFN_FABS_FN:
14800 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14801 CASE_CFN_FMAX:
14802 CASE_CFN_FMAX_FN:
14803 CASE_CFN_FMIN:
14804 CASE_CFN_FMIN_FN:
14805 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14806 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14807 default:
14808 return true;
14810 default:
14811 return true;
14815 /* Return true if expression X evaluates to a NaN.
14816 This function returns false for integer expressions. */
14818 bool
14819 tree_expr_nan_p (const_tree x)
14821 if (!HONOR_NANS (x))
14822 return false;
14823 switch (TREE_CODE (x))
14825 case REAL_CST:
14826 return real_isnan (TREE_REAL_CST_PTR (x));
14827 case NON_LVALUE_EXPR:
14828 case SAVE_EXPR:
14829 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14830 case COND_EXPR:
14831 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14832 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14833 default:
14834 return false;
14838 /* Return true if expression X could evaluate to a NaN.
14839 This function returns false for integer expressions, and returns
14840 true if uncertain. */
14842 bool
14843 tree_expr_maybe_nan_p (const_tree x)
14845 if (!HONOR_NANS (x))
14846 return false;
14847 switch (TREE_CODE (x))
14849 case REAL_CST:
14850 return real_isnan (TREE_REAL_CST_PTR (x));
14851 case FLOAT_EXPR:
14852 return false;
14853 case PLUS_EXPR:
14854 case MINUS_EXPR:
14855 case MULT_EXPR:
14856 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14857 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14858 case ABS_EXPR:
14859 case CONVERT_EXPR:
14860 case NEGATE_EXPR:
14861 case NON_LVALUE_EXPR:
14862 case SAVE_EXPR:
14863 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14864 case MIN_EXPR:
14865 case MAX_EXPR:
14866 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14867 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14868 case COND_EXPR:
14869 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14870 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14871 case CALL_EXPR:
14872 switch (get_call_combined_fn (x))
14874 CASE_CFN_FABS:
14875 CASE_CFN_FABS_FN:
14876 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14877 CASE_CFN_FMAX:
14878 CASE_CFN_FMAX_FN:
14879 CASE_CFN_FMIN:
14880 CASE_CFN_FMIN_FN:
14881 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14882 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14883 default:
14884 return true;
14886 default:
14887 return true;
14891 /* Return true if expression X could evaluate to -0.0.
14892 This function returns true if uncertain. */
14894 bool
14895 tree_expr_maybe_real_minus_zero_p (const_tree x)
14897 if (!HONOR_SIGNED_ZEROS (x))
14898 return false;
14899 switch (TREE_CODE (x))
14901 case REAL_CST:
14902 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14903 case INTEGER_CST:
14904 case FLOAT_EXPR:
14905 case ABS_EXPR:
14906 return false;
14907 case NON_LVALUE_EXPR:
14908 case SAVE_EXPR:
14909 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14910 case COND_EXPR:
14911 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14912 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14913 case CALL_EXPR:
14914 switch (get_call_combined_fn (x))
14916 CASE_CFN_FABS:
14917 CASE_CFN_FABS_FN:
14918 return false;
14919 default:
14920 break;
14922 default:
14923 break;
14925 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14926 but currently those predicates require tree and not const_tree. */
14927 return true;
14930 #define tree_expr_nonnegative_warnv_p(X, Y) \
14931 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14933 #define RECURSE(X) \
14934 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
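/* The #define above poisons direct calls to tree_expr_nonnegative_warnv_p
   in the rest of this section: recursive queries must go through RECURSE,
   which increments DEPTH so the recursion stays bounded by
   param_max_ssa_name_query_depth.  Writing (tree_expr_nonnegative_warnv_p)
   in parentheses prevents the function-like macro from expanding at the
   one spot where the real function must be called.  */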
14936 /* Return true if CODE or TYPE is known to be non-negative. */
14938 static bool
14939 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14941 if (!VECTOR_TYPE_P (type)
14942 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14943 && truth_value_p (code))
14944 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14945 have a signed:1 type (where the values are -1 and 0). */
14946 return true;
14947 return false;
14950 /* Return true if (CODE OP0) is known to be non-negative. If the return
14951 value is based on the assumption that signed overflow is undefined,
14952 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14953 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14955 bool
14956 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14957 bool *strict_overflow_p, int depth)
14959 if (TYPE_UNSIGNED (type))
14960 return true;
14962 switch (code)
14964 case ABS_EXPR:
14965 /* We can't return 1 if flag_wrapv is set because
14966 ABS_EXPR<INT_MIN> = INT_MIN. */
14967 if (!ANY_INTEGRAL_TYPE_P (type))
14968 return true;
14969 if (TYPE_OVERFLOW_UNDEFINED (type))
14971 *strict_overflow_p = true;
14972 return true;
14974 break;
14976 case NON_LVALUE_EXPR:
14977 case FLOAT_EXPR:
14978 case FIX_TRUNC_EXPR:
14979 return RECURSE (op0);
14981 CASE_CONVERT:
14983 tree inner_type = TREE_TYPE (op0);
14984 tree outer_type = type;
14986 if (SCALAR_FLOAT_TYPE_P (outer_type))
14988 if (SCALAR_FLOAT_TYPE_P (inner_type))
14989 return RECURSE (op0);
14990 if (INTEGRAL_TYPE_P (inner_type))
14992 if (TYPE_UNSIGNED (inner_type))
14993 return true;
14994 return RECURSE (op0);
14997 else if (INTEGRAL_TYPE_P (outer_type))
14999 if (SCALAR_FLOAT_TYPE_P (inner_type))
15000 return RECURSE (op0);
15001 if (INTEGRAL_TYPE_P (inner_type))
15002 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15003 && TYPE_UNSIGNED (inner_type);
15006 break;
15008 default:
15009 return tree_simple_nonnegative_warnv_p (code, type);
15012 /* We don't know sign of `t', so be conservative and return false. */
15013 return false;
15016 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15017 value is based on the assumption that signed overflow is undefined,
15018 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15019 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15021 bool
15022 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15023 tree op1, bool *strict_overflow_p,
15024 int depth)
15026 if (TYPE_UNSIGNED (type))
15027 return true;
15029 switch (code)
15031 case POINTER_PLUS_EXPR:
15032 case PLUS_EXPR:
15033 if (FLOAT_TYPE_P (type))
15034 return RECURSE (op0) && RECURSE (op1);
15036 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15037 both unsigned and at least 2 bits shorter than the result. */
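/* Worked example (illustrative): two unsigned chars zero-extended to a
   32-bit int give prec = MAX (8, 8) + 1 = 9 < 32; the sum is at most
   0xFF + 0xFF = 0x1FE, which leaves the sign bit clear.  */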
15038 if (TREE_CODE (type) == INTEGER_TYPE
15039 && TREE_CODE (op0) == NOP_EXPR
15040 && TREE_CODE (op1) == NOP_EXPR)
15042 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15043 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15044 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15045 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15047 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15048 TYPE_PRECISION (inner2)) + 1;
15049 return prec < TYPE_PRECISION (type);
15052 break;
15054 case MULT_EXPR:
15055 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15057 /* x * x is always non-negative for floating point x
15058 or without overflow. */
15059 if (operand_equal_p (op0, op1, 0)
15060 || (RECURSE (op0) && RECURSE (op1)))
15062 if (ANY_INTEGRAL_TYPE_P (type)
15063 && TYPE_OVERFLOW_UNDEFINED (type))
15064 *strict_overflow_p = true;
15065 return true;
15069 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15070 both unsigned and their combined precision is less than the result's. */
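/* Worked example (illustrative): unsigned char operands zero-extended to
   a 32-bit int give precision0 + precision1 = 16 < 32; the product is at
   most 0xFF * 0xFF = 0xFE01, well below the sign bit.  */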
15071 if (TREE_CODE (type) == INTEGER_TYPE
15072 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15073 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15075 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15076 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15077 : TREE_TYPE (op0);
15078 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15079 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15080 : TREE_TYPE (op1);
15082 bool unsigned0 = TYPE_UNSIGNED (inner0);
15083 bool unsigned1 = TYPE_UNSIGNED (inner1);
15085 if (TREE_CODE (op0) == INTEGER_CST)
15086 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15088 if (TREE_CODE (op1) == INTEGER_CST)
15089 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15091 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15092 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15094 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15095 ? tree_int_cst_min_precision (op0, UNSIGNED)
15096 : TYPE_PRECISION (inner0);
15098 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15099 ? tree_int_cst_min_precision (op1, UNSIGNED)
15100 : TYPE_PRECISION (inner1);
15102 return precision0 + precision1 < TYPE_PRECISION (type);
15105 return false;
15107 case BIT_AND_EXPR:
15108 return RECURSE (op0) || RECURSE (op1);
15110 case MAX_EXPR:
15111 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
15112 things. */
15113 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
15114 return RECURSE (op0) && RECURSE (op1);
15115 return RECURSE (op0) || RECURSE (op1);
15117 case BIT_IOR_EXPR:
15118 case BIT_XOR_EXPR:
15119 case MIN_EXPR:
15120 case RDIV_EXPR:
15121 case TRUNC_DIV_EXPR:
15122 case CEIL_DIV_EXPR:
15123 case FLOOR_DIV_EXPR:
15124 case ROUND_DIV_EXPR:
15125 return RECURSE (op0) && RECURSE (op1);
15127 case TRUNC_MOD_EXPR:
15128 return RECURSE (op0);
15130 case FLOOR_MOD_EXPR:
15131 return RECURSE (op1);
15133 case CEIL_MOD_EXPR:
15134 case ROUND_MOD_EXPR:
15135 default:
15136 return tree_simple_nonnegative_warnv_p (code, type);
15139 /* We don't know sign of `t', so be conservative and return false. */
15140 return false;
15143 /* Return true if T is known to be non-negative. If the return
15144 value is based on the assumption that signed overflow is undefined,
15145 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15146 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15148 bool
15149 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15151 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15152 return true;
15154 switch (TREE_CODE (t))
15156 case INTEGER_CST:
15157 return tree_int_cst_sgn (t) >= 0;
15159 case REAL_CST:
15160 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15162 case FIXED_CST:
15163 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15165 case COND_EXPR:
15166 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15168 case SSA_NAME:
15169 /* Limit the depth of recursion to avoid quadratic behavior.
15170 This is expected to catch almost all occurrences in practice.
15171 If this code misses important cases that unbounded recursion
15172 would not, passes that need this information could be revised
15173 to provide it through dataflow propagation. */
15174 return (!name_registered_for_update_p (t)
15175 && depth < param_max_ssa_name_query_depth
15176 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15177 strict_overflow_p, depth));
15179 default:
15180 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15184 /* Return true if T is known to be non-negative. If the return
15185 value is based on the assumption that signed overflow is undefined,
15186 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15187 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15189 bool
15190 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15191 bool *strict_overflow_p, int depth)
15193 switch (fn)
15195 CASE_CFN_ACOS:
15196 CASE_CFN_ACOS_FN:
15197 CASE_CFN_ACOSH:
15198 CASE_CFN_ACOSH_FN:
15199 CASE_CFN_CABS:
15200 CASE_CFN_CABS_FN:
15201 CASE_CFN_COSH:
15202 CASE_CFN_COSH_FN:
15203 CASE_CFN_ERFC:
15204 CASE_CFN_ERFC_FN:
15205 CASE_CFN_EXP:
15206 CASE_CFN_EXP_FN:
15207 CASE_CFN_EXP10:
15208 CASE_CFN_EXP2:
15209 CASE_CFN_EXP2_FN:
15210 CASE_CFN_FABS:
15211 CASE_CFN_FABS_FN:
15212 CASE_CFN_FDIM:
15213 CASE_CFN_FDIM_FN:
15214 CASE_CFN_HYPOT:
15215 CASE_CFN_HYPOT_FN:
15216 CASE_CFN_POW10:
15217 CASE_CFN_FFS:
15218 CASE_CFN_PARITY:
15219 CASE_CFN_POPCOUNT:
15220 CASE_CFN_CLZ:
15221 CASE_CFN_CLRSB:
15222 case CFN_BUILT_IN_BSWAP16:
15223 case CFN_BUILT_IN_BSWAP32:
15224 case CFN_BUILT_IN_BSWAP64:
15225 case CFN_BUILT_IN_BSWAP128:
15226 /* Always true. */
15227 return true;
15229 CASE_CFN_SQRT:
15230 CASE_CFN_SQRT_FN:
15231 /* sqrt(-0.0) is -0.0. */
15232 if (!HONOR_SIGNED_ZEROS (type))
15233 return true;
15234 return RECURSE (arg0);
15236 CASE_CFN_ASINH:
15237 CASE_CFN_ASINH_FN:
15238 CASE_CFN_ATAN:
15239 CASE_CFN_ATAN_FN:
15240 CASE_CFN_ATANH:
15241 CASE_CFN_ATANH_FN:
15242 CASE_CFN_CBRT:
15243 CASE_CFN_CBRT_FN:
15244 CASE_CFN_CEIL:
15245 CASE_CFN_CEIL_FN:
15246 CASE_CFN_ERF:
15247 CASE_CFN_ERF_FN:
15248 CASE_CFN_EXPM1:
15249 CASE_CFN_EXPM1_FN:
15250 CASE_CFN_FLOOR:
15251 CASE_CFN_FLOOR_FN:
15252 CASE_CFN_FMOD:
15253 CASE_CFN_FMOD_FN:
15254 CASE_CFN_FREXP:
15255 CASE_CFN_FREXP_FN:
15256 CASE_CFN_ICEIL:
15257 CASE_CFN_IFLOOR:
15258 CASE_CFN_IRINT:
15259 CASE_CFN_IROUND:
15260 CASE_CFN_LCEIL:
15261 CASE_CFN_LDEXP:
15262 CASE_CFN_LFLOOR:
15263 CASE_CFN_LLCEIL:
15264 CASE_CFN_LLFLOOR:
15265 CASE_CFN_LLRINT:
15266 CASE_CFN_LLRINT_FN:
15267 CASE_CFN_LLROUND:
15268 CASE_CFN_LLROUND_FN:
15269 CASE_CFN_LRINT:
15270 CASE_CFN_LRINT_FN:
15271 CASE_CFN_LROUND:
15272 CASE_CFN_LROUND_FN:
15273 CASE_CFN_MODF:
15274 CASE_CFN_MODF_FN:
15275 CASE_CFN_NEARBYINT:
15276 CASE_CFN_NEARBYINT_FN:
15277 CASE_CFN_RINT:
15278 CASE_CFN_RINT_FN:
15279 CASE_CFN_ROUND:
15280 CASE_CFN_ROUND_FN:
15281 CASE_CFN_ROUNDEVEN:
15282 CASE_CFN_ROUNDEVEN_FN:
15283 CASE_CFN_SCALB:
15284 CASE_CFN_SCALBLN:
15285 CASE_CFN_SCALBLN_FN:
15286 CASE_CFN_SCALBN:
15287 CASE_CFN_SCALBN_FN:
15288 CASE_CFN_SIGNBIT:
15289 CASE_CFN_SIGNIFICAND:
15290 CASE_CFN_SINH:
15291 CASE_CFN_SINH_FN:
15292 CASE_CFN_TANH:
15293 CASE_CFN_TANH_FN:
15294 CASE_CFN_TRUNC:
15295 CASE_CFN_TRUNC_FN:
15296 /* True if the 1st argument is nonnegative. */
15297 return RECURSE (arg0);
15299 CASE_CFN_FMAX:
15300 CASE_CFN_FMAX_FN:
15301 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15302 things. In the presence of sNaNs, we're only guaranteed to be
15303 non-negative if both operands are non-negative. In the presence
15304 of qNaNs, we're non-negative if either operand is non-negative
15305 and can't be a qNaN, or if both operands are non-negative. */
15306 if (tree_expr_maybe_signaling_nan_p (arg0)
15307 || tree_expr_maybe_signaling_nan_p (arg1))
15308 return RECURSE (arg0) && RECURSE (arg1);
15309 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15310 || RECURSE (arg1))
15311 : (RECURSE (arg1)
15312 && !tree_expr_maybe_nan_p (arg1));
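/* Example (sketch, assuming the default -fno-signaling-nans): for any Y,
   fmax (0.5, y) is known non-negative because ARG0 is non-negative and
   cannot be a qNaN, so the result is either 0.5 or a larger Y.  */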
15314 CASE_CFN_FMIN:
15315 CASE_CFN_FMIN_FN:
15316 /* True if the 1st AND 2nd arguments are nonnegative. */
15317 return RECURSE (arg0) && RECURSE (arg1);
15319 CASE_CFN_COPYSIGN:
15320 CASE_CFN_COPYSIGN_FN:
15321 /* True if the 2nd argument is nonnegative. */
15322 return RECURSE (arg1);
15324 CASE_CFN_POWI:
15325 /* True if the 1st argument is nonnegative or the second
15326 argument is an even integer. */
15327 if (TREE_CODE (arg1) == INTEGER_CST
15328 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15329 return true;
15330 return RECURSE (arg0);
15332 CASE_CFN_POW:
15333 CASE_CFN_POW_FN:
15334 /* True if the 1st argument is nonnegative or the second
15335 argument is an even integer valued real. */
15336 if (TREE_CODE (arg1) == REAL_CST)
15338 REAL_VALUE_TYPE c;
15339 HOST_WIDE_INT n;
15341 c = TREE_REAL_CST (arg1);
15342 n = real_to_integer (&c);
15343 if ((n & 1) == 0)
15345 REAL_VALUE_TYPE cint;
15346 real_from_integer (&cint, VOIDmode, n, SIGNED);
15347 if (real_identical (&c, &cint))
15348 return true;
15351 return RECURSE (arg0);
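/* E.g. (illustrative) pow (x, 2.0) is known non-negative for any X, since
   2.0 is an even integer valued real; pow (x, 2.5) instead falls back to
   requiring X itself to be non-negative.  */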
15353 default:
15354 break;
15356 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
15359 /* Return true if T is known to be non-negative. If the return
15360 value is based on the assumption that signed overflow is undefined,
15361 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15362 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15364 static bool
15365 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15367 enum tree_code code = TREE_CODE (t);
15368 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15369 return true;
15371 switch (code)
15373 case TARGET_EXPR:
15375 tree temp = TARGET_EXPR_SLOT (t);
15376 t = TARGET_EXPR_INITIAL (t);
15378 /* If the initializer is non-void, then it's a normal expression
15379 that will be assigned to the slot. */
15380 if (!VOID_TYPE_P (TREE_TYPE (t)))
15381 return RECURSE (t);
15383 /* Otherwise, the initializer sets the slot in some way. One common
15384 way is an assignment statement at the end of the initializer. */
15385 while (1)
15387 if (TREE_CODE (t) == BIND_EXPR)
15388 t = expr_last (BIND_EXPR_BODY (t));
15389 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15390 || TREE_CODE (t) == TRY_CATCH_EXPR)
15391 t = expr_last (TREE_OPERAND (t, 0));
15392 else if (TREE_CODE (t) == STATEMENT_LIST)
15393 t = expr_last (t);
15394 else
15395 break;
15397 if (TREE_CODE (t) == MODIFY_EXPR
15398 && TREE_OPERAND (t, 0) == temp)
15399 return RECURSE (TREE_OPERAND (t, 1));
15401 return false;
15404 case CALL_EXPR:
15406 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15407 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15409 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15410 get_call_combined_fn (t),
15411 arg0,
15412 arg1,
15413 strict_overflow_p, depth);
15415 case COMPOUND_EXPR:
15416 case MODIFY_EXPR:
15417 return RECURSE (TREE_OPERAND (t, 1));
15419 case BIND_EXPR:
15420 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15422 case SAVE_EXPR:
15423 return RECURSE (TREE_OPERAND (t, 0));
15425 default:
15426 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15430 #undef RECURSE
15431 #undef tree_expr_nonnegative_warnv_p
15433 /* Return true if T is known to be non-negative. If the return
15434 value is based on the assumption that signed overflow is undefined,
15435 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15436 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15438 bool
15439 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15441 enum tree_code code;
15442 if (t == error_mark_node)
15443 return false;
15445 code = TREE_CODE (t);
15446 switch (TREE_CODE_CLASS (code))
15448 case tcc_binary:
15449 case tcc_comparison:
15450 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15451 TREE_TYPE (t),
15452 TREE_OPERAND (t, 0),
15453 TREE_OPERAND (t, 1),
15454 strict_overflow_p, depth);
15456 case tcc_unary:
15457 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15458 TREE_TYPE (t),
15459 TREE_OPERAND (t, 0),
15460 strict_overflow_p, depth);
15462 case tcc_constant:
15463 case tcc_declaration:
15464 case tcc_reference:
15465 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15467 default:
15468 break;
15471 switch (code)
15473 case TRUTH_AND_EXPR:
15474 case TRUTH_OR_EXPR:
15475 case TRUTH_XOR_EXPR:
15476 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15477 TREE_TYPE (t),
15478 TREE_OPERAND (t, 0),
15479 TREE_OPERAND (t, 1),
15480 strict_overflow_p, depth);
15481 case TRUTH_NOT_EXPR:
15482 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15483 TREE_TYPE (t),
15484 TREE_OPERAND (t, 0),
15485 strict_overflow_p, depth);
15487 case COND_EXPR:
15488 case CONSTRUCTOR:
15489 case OBJ_TYPE_REF:
15490 case ADDR_EXPR:
15491 case WITH_SIZE_EXPR:
15492 case SSA_NAME:
15493 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15495 default:
15496 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15500 /* Return true if `t' is known to be non-negative. Handle warnings
15501 about undefined signed overflow. */
15503 bool
15504 tree_expr_nonnegative_p (tree t)
15506 bool ret, strict_overflow_p;
15508 strict_overflow_p = false;
15509 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15510 if (strict_overflow_p)
15511 fold_overflow_warning (("assuming signed overflow does not occur when "
15512 "determining that expression is always "
15513 "non-negative"),
15514 WARN_STRICT_OVERFLOW_MISC);
15515 return ret;
15519 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15520 For floating point we further ensure that the value is not denormal.
15521 Similar logic is present in nonzero_address in rtlanal.h.
15523 If the return value is based on the assumption that signed overflow
15524 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15525 change *STRICT_OVERFLOW_P. */
15527 bool
15528 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15529 bool *strict_overflow_p)
15531 switch (code)
15533 case ABS_EXPR:
15534 return tree_expr_nonzero_warnv_p (op0,
15535 strict_overflow_p);
15537 case NOP_EXPR:
15539 tree inner_type = TREE_TYPE (op0);
15540 tree outer_type = type;
15542 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15543 && tree_expr_nonzero_warnv_p (op0,
15544 strict_overflow_p));
15546 break;
15548 case NON_LVALUE_EXPR:
15549 return tree_expr_nonzero_warnv_p (op0,
15550 strict_overflow_p);
15552 default:
15553 break;
15556 return false;
15559 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15560 For floating point we further ensure that the value is not denormal.
15561 Similar logic is present in nonzero_address in rtlanal.h.
15563 If the return value is based on the assumption that signed overflow
15564 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15565 change *STRICT_OVERFLOW_P. */
15567 bool
15568 tree_binary_nonzero_warnv_p (enum tree_code code,
15569 tree type,
15570 tree op0,
15571 tree op1, bool *strict_overflow_p)
15573 bool sub_strict_overflow_p;
15574 switch (code)
15576 case POINTER_PLUS_EXPR:
15577 case PLUS_EXPR:
15578 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15580 /* With the presence of negative values it is hard
15581 to say something. */
15582 sub_strict_overflow_p = false;
15583 if (!tree_expr_nonnegative_warnv_p (op0,
15584 &sub_strict_overflow_p)
15585 || !tree_expr_nonnegative_warnv_p (op1,
15586 &sub_strict_overflow_p))
15587 return false;
15588 /* One of the operands must be positive and the other non-negative. */
15589 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15590 overflows, on a twos-complement machine the sum of two
15591 nonnegative numbers can never be zero. */
15592 return (tree_expr_nonzero_warnv_p (op0,
15593 strict_overflow_p)
15594 || tree_expr_nonzero_warnv_p (op1,
15595 strict_overflow_p));
15597 break;
15599 case MULT_EXPR:
15600 if (TYPE_OVERFLOW_UNDEFINED (type))
15602 if (tree_expr_nonzero_warnv_p (op0,
15603 strict_overflow_p)
15604 && tree_expr_nonzero_warnv_p (op1,
15605 strict_overflow_p))
15607 *strict_overflow_p = true;
15608 return true;
15611 break;
15613 case MIN_EXPR:
15614 sub_strict_overflow_p = false;
15615 if (tree_expr_nonzero_warnv_p (op0,
15616 &sub_strict_overflow_p)
15617 && tree_expr_nonzero_warnv_p (op1,
15618 &sub_strict_overflow_p))
15620 if (sub_strict_overflow_p)
15621 *strict_overflow_p = true;
15623 break;
15625 case MAX_EXPR:
15626 sub_strict_overflow_p = false;
15627 if (tree_expr_nonzero_warnv_p (op0,
15628 &sub_strict_overflow_p))
15630 if (sub_strict_overflow_p)
15631 *strict_overflow_p = true;
15633 /* When both operands are nonzero, then MAX must be too. */
15634 if (tree_expr_nonzero_warnv_p (op1,
15635 strict_overflow_p))
15636 return true;
15638 /* MAX where operand 0 is positive is positive. */
15639 return tree_expr_nonnegative_warnv_p (op0,
15640 strict_overflow_p);
15642 /* MAX where operand 1 is positive is positive. */
15643 else if (tree_expr_nonzero_warnv_p (op1,
15644 &sub_strict_overflow_p)
15645 && tree_expr_nonnegative_warnv_p (op1,
15646 &sub_strict_overflow_p))
15648 if (sub_strict_overflow_p)
15649 *strict_overflow_p = true;
15650 return true;
15652 break;
15654 case BIT_IOR_EXPR:
15655 return (tree_expr_nonzero_warnv_p (op1,
15656 strict_overflow_p)
15657 || tree_expr_nonzero_warnv_p (op0,
15658 strict_overflow_p));
15660 default:
15661 break;
15664 return false;
15667 /* Return true when T is an address and is known to be nonzero.
15668 For floating point we further ensure that T is not denormal.
15669 Similar logic is present in nonzero_address in rtlanal.h.
15671 If the return value is based on the assumption that signed overflow
15672 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15673 change *STRICT_OVERFLOW_P. */
15675 bool
15676 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15678 bool sub_strict_overflow_p;
15679 switch (TREE_CODE (t))
15681 case INTEGER_CST:
15682 return !integer_zerop (t);
15684 case ADDR_EXPR:
15686 tree base = TREE_OPERAND (t, 0);
15688 if (!DECL_P (base))
15689 base = get_base_address (base);
15691 if (base && TREE_CODE (base) == TARGET_EXPR)
15692 base = TARGET_EXPR_SLOT (base);
15694 if (!base)
15695 return false;
15697 /* For objects in symbol table check if we know they are non-zero.
15698 Don't do anything for variables and functions before symtab is built;
15699 it is quite possible that they will be declared weak later. */
15700 int nonzero_addr = maybe_nonzero_address (base);
15701 if (nonzero_addr >= 0)
15702 return nonzero_addr;
15704 /* Constants are never weak. */
15705 if (CONSTANT_CLASS_P (base))
15706 return true;
15708 return false;
15711 case COND_EXPR:
15712 sub_strict_overflow_p = false;
15713 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15714 &sub_strict_overflow_p)
15715 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15716 &sub_strict_overflow_p))
15718 if (sub_strict_overflow_p)
15719 *strict_overflow_p = true;
15720 return true;
15722 break;
15724 case SSA_NAME:
15725 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15726 break;
15727 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15729 default:
15730 break;
15732 return false;
15735 #define integer_valued_real_p(X) \
15736 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15738 #define RECURSE(X) \
15739 ((integer_valued_real_p) (X, depth + 1))
15741 /* Return true if the floating point result of (CODE OP0) has an
15742 integer value. We also allow +Inf, -Inf and NaN to be considered
15743 integer values. Return false for signaling NaN.
15745 DEPTH is the current nesting depth of the query. */
15747 bool
15748 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15750 switch (code)
15752 case FLOAT_EXPR:
15753 return true;
15755 case ABS_EXPR:
15756 return RECURSE (op0);
15758 CASE_CONVERT:
15760 tree type = TREE_TYPE (op0);
15761 if (TREE_CODE (type) == INTEGER_TYPE)
15762 return true;
15763 if (SCALAR_FLOAT_TYPE_P (type))
15764 return RECURSE (op0);
15765 break;
15768 default:
15769 break;
15771 return false;
15774 /* Return true if the floating point result of (CODE OP0 OP1) has an
15775 integer value. We also allow +Inf, -Inf and NaN to be considered
15776 integer values. Return false for signaling NaN.
15778 DEPTH is the current nesting depth of the query. */
15780 bool
15781 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15783 switch (code)
15785 case PLUS_EXPR:
15786 case MINUS_EXPR:
15787 case MULT_EXPR:
15788 case MIN_EXPR:
15789 case MAX_EXPR:
15790 return RECURSE (op0) && RECURSE (op1);
15792 default:
15793 break;
15795 return false;
15798 /* Return true if the floating point result of calling FN with arguments
15799 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15800 considered integer values. Return false for signaling NaN. If FN
15801 takes fewer than 2 arguments, the remaining ARGn are null.
15803 DEPTH is the current nesting depth of the query. */
15805 bool
15806 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15808 switch (fn)
15810 CASE_CFN_CEIL:
15811 CASE_CFN_CEIL_FN:
15812 CASE_CFN_FLOOR:
15813 CASE_CFN_FLOOR_FN:
15814 CASE_CFN_NEARBYINT:
15815 CASE_CFN_NEARBYINT_FN:
15816 CASE_CFN_RINT:
15817 CASE_CFN_RINT_FN:
15818 CASE_CFN_ROUND:
15819 CASE_CFN_ROUND_FN:
15820 CASE_CFN_ROUNDEVEN:
15821 CASE_CFN_ROUNDEVEN_FN:
15822 CASE_CFN_TRUNC:
15823 CASE_CFN_TRUNC_FN:
15824 return true;
15826 CASE_CFN_FMIN:
15827 CASE_CFN_FMIN_FN:
15828 CASE_CFN_FMAX:
15829 CASE_CFN_FMAX_FN:
15830 return RECURSE (arg0) && RECURSE (arg1);
15832 default:
15833 break;
15835 return false;
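/* Example (sketch): floor (x) is integer valued for any X, so the
   CEIL/FLOOR/TRUNC group above returns true unconditionally, while
   fmin (a, b) is integer valued only when both arguments are.  */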
15838 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15839 has an integer value. We also allow +Inf, -Inf and NaN to be
15840 considered integer values. Return false for signaling NaN.
15842 DEPTH is the current nesting depth of the query. */
15844 bool
15845 integer_valued_real_single_p (tree t, int depth)
15847 switch (TREE_CODE (t))
15849 case REAL_CST:
15850 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15852 case COND_EXPR:
15853 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15855 case SSA_NAME:
15856 /* Limit the depth of recursion to avoid quadratic behavior.
15857 This is expected to catch almost all occurrences in practice.
15858 If this code misses important cases that unbounded recursion
15859 would not, passes that need this information could be revised
15860 to provide it through dataflow propagation. */
15861 return (!name_registered_for_update_p (t)
15862 && depth < param_max_ssa_name_query_depth
15863 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15864 depth));
15866 default:
15867 break;
15869 return false;
15872 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15873 has an integer value. We also allow +Inf, -Inf and NaN to be
15874 considered integer values. Return false for signaling NaN.
15876 DEPTH is the current nesting depth of the query. */
15878 static bool
15879 integer_valued_real_invalid_p (tree t, int depth)
15881 switch (TREE_CODE (t))
15883 case COMPOUND_EXPR:
15884 case MODIFY_EXPR:
15885 case BIND_EXPR:
15886 return RECURSE (TREE_OPERAND (t, 1));
15888 case SAVE_EXPR:
15889 return RECURSE (TREE_OPERAND (t, 0));
15891 default:
15892 break;
15894 return false;
15897 #undef RECURSE
15898 #undef integer_valued_real_p
15900 /* Return true if the floating point expression T has an integer value.
15901 We also allow +Inf, -Inf and NaN to be considered integer values.
15902 Return false for signaling NaN.
15904 DEPTH is the current nesting depth of the query. */
15906 bool
15907 integer_valued_real_p (tree t, int depth)
15909 if (t == error_mark_node)
15910 return false;
15912 STRIP_ANY_LOCATION_WRAPPER (t);
15914 tree_code code = TREE_CODE (t);
15915 switch (TREE_CODE_CLASS (code))
15917 case tcc_binary:
15918 case tcc_comparison:
15919 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15920 TREE_OPERAND (t, 1), depth);
15922 case tcc_unary:
15923 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15925 case tcc_constant:
15926 case tcc_declaration:
15927 case tcc_reference:
15928 return integer_valued_real_single_p (t, depth);
15930 default:
15931 break;
15934 switch (code)
15936 case COND_EXPR:
15937 case SSA_NAME:
15938 return integer_valued_real_single_p (t, depth);
15940 case CALL_EXPR:
15942 tree arg0 = (call_expr_nargs (t) > 0
15943 ? CALL_EXPR_ARG (t, 0)
15944 : NULL_TREE);
15945 tree arg1 = (call_expr_nargs (t) > 1
15946 ? CALL_EXPR_ARG (t, 1)
15947 : NULL_TREE);
15948 return integer_valued_real_call_p (get_call_combined_fn (t),
15949 arg0, arg1, depth);
15952 default:
15953 return integer_valued_real_invalid_p (t, depth);
15957 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15958 attempt to fold the expression to a constant without modifying TYPE,
15959 OP0 or OP1.
15961 If the expression could be simplified to a constant, then return
15962 the constant. If the expression would not be simplified to a
15963 constant, then return NULL_TREE. */
15965 tree
15966 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15968 tree tem = fold_binary (code, type, op0, op1);
15969 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15972 /* Given the components of a unary expression CODE, TYPE and OP0,
15973 attempt to fold the expression to a constant without modifying
15974 TYPE or OP0.
15976 If the expression could be simplified to a constant, then return
15977 the constant. If the expression would not be simplified to a
15978 constant, then return NULL_TREE. */
15980 tree
15981 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15983 tree tem = fold_unary (code, type, op0);
15984 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15987 /* If EXP represents referencing an element in a constant string
15988 (either via pointer arithmetic or array indexing), return the
15989 tree representing the value accessed, otherwise return NULL. */
15991 tree
15992 fold_read_from_constant_string (tree exp)
15994 if ((INDIRECT_REF_P (exp)
15995 || TREE_CODE (exp) == ARRAY_REF)
15996 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15998 tree exp1 = TREE_OPERAND (exp, 0);
15999 tree index;
16000 tree string;
16001 location_t loc = EXPR_LOCATION (exp);
16003 if (INDIRECT_REF_P (exp))
16004 string = string_constant (exp1, &index, NULL, NULL);
16005 else
16007 tree low_bound = array_ref_low_bound (exp);
16008 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16010 /* Optimize the special-case of a zero lower bound.
16012 We convert the low_bound to sizetype to avoid some problems
16013 with constant folding. (E.g. suppose the lower bound is 1,
16014 and its mode is QI. Without the conversion,l (ARRAY
16015 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16016 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16017 if (! integer_zerop (low_bound))
16018 index = size_diffop_loc (loc, index,
16019 fold_convert_loc (loc, sizetype, low_bound));
16021 string = exp1;
16024 scalar_int_mode char_mode;
16025 if (string
16026 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16027 && TREE_CODE (string) == STRING_CST
16028 && tree_fits_uhwi_p (index)
16029 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16030 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
16031 &char_mode)
16032 && GET_MODE_SIZE (char_mode) == 1)
16033 return build_int_cst_type (TREE_TYPE (exp),
16034 (TREE_STRING_POINTER (string)
16035 [TREE_INT_CST_LOW (index)]));
16037 return NULL;
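/* Example (illustrative): for the C expression "abc"[1], EXP arrives here
   as an ARRAY_REF of the STRING_CST "abc" with index 1, and the function
   returns an INTEGER_CST holding 'b' in the element type of EXP.  */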
16040 /* Folds a read from vector element at IDX of vector ARG. */
16042 tree
16043 fold_read_from_vector (tree arg, poly_uint64 idx)
16045 unsigned HOST_WIDE_INT i;
16046 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
16047 && known_ge (idx, 0u)
16048 && idx.is_constant (&i))
16050 if (TREE_CODE (arg) == VECTOR_CST)
16051 return VECTOR_CST_ELT (arg, i);
16052 else if (TREE_CODE (arg) == CONSTRUCTOR)
16054 if (CONSTRUCTOR_NELTS (arg)
16055 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
16056 return NULL_TREE;
16057 if (i >= CONSTRUCTOR_NELTS (arg))
16058 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
16059 return CONSTRUCTOR_ELT (arg, i)->value;
16062 return NULL_TREE;
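/* Example (sketch): with ARG the VECTOR_CST { 1, 2, 3, 4 } and IDX = 2,
   this returns the INTEGER_CST 3.  For a CONSTRUCTOR with fewer
   initialized elements than IDX, a zero of the element type is built.  */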
16065 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16066 an integer constant, real, or fixed-point constant.
16068 TYPE is the type of the result. */
16070 static tree
16071 fold_negate_const (tree arg0, tree type)
16073 tree t = NULL_TREE;
16075 switch (TREE_CODE (arg0))
16077 case REAL_CST:
16078 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16079 break;
16081 case FIXED_CST:
16083 FIXED_VALUE_TYPE f;
16084 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16085 &(TREE_FIXED_CST (arg0)), NULL,
16086 TYPE_SATURATING (type));
16087 t = build_fixed (type, f);
16088 /* Propagate overflow flags. */
16089 if (overflow_p | TREE_OVERFLOW (arg0))
16090 TREE_OVERFLOW (t) = 1;
16091 break;
16094 default:
16095 if (poly_int_tree_p (arg0))
16097 wi::overflow_type overflow;
16098 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
16099 t = force_fit_type (type, res, 1,
16100 (overflow && ! TYPE_UNSIGNED (type))
16101 || TREE_OVERFLOW (arg0));
16102 break;
16105 gcc_unreachable ();
16108 return t;
16111 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16112 an integer constant or real constant.
16114 TYPE is the type of the result. */
16116 tree
16117 fold_abs_const (tree arg0, tree type)
16119 tree t = NULL_TREE;
16121 switch (TREE_CODE (arg0))
16123 case INTEGER_CST:
16125 /* If the value is unsigned or non-negative, then the absolute value
16126 is the same as the ordinary value. */
16127 wide_int val = wi::to_wide (arg0);
16128 wi::overflow_type overflow = wi::OVF_NONE;
16129 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
16132 /* If the value is negative, then the absolute value is
16133 its negation. */
16134 else
16135 val = wi::neg (val, &overflow);
16137 /* Force to the destination type, set TREE_OVERFLOW for signed
16138 TYPE only. */
16139 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
16141 break;
16143 case REAL_CST:
16144 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16145 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16146 else
16147 t = arg0;
16148 break;
16150 default:
16151 gcc_unreachable ();
16154 return t;
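/* Worked example (illustrative): for a 32-bit signed INTEGER_CST holding
   INT_MIN, wi::neg overflows and force_fit_type returns INT_MIN with
   TREE_OVERFLOW set on the result, mirroring fold_negate_const above.  */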
16157 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16158 constant. TYPE is the type of the result. */
16160 static tree
16161 fold_not_const (const_tree arg0, tree type)
16163 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16165 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
16168 /* Given CODE, a relational operator, the target type, TYPE and two
16169 constant operands OP0 and OP1, return the result of the
16170 relational operation. If the result is not a compile time
16171 constant, then return NULL_TREE. */
16173 static tree
16174 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16176 int result, invert;
16178 /* From here on, the only cases we handle are when the result is
16179 known to be a constant. */
16181 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16183 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16184 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16186 /* Handle the cases where either operand is a NaN. */
16187 if (real_isnan (c0) || real_isnan (c1))
16189 switch (code)
16191 case EQ_EXPR:
16192 case ORDERED_EXPR:
16193 result = 0;
16194 break;
16196 case NE_EXPR:
16197 case UNORDERED_EXPR:
16198 case UNLT_EXPR:
16199 case UNLE_EXPR:
16200 case UNGT_EXPR:
16201 case UNGE_EXPR:
16202 case UNEQ_EXPR:
16203 result = 1;
16204 break;
16206 case LT_EXPR:
16207 case LE_EXPR:
16208 case GT_EXPR:
16209 case GE_EXPR:
16210 case LTGT_EXPR:
16211 if (flag_trapping_math)
16212 return NULL_TREE;
16213 result = 0;
16214 break;
16216 default:
16217 gcc_unreachable ();
16220 return constant_boolean_node (result, type);
16223 return constant_boolean_node (real_compare (code, c0, c1), type);
16226 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16228 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16229 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16230 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16233 /* Handle equality/inequality of complex constants. */
16234 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16236 tree rcond = fold_relational_const (code, type,
16237 TREE_REALPART (op0),
16238 TREE_REALPART (op1));
16239 tree icond = fold_relational_const (code, type,
16240 TREE_IMAGPART (op0),
16241 TREE_IMAGPART (op1));
16242 if (code == EQ_EXPR)
16243 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16244 else if (code == NE_EXPR)
16245 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16246 else
16247 return NULL_TREE;
16250 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16252 if (!VECTOR_TYPE_P (type))
16254 /* Have vector comparison with scalar boolean result. */
16255 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16256 && known_eq (VECTOR_CST_NELTS (op0),
16257 VECTOR_CST_NELTS (op1)));
16258 unsigned HOST_WIDE_INT nunits;
16259 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16260 return NULL_TREE;
16261 for (unsigned i = 0; i < nunits; i++)
16263 tree elem0 = VECTOR_CST_ELT (op0, i);
16264 tree elem1 = VECTOR_CST_ELT (op1, i);
16265 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16266 if (tmp == NULL_TREE)
16267 return NULL_TREE;
16268 if (integer_zerop (tmp))
16269 return constant_boolean_node (code == NE_EXPR, type);
16271 return constant_boolean_node (code == EQ_EXPR, type);
16273 tree_vector_builder elts;
16274 if (!elts.new_binary_operation (type, op0, op1, false))
16275 return NULL_TREE;
16276 unsigned int count = elts.encoded_nelts ();
16277 for (unsigned i = 0; i < count; i++)
16279 tree elem_type = TREE_TYPE (type);
16280 tree elem0 = VECTOR_CST_ELT (op0, i);
16281 tree elem1 = VECTOR_CST_ELT (op1, i);
16283 tree tem = fold_relational_const (code, elem_type,
16284 elem0, elem1);
16286 if (tem == NULL_TREE)
16287 return NULL_TREE;
16289 elts.quick_push (build_int_cst (elem_type,
16290 integer_zerop (tem) ? 0 : -1));
16293 return elts.build ();
16296 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16298 To compute GT, swap the arguments and do LT.
16299 To compute GE, do LT and invert the result.
16300 To compute LE, swap the arguments, do LT and invert the result.
16301 To compute NE, do EQ and invert the result.
16303 Therefore, the code below must handle only EQ and LT. */
16305 if (code == LE_EXPR || code == GT_EXPR)
16307 std::swap (op0, op1);
16308 code = swap_tree_comparison (code);
16311 /* Note that it is safe to invert for real values here because we
16312 have already handled the one case where it matters. */
16314 invert = 0;
16315 if (code == NE_EXPR || code == GE_EXPR)
16317 invert = 1;
16318 code = invert_tree_comparison (code, false);
16321 /* Compute a result for LT or EQ if args permit;
16322 otherwise return NULL_TREE. */
16323 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16325 if (code == EQ_EXPR)
16326 result = tree_int_cst_equal (op0, op1);
16327 else
16328 result = tree_int_cst_lt (op0, op1);
16330 else
16331 return NULL_TREE;
16333 if (invert)
16334 result ^= 1;
16335 return constant_boolean_node (result, type);
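/* Illustrative sketch (not part of fold-const.cc; the helper name is
   hypothetical): the swap/invert reduction above, expressed over plain
   integers to make the bookkeeping concrete.

     static bool
     fold_int_cmp (enum tree_code code, long a, long b)
     {
       if (code == LE_EXPR || code == GT_EXPR)   // LE -> GE, GT -> LT
	 {
	   long tmp = a; a = b; b = tmp;
	   code = code == LE_EXPR ? GE_EXPR : LT_EXPR;
	 }
       bool invert = false;
       if (code == NE_EXPR || code == GE_EXPR)   // NE -> EQ, GE -> LT
	 {
	   invert = true;
	   code = code == NE_EXPR ? EQ_EXPR : LT_EXPR;
	 }
       bool r = code == EQ_EXPR ? a == b : a < b;
       return invert ? !r : r;
     }

   E.g. GE_EXPR on (3, 7) is computed as !(3 < 7) == false.  Inverting
   is only valid because the unordered (NaN) cases were handled
   earlier.  */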
16338 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16339 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16340 itself. */
16342 tree
16343 fold_build_cleanup_point_expr (tree type, tree expr)
16345 /* If the expression does not have side effects then we don't have to wrap
16346 it with a cleanup point expression. */
16347 if (!TREE_SIDE_EFFECTS (expr))
16348 return expr;
16350 /* If the expression is a return, check whether the expression inside
16351 the return, or the right-hand side of the modify expression inside it,
16352 has no side effects. If either has none, we don't need to wrap the
16353 expression in a cleanup point expression. Note we don't check the
16354 left-hand side of the modify because it should always be a return decl. */
16355 if (TREE_CODE (expr) == RETURN_EXPR)
16357 tree op = TREE_OPERAND (expr, 0);
16358 if (!op || !TREE_SIDE_EFFECTS (op))
16359 return expr;
16360 op = TREE_OPERAND (op, 1);
16361 if (!TREE_SIDE_EFFECTS (op))
16362 return expr;
16365 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
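/* For example (illustrative only): for "return tmp = foo ();" the
   RETURN_EXPR's operand is a MODIFY_EXPR whose right-hand side foo ()
   has side effects, so the result is

     CLEANUP_POINT_EXPR <RETURN_EXPR <MODIFY_EXPR <tmp, foo ()>>>

   whereas "return tmp;" has no side effects inside the return and is
   returned unwrapped.  */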
16368 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16369 of an indirection through OP0, or NULL_TREE if no simplification is
16370 possible. */
16372 tree
16373 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16375 tree sub = op0;
16376 tree subtype;
16377 poly_uint64 const_op01;
16379 STRIP_NOPS (sub);
16380 subtype = TREE_TYPE (sub);
16381 if (!POINTER_TYPE_P (subtype)
16382 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16383 return NULL_TREE;
16385 if (TREE_CODE (sub) == ADDR_EXPR)
16387 tree op = TREE_OPERAND (sub, 0);
16388 tree optype = TREE_TYPE (op);
16390 /* *&CONST_DECL -> the value of the const decl. */
16391 if (TREE_CODE (op) == CONST_DECL)
16392 return DECL_INITIAL (op);
16393 /* *&p => p; make sure to handle *&"str"[cst] here. */
16394 if (type == optype)
16396 tree fop = fold_read_from_constant_string (op);
16397 if (fop)
16398 return fop;
16399 else
16400 return op;
16402 /* *(foo *)&fooarray => fooarray[0] */
16403 else if (TREE_CODE (optype) == ARRAY_TYPE
16404 && type == TREE_TYPE (optype)
16405 && (!in_gimple_form
16406 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16408 tree type_domain = TYPE_DOMAIN (optype);
16409 tree min_val = size_zero_node;
16410 if (type_domain && TYPE_MIN_VALUE (type_domain))
16411 min_val = TYPE_MIN_VALUE (type_domain);
16412 if (in_gimple_form
16413 && TREE_CODE (min_val) != INTEGER_CST)
16414 return NULL_TREE;
16415 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16416 NULL_TREE, NULL_TREE);
16418 /* *(foo *)&complexfoo => __real__ complexfoo */
16419 else if (TREE_CODE (optype) == COMPLEX_TYPE
16420 && type == TREE_TYPE (optype))
16421 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16422 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16423 else if (VECTOR_TYPE_P (optype)
16424 && type == TREE_TYPE (optype))
16426 tree part_width = TYPE_SIZE (type);
16427 tree index = bitsize_int (0);
16428 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16429 index);
16433 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16434 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16436 tree op00 = TREE_OPERAND (sub, 0);
16437 tree op01 = TREE_OPERAND (sub, 1);
16439 STRIP_NOPS (op00);
16440 if (TREE_CODE (op00) == ADDR_EXPR)
16442 tree op00type;
16443 op00 = TREE_OPERAND (op00, 0);
16444 op00type = TREE_TYPE (op00);
16446 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16447 if (VECTOR_TYPE_P (op00type)
16448 && type == TREE_TYPE (op00type)
16449 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16450 but we want to treat offsets with MSB set as negative.
16451 For the code below negative offsets are invalid and
16452 TYPE_SIZE of the element is something unsigned, so
16453 check whether op01 fits into poly_int64, which implies
16454 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16455 then just use poly_uint64 because we want to treat the
16456 value as unsigned. */
16457 && tree_fits_poly_int64_p (op01))
16459 tree part_width = TYPE_SIZE (type);
16460 poly_uint64 max_offset
16461 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16462 * TYPE_VECTOR_SUBPARTS (op00type));
16463 if (known_lt (const_op01, max_offset))
16465 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16466 return fold_build3_loc (loc,
16467 BIT_FIELD_REF, type, op00,
16468 part_width, index);
16471 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16472 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16473 && type == TREE_TYPE (op00type))
16475 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16476 const_op01))
16477 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16479 /* ((foo *)&fooarray)[1] => fooarray[1] */
16480 else if (TREE_CODE (op00type) == ARRAY_TYPE
16481 && type == TREE_TYPE (op00type))
16483 tree type_domain = TYPE_DOMAIN (op00type);
16484 tree min_val = size_zero_node;
16485 if (type_domain && TYPE_MIN_VALUE (type_domain))
16486 min_val = TYPE_MIN_VALUE (type_domain);
16487 poly_uint64 type_size, index;
16488 if (poly_int_tree_p (min_val)
16489 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16490 && multiple_p (const_op01, type_size, &index))
16492 poly_offset_int off = index + wi::to_poly_offset (min_val);
16493 op01 = wide_int_to_tree (sizetype, off);
16494 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16495 NULL_TREE, NULL_TREE);
16501 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16502 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16503 && type == TREE_TYPE (TREE_TYPE (subtype))
16504 && (!in_gimple_form
16505 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16507 tree type_domain;
16508 tree min_val = size_zero_node;
16509 sub = build_fold_indirect_ref_loc (loc, sub);
16510 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16511 if (type_domain && TYPE_MIN_VALUE (type_domain))
16512 min_val = TYPE_MIN_VALUE (type_domain);
16513 if (in_gimple_form
16514 && TREE_CODE (min_val) != INTEGER_CST)
16515 return NULL_TREE;
16516 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16517 NULL_TREE);
16520 return NULL_TREE;
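/* A few source-level instances of the folds above (illustrative; the
   declarations are hypothetical):

     double a[4];
     _Complex double c;
     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;

     *(double *) &a	  => a[0]
     ((double *) &a)[1]	  => a[1]
     *(double *) &c	  => __real__ c
     ((double *) &c)[1]	  => __imag__ c
     *(int *) &v	  => BIT_FIELD_REF <v, 32, 0>

   assuming a 64-bit double and a 128-bit vector mode.  */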
16523 /* Builds an expression for an indirection through T, simplifying some
16524 cases. */
16526 tree
16527 build_fold_indirect_ref_loc (location_t loc, tree t)
16529 tree type = TREE_TYPE (TREE_TYPE (t));
16530 tree sub = fold_indirect_ref_1 (loc, type, t);
16532 if (sub)
16533 return sub;
16535 return build1_loc (loc, INDIRECT_REF, type, t);
16538 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16540 tree
16541 fold_indirect_ref_loc (location_t loc, tree t)
16543 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16545 if (sub)
16546 return sub;
16547 else
16548 return t;
16551 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16552 whose result is ignored. The type of the returned tree need not be
16553 the same as the original expression. */
16555 tree
16556 fold_ignored_result (tree t)
16558 if (!TREE_SIDE_EFFECTS (t))
16559 return integer_zero_node;
16561 for (;;)
16562 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16564 case tcc_unary:
16565 t = TREE_OPERAND (t, 0);
16566 break;
16568 case tcc_binary:
16569 case tcc_comparison:
16570 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16571 t = TREE_OPERAND (t, 0);
16572 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16573 t = TREE_OPERAND (t, 1);
16574 else
16575 return t;
16576 break;
16578 case tcc_expression:
16579 switch (TREE_CODE (t))
16581 case COMPOUND_EXPR:
16582 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16583 return t;
16584 t = TREE_OPERAND (t, 0);
16585 break;
16587 case COND_EXPR:
16588 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16589 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16590 return t;
16591 t = TREE_OPERAND (t, 0);
16592 break;
16594 default:
16595 return t;
16597 break;
16599 default:
16600 return t;
16604 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16606 tree
16607 round_up_loc (location_t loc, tree value, unsigned int divisor)
16609 tree div = NULL_TREE;
16611 if (divisor == 1)
16612 return value;
16614 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16615 have to do anything. Only do this when VALUE is not a constant,
16616 because for a constant this check is more expensive than just
16617 doing the rounding. */
16618 if (TREE_CODE (value) != INTEGER_CST)
16620 div = build_int_cst (TREE_TYPE (value), divisor);
16622 if (multiple_of_p (TREE_TYPE (value), value, div))
16623 return value;
16626 /* If divisor is a power of two, simplify this to bit manipulation. */
16627 if (pow2_or_zerop (divisor))
16629 if (TREE_CODE (value) == INTEGER_CST)
16631 wide_int val = wi::to_wide (value);
16632 bool overflow_p;
16634 if ((val & (divisor - 1)) == 0)
16635 return value;
16637 overflow_p = TREE_OVERFLOW (value);
16638 val += divisor - 1;
16639 val &= (int) -divisor;
16640 if (val == 0)
16641 overflow_p = true;
16643 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16645 else
16647 tree t;
16649 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16650 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16651 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16652 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16655 else
16657 if (!div)
16658 div = build_int_cst (TREE_TYPE (value), divisor);
16659 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16660 value = size_binop_loc (loc, MULT_EXPR, value, div);
16663 return value;
16666 /* Likewise, but round down. */
16668 tree
16669 round_down_loc (location_t loc, tree value, int divisor)
16671 tree div = NULL_TREE;
16673 gcc_assert (divisor > 0);
16674 if (divisor == 1)
16675 return value;
16677 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16678 have to do anything. Only do this when VALUE is not a constant,
16679 because for a constant this check is more expensive than just
16680 doing the rounding. */
16681 if (TREE_CODE (value) != INTEGER_CST)
16683 div = build_int_cst (TREE_TYPE (value), divisor);
16685 if (multiple_of_p (TREE_TYPE (value), value, div))
16686 return value;
16689 /* If divisor is a power of two, simplify this to bit manipulation. */
16690 if (pow2_or_zerop (divisor))
16692 tree t;
16694 t = build_int_cst (TREE_TYPE (value), -divisor);
16695 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16697 else
16699 if (!div)
16700 div = build_int_cst (TREE_TYPE (value), divisor);
16701 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16702 value = size_binop_loc (loc, MULT_EXPR, value, div);
16705 return value;
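/* The power-of-two fast paths in round_up_loc and round_down_loc are
   the classic mask tricks; as a standalone C sketch (hypothetical
   helper names, DIVISOR assumed to be a nonzero power of two):

     static unsigned HOST_WIDE_INT
     round_up_pow2 (unsigned HOST_WIDE_INT value,
		    unsigned HOST_WIDE_INT divisor)
     {
       /* Add divisor - 1, then clear the low bits.  */
       return (value + divisor - 1) & -divisor;
     }

     static unsigned HOST_WIDE_INT
     round_down_pow2 (unsigned HOST_WIDE_INT value,
		      unsigned HOST_WIDE_INT divisor)
     {
       /* Just clear the low bits.  */
       return value & -divisor;
     }

   E.g. round_up_pow2 (13, 8) == 16 and round_down_pow2 (13, 8) == 8.  */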
16708 /* Returns the pointer to the base of the object addressed by EXP and
16709 extracts the information about the offset of the access, storing it
16710 in PBITPOS and POFFSET. */
16712 static tree
16713 split_address_to_core_and_offset (tree exp,
16714 poly_int64 *pbitpos, tree *poffset)
16716 tree core;
16717 machine_mode mode;
16718 int unsignedp, reversep, volatilep;
16719 poly_int64 bitsize;
16720 location_t loc = EXPR_LOCATION (exp);
16722 if (TREE_CODE (exp) == SSA_NAME)
16723 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16724 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16725 exp = gimple_assign_rhs1 (def);
16727 if (TREE_CODE (exp) == ADDR_EXPR)
16729 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16730 poffset, &mode, &unsignedp, &reversep,
16731 &volatilep);
16732 core = build_fold_addr_expr_loc (loc, core);
16734 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16736 core = TREE_OPERAND (exp, 0);
16737 STRIP_NOPS (core);
16738 *pbitpos = 0;
16739 *poffset = TREE_OPERAND (exp, 1);
16740 if (poly_int_tree_p (*poffset))
16742 poly_offset_int tem
16743 = wi::sext (wi::to_poly_offset (*poffset),
16744 TYPE_PRECISION (TREE_TYPE (*poffset)));
16745 tem <<= LOG2_BITS_PER_UNIT;
16746 if (tem.to_shwi (pbitpos))
16747 *poffset = NULL_TREE;
16750 else
16752 core = exp;
16753 *pbitpos = 0;
16754 *poffset = NULL_TREE;
16757 return core;
16760 /* Returns true if addresses of E1 and E2 differ by a constant, false
16761 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16763 bool
16764 ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16766 tree core1, core2;
16767 poly_int64 bitpos1, bitpos2;
16768 tree toffset1, toffset2, tdiff, type;
16770 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16771 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16773 poly_int64 bytepos1, bytepos2;
16774 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16775 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16776 || !operand_equal_p (core1, core2, 0))
16777 return false;
16779 if (toffset1 && toffset2)
16781 type = TREE_TYPE (toffset1);
16782 if (type != TREE_TYPE (toffset2))
16783 toffset2 = fold_convert (type, toffset2);
16785 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16786 if (!cst_and_fits_in_hwi (tdiff))
16787 return false;
16789 *diff = int_cst_value (tdiff);
16791 else if (toffset1 || toffset2)
16793 /* If only one of the offsets is non-constant, the difference cannot
16794 be a constant. */
16795 return false;
16797 else
16798 *diff = 0;
16800 *diff += bytepos1 - bytepos2;
16801 return true;
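/* Illustrative example: given "int a[10];" with 4-byte int, the
   addresses &a[7] and &a[2] both split to core &a with bit positions
   224 and 64, i.e. byte positions 28 and 8, so ptr_difference_const
   succeeds and stores 20 in *DIFF.  For &a[i] versus &b[0] the cores
   differ and the function returns false.  */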
16804 /* Return OFF converted to a pointer offset type suitable as offset for
16805 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16806 tree
16807 convert_to_ptrofftype_loc (location_t loc, tree off)
16809 if (ptrofftype_p (TREE_TYPE (off)))
16810 return off;
16811 return fold_convert_loc (loc, sizetype, off);
16814 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16815 tree
16816 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16818 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16819 ptr, convert_to_ptrofftype_loc (loc, off));
16822 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16823 tree
16824 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16826 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16827 ptr, size_int (off));
16830 /* Return a pointer to a NUL-terminated string containing the sequence
16831 of bytes corresponding to the representation of the object referred to
16832 by SRC (or a subsequence of such bytes within it if SRC is a reference
16833 to an initialized constant array plus some constant offset).
16834 Set *STRSIZE to the number of bytes in the constant sequence including
16835 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16836 where A is the array that stores the constant sequence that SRC points
16837 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16838 need not point to a string or even an array of characters but may point
16839 to an object of any type. */
16841 const char *
16842 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16844 /* The offset into the array A storing the string, and A's byte size. */
16845 tree offset_node;
16846 tree mem_size;
16848 if (strsize)
16849 *strsize = 0;
16851 if (strsize)
16852 src = byte_representation (src, &offset_node, &mem_size, NULL);
16853 else
16854 src = string_constant (src, &offset_node, &mem_size, NULL);
16855 if (!src)
16856 return NULL;
16858 unsigned HOST_WIDE_INT offset = 0;
16859 if (offset_node != NULL_TREE)
16861 if (!tree_fits_uhwi_p (offset_node))
16862 return NULL;
16863 else
16864 offset = tree_to_uhwi (offset_node);
16867 if (!tree_fits_uhwi_p (mem_size))
16868 return NULL;
16870 /* ARRAY_SIZE is the byte size of the array the constant sequence
16871 is stored in and equal to sizeof A. INIT_BYTES is the number
16872 of bytes in the constant sequence used to initialize the array,
16873 including any embedded NULs as well as the terminating NUL (for
16874 strings), but not including any trailing zeros/NULs past
16875 the terminating one appended implicitly to a string literal to
16876 zero out the remainder of the array it's stored in. For example,
16877 given:
16878 const char a[7] = "abc\0d";
16879 n = strlen (a + 1);
16880 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16881 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16882 is equal to strlen (A) + 1. */
16883 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16884 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16885 const char *string = TREE_STRING_POINTER (src);
16887 /* Ideally this would turn into a gcc_checking_assert over time. */
16888 if (init_bytes > array_size)
16889 init_bytes = array_size;
16891 if (init_bytes == 0 || offset >= array_size)
16892 return NULL;
16894 if (strsize)
16896 /* Compute and store the number of characters from the beginning
16897 of the substring at OFFSET to the end, including the terminating
16898 nul. Offsets past the initial length refer to null strings. */
16899 if (offset < init_bytes)
16900 *strsize = init_bytes - offset;
16901 else
16902 *strsize = 1;
16904 else
16906 tree eltype = TREE_TYPE (TREE_TYPE (src));
16907 /* Support only properly NUL-terminated single-byte strings. */
16908 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16909 return NULL;
16910 if (string[init_bytes - 1] != '\0')
16911 return NULL;
16914 return offset < init_bytes ? string + offset : "";
16917 /* Return a pointer to a NUL-terminated string corresponding to
16918 the expression STR referencing a constant string, possibly
16919 involving a constant offset. Return null if STR either doesn't
16920 reference a constant string or if it involves a nonconstant
16921 offset. */
16923 const char *
16924 c_getstr (tree str)
16926 return getbyterep (str, NULL);
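/* Worked example (continuing the example in getbyterep's comment
   above; illustrative):

     const char a[7] = "abc\0d";

   The stored STRING_CST has init_bytes == 6 ("abc\0d" plus the
   terminating NUL).  getbyterep on &a[1] with a non-null STRSIZE
   returns a pointer to the bytes "bc\0d" and sets *strsize to
   init_bytes - offset == 5, while c_getstr (&a[4]) returns "d".
   A nonconstant offset makes both return NULL.  */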
16929 /* Given a tree T, compute which bits in T may be nonzero. */
16931 wide_int
16932 tree_nonzero_bits (const_tree t)
16934 switch (TREE_CODE (t))
16936 case INTEGER_CST:
16937 return wi::to_wide (t);
16938 case SSA_NAME:
16939 return get_nonzero_bits (t);
16940 case NON_LVALUE_EXPR:
16941 case SAVE_EXPR:
16942 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16943 case BIT_AND_EXPR:
16944 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16945 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16946 case BIT_IOR_EXPR:
16947 case BIT_XOR_EXPR:
16948 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16949 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16950 case COND_EXPR:
16951 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16952 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16953 CASE_CONVERT:
16954 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16955 TYPE_PRECISION (TREE_TYPE (t)),
16956 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16957 case PLUS_EXPR:
16958 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16960 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16961 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16962 if (wi::bit_and (nzbits1, nzbits2) == 0)
16963 return wi::bit_or (nzbits1, nzbits2);
16965 break;
16966 case LSHIFT_EXPR:
16967 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16969 tree type = TREE_TYPE (t);
16970 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16971 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16972 TYPE_PRECISION (type));
16973 return wi::neg_p (arg1)
16974 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16975 : wi::lshift (nzbits, arg1);
16977 break;
16978 case RSHIFT_EXPR:
16979 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16981 tree type = TREE_TYPE (t);
16982 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16983 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16984 TYPE_PRECISION (type));
16985 return wi::neg_p (arg1)
16986 ? wi::lshift (nzbits, -arg1)
16987 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16989 break;
16990 default:
16991 break;
16994 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
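/* A standalone sketch of the propagation rules above over plain masks
   instead of wide_ints (illustrative; helper names are hypothetical).
   A set bit means "may be nonzero":

     typedef unsigned int mask;
     static mask nz_and  (mask a, mask b) { return a & b; }
     static mask nz_ior  (mask a, mask b) { return a | b; }
     static mask nz_plus (mask a, mask b)
     {
       /* With no overlapping bits no carry can occur, so the sum's
	  nonzero bits are contained in the union.  */
       return (a & b) == 0 ? (a | b) : ~0u;
     }
     static mask nz_lshift (mask a, unsigned s) { return a << s; }

   E.g. nz_plus (0xF0, 0x0F) == 0xFF, whereas nz_plus (0x18, 0x0C)
   must return all-ones because a carry may escape the union.  */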
16997 /* Helper function for address compare simplifications in match.pd.
16998 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16999 TYPE is the type of comparison operands.
17000 BASE0, BASE1, OFF0 and OFF1 are set by the function.
17001 GENERIC is true for GENERIC folding and false for GIMPLE folding.
17002 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
17003 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
17004 and 2 if unknown. */
17006 int
17007 address_compare (tree_code code, tree type, tree op0, tree op1,
17008 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
17009 bool generic)
17011 if (TREE_CODE (op0) == SSA_NAME)
17012 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
17013 if (TREE_CODE (op1) == SSA_NAME)
17014 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
17015 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
17016 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
17017 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
17018 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
17019 if (base0 && TREE_CODE (base0) == MEM_REF)
17021 off0 += mem_ref_offset (base0).force_shwi ();
17022 base0 = TREE_OPERAND (base0, 0);
17024 if (base1 && TREE_CODE (base1) == MEM_REF)
17026 off1 += mem_ref_offset (base1).force_shwi ();
17027 base1 = TREE_OPERAND (base1, 0);
17029 if (base0 == NULL_TREE || base1 == NULL_TREE)
17030 return 2;
17032 int equal = 2;
17033 /* Punt in GENERIC on variables with value expressions;
17034 the value expressions might point to fields/elements
17035 of other vars etc. */
17036 if (generic
17037 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
17038 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
17039 return 2;
17040 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
17042 symtab_node *node0 = symtab_node::get_create (base0);
17043 symtab_node *node1 = symtab_node::get_create (base1);
17044 equal = node0->equal_address_to (node1);
17046 else if ((DECL_P (base0)
17047 || TREE_CODE (base0) == SSA_NAME
17048 || TREE_CODE (base0) == STRING_CST)
17049 && (DECL_P (base1)
17050 || TREE_CODE (base1) == SSA_NAME
17051 || TREE_CODE (base1) == STRING_CST))
17052 equal = (base0 == base1);
17053 /* Assume different STRING_CSTs with the same content will be
17054 merged. */
17055 if (equal == 0
17056 && TREE_CODE (base0) == STRING_CST
17057 && TREE_CODE (base1) == STRING_CST
17058 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
17059 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
17060 TREE_STRING_LENGTH (base0)) == 0)
17061 equal = 1;
17062 if (equal == 1)
17064 if (code == EQ_EXPR
17065 || code == NE_EXPR
17066 /* If the offsets are equal we can ignore overflow. */
17067 || known_eq (off0, off1)
17068 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
17069 /* Or if we compare using pointers to decls or strings. */
17070 || (POINTER_TYPE_P (type)
17071 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
17072 return 1;
17073 return 2;
17075 if (equal != 0)
17076 return equal;
17077 if (code != EQ_EXPR && code != NE_EXPR)
17078 return 2;
17080 /* At this point we know (or assume) the two pointers point at
17081 different objects. */
17082 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
17083 off0.is_constant (&ioff0);
17084 off1.is_constant (&ioff1);
17085 /* Punt on non-zero offsets from functions. */
17086 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
17087 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
17088 return 2;
17089 /* Or if the bases are neither decls nor string literals. */
17090 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
17091 return 2;
17092 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
17093 return 2;
17094 /* For initializers, assume addresses of different functions are
17095 different. */
17096 if (folding_initializer
17097 && TREE_CODE (base0) == FUNCTION_DECL
17098 && TREE_CODE (base1) == FUNCTION_DECL)
17099 return 0;
17101 /* Compute whether one address points to the start of one
17102 object and another one to the end of another one. */
17103 poly_int64 size0 = 0, size1 = 0;
17104 if (TREE_CODE (base0) == STRING_CST)
17106 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
17107 equal = 2;
17108 else
17109 size0 = TREE_STRING_LENGTH (base0);
17111 else if (TREE_CODE (base0) == FUNCTION_DECL)
17112 size0 = 1;
17113 else
17115 tree sz0 = DECL_SIZE_UNIT (base0);
17116 if (!tree_fits_poly_int64_p (sz0))
17117 equal = 2;
17118 else
17119 size0 = tree_to_poly_int64 (sz0);
17121 if (TREE_CODE (base1) == STRING_CST)
17123 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
17124 equal = 2;
17125 else
17126 size1 = TREE_STRING_LENGTH (base1);
17128 else if (TREE_CODE (base1) == FUNCTION_DECL)
17129 size1 = 1;
17130 else
17132 tree sz1 = DECL_SIZE_UNIT (base1);
17133 if (!tree_fits_poly_int64_p (sz1))
17134 equal = 2;
17135 else
17136 size1 = tree_to_poly_int64 (sz1);
17138 if (equal == 0)
17140 /* If one offset is pointing (or could be) to the beginning of one
17141 object and the other is pointing to one past the last byte of the
17142 other object, punt. */
17143 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
17144 equal = 2;
17145 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
17146 equal = 2;
17147 /* If both offsets are the same, there are some cases we know are
17148 OK: either we know the offsets aren't zero, or we know both sizes
17149 are nonzero. */
17150 if (equal == 2
17151 && known_eq (off0, off1)
17152 && (known_ne (off0, 0)
17153 || (known_ne (size0, 0) && known_ne (size1, 0))))
17154 equal = 0;
17157 /* At this point, equal is 2 if either one or both pointers are out of
17158 bounds of their object, or one points to start of its object and the
17159 other points to end of its object. This is unspecified behavior
17160 e.g. in C++. Otherwise equal is 0. */
17161 if (folding_cxx_constexpr && equal)
17162 return equal;
17164 /* When both pointers point to string literals, even when equal is 0,
17165 due to tail merging of string literals the pointers might be the same. */
17166 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
17168 if (ioff0 < 0
17169 || ioff1 < 0
17170 || ioff0 > TREE_STRING_LENGTH (base0)
17171 || ioff1 > TREE_STRING_LENGTH (base1))
17172 return 2;
17174 /* If the bytes in the string literals starting at the pointers
17175 differ, the pointers need to be different. */
17176 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
17177 TREE_STRING_POINTER (base1) + ioff1,
17178 MIN (TREE_STRING_LENGTH (base0) - ioff0,
17179 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
17181 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
17182 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
17183 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
17184 ioffmin) == 0)
17185 /* If even the bytes in the string literal before the
17186 pointers are the same, the string literals could be
17187 tail merged. */
17188 return 2;
17190 return 0;
17193 if (folding_cxx_constexpr)
17194 return 0;
17196 /* If this is a pointer comparison, ignore for now even valid
17197 equalities where one pointer is at offset zero of one object
17198 and the other points one past the end of another one. */
17199 if (!INTEGRAL_TYPE_P (type))
17200 return 0;
17202 /* Assume that string literals can't be adjacent to variables
17203 (automatic or global). */
17204 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
17205 return 0;
17207 /* Assume that automatic variables can't be adjacent to global
17208 variables. */
17209 if (is_global_var (base0) != is_global_var (base1))
17210 return 0;
17212 return equal;
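/* Some concrete outcomes of address_compare (illustrative), using the
   0/1/2 return convention documented above:

     static int x, y;

     &x == &y		     -> 0: distinct decls, both offsets zero.
     &x + sizeof x == &y     -> 2: one-past-the-end of x may equal &y.
     "abc" == "abc"	     -> 1: identical literals are assumed
			      merged, so the bases compare equal.
     &"abc"[1] == &"bc"[0]   -> 2: the bytes from the pointers onward
			      match, so the literals may be tail merged.
     &foo == &bar	     -> 0 in initializers, for distinct
			      FUNCTION_DECLs.  */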
17215 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
17216 tree
17217 ctor_single_nonzero_element (const_tree t)
17219 unsigned HOST_WIDE_INT idx;
17220 constructor_elt *ce;
17221 tree elt = NULL_TREE;
17223 if (TREE_CODE (t) != CONSTRUCTOR)
17224 return NULL_TREE;
17225 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17226 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17228 if (elt)
17229 return NULL_TREE;
17230 elt = ce->value;
17232 return elt;
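/* For example (illustrative): for the constructor { 0, 0, 5, 0 } this
   returns the element 5; for { 1, 2, 0, 0 } it returns NULL_TREE
   because two elements are nonzero, and for an all-zero constructor
   it returns NULL_TREE because none is.  */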
17235 #if CHECKING_P
17237 namespace selftest {
17239 /* Helper functions for writing tests of folding trees. */
17241 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17243 static void
17244 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17245 tree constant)
17247 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17250 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
17251 wrapping WRAPPED_EXPR. */
17253 static void
17254 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17255 tree wrapped_expr)
17257 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17258 ASSERT_NE (wrapped_expr, result);
17259 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17260 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17263 /* Verify that various arithmetic binary operations are folded
17264 correctly. */
17266 static void
17267 test_arithmetic_folding ()
17269 tree type = integer_type_node;
17270 tree x = create_tmp_var_raw (type, "x");
17271 tree zero = build_zero_cst (type);
17272 tree one = build_int_cst (type, 1);
17274 /* Addition. */
17275 /* 1 <-- (0 + 1) */
17276 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17277 one);
17278 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17279 one);
17281 /* (nonlvalue)x <-- (x + 0) */
17282 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);
17285 /* Subtraction. */
17286 /* 0 <-- (x - x) */
17287 assert_binop_folds_to_const (x, MINUS_EXPR, x,
17288 zero);
17289 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);
17292 /* Multiplication. */
17293 /* 0 <-- (x * 0) */
17294 assert_binop_folds_to_const (x, MULT_EXPR, zero,
17295 zero);
17297 /* (nonlvalue)x <-- (x * 1) */
17298 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
17302 namespace test_fold_vec_perm_cst {
17304 /* Build a VECTOR_CST corresponding to VMODE, with encoding given
17305 by NPATTERNS, NELTS_PER_PATTERN and STEP, and fill it with
17306 randomized elements using rand () % THRESHOLD. */
17308 static tree
17309 build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17310 unsigned nelts_per_pattern,
17311 int step = 0, bool natural_stepped = false,
17312 int threshold = 100)
17314 tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17315 tree vectype = build_vector_type_for_mode (inner_type, vmode);
17316 tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17318 // Fill a0 for each pattern
17319 for (unsigned i = 0; i < npatterns; i++)
17320 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17322 if (nelts_per_pattern == 1)
17323 return builder.build ();
17325 // Fill a1 for each pattern
17326 for (unsigned i = 0; i < npatterns; i++)
17328 tree a1;
17329 if (natural_stepped)
17331 tree a0 = builder[i];
17332 wide_int a0_val = wi::to_wide (a0);
17333 wide_int a1_val = a0_val + step;
17334 a1 = wide_int_to_tree (inner_type, a1_val);
17336 else
17337 a1 = build_int_cst (inner_type, rand () % threshold);
17338 builder.quick_push (a1);
17340 if (nelts_per_pattern == 2)
17341 return builder.build ();
17343 for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17345 tree prev_elem = builder[i - npatterns];
17346 wide_int prev_elem_val = wi::to_wide (prev_elem);
17347 wide_int val = prev_elem_val + step;
17348 builder.quick_push (wide_int_to_tree (inner_type, val));
17351 return builder.build ();
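/* For reference (a summary of the encoding assumed above, not text
   from the original file): a VECTOR_CST stores NPATTERNS interleaved
   patterns of NELTS_PER_PATTERN encoded elements each; the third and
   later elements of each pattern continue with a constant step.  For
   example, npatterns = 2, nelts_per_pattern = 3 and step = 1 with
   encoded elements

     { 2, 10, 3, 11, 4, 12 }

   describe the (possibly variable-length) vector
   { 2, 10, 3, 11, 4, 12, 5, 13, ... }.  */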
17354 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17355 when the result is VLA. */
17357 static void
17358 validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17359 tree res, tree *expected_res)
17361 /* Actual npatterns and encoded_elts in res may be less than expected due
17362 to canonicalization. */
17363 ASSERT_TRUE (res != NULL_TREE);
17364 ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17365 ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17367 for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17368 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17371 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17372 when the result is VLS. */
17374 static void
17375 validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17377 ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17378 for (unsigned i = 0; i < expected_nelts; i++)
17379 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17382 /* Helper routine to push multiple elements into BUILDER. */
17383 template<unsigned N>
17384 static void builder_push_elems (vec_perm_builder& builder,
17385 poly_uint64 (&elems)[N])
17387 for (unsigned i = 0; i < N; i++)
17388 builder.quick_push (elems[i]);
17391 #define ARG0(index) vector_cst_elt (arg0, index)
17392 #define ARG1(index) vector_cst_elt (arg1, index)
17394 /* Test cases where result is VNx4SI and input vectors are V4SI. */
17396 static void
17397 test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17399 for (int i = 0; i < 10; i++)
17401 /* Case 1:
17402 sel = { 0, 4, 1, 5, ... }
17403 res = { arg0[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1) */
17405 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17406 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17408 tree inner_type
17409 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17410 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17412 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17413 vec_perm_builder builder (res_len, 4, 1);
17414 poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17415 builder_push_elems (builder, mask_elems);
17417 vec_perm_indices sel (builder, 2, res_len);
17418 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17420 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17421 validate_res (4, 1, res, expected_res);
17424 /* Case 2: Same as case 1, but contains an out of bounds access which
17425 should wrap around.
17426 sel = {0, 8, 4, 12, ...} (4, 1)
17427 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17429 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17430 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17432 tree inner_type
17433 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17434 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17436 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17437 vec_perm_builder builder (res_len, 4, 1);
17438 poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17439 builder_push_elems (builder, mask_elems);
17441 vec_perm_indices sel (builder, 2, res_len);
17442 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17444 tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17445 validate_res (4, 1, res, expected_res);
17450 /* Test cases where result is V4SI and input vectors are VNx4SI. */
17452 static void
17453 test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17455 for (int i = 0; i < 10; i++)
17457 /* Case 1:
17458 sel = { 0, 1, 2, 3}
17459 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17461 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17462 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17464 tree inner_type
17465 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17466 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17468 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17469 vec_perm_builder builder (res_len, 4, 1);
17470 poly_uint64 mask_elems[] = {0, 1, 2, 3};
17471 builder_push_elems (builder, mask_elems);
17473 vec_perm_indices sel (builder, 2, res_len);
17474 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17476 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17477 validate_res_vls (res, expected_res, 4);
17480 /* Case 2: Same as Case 1, but crossing the input vectors.
17481 sel = {0, 2, 4, 6}
17482 In this case, the index 4 is ambiguous since len = 4 + 4x.
17483 Since we cannot determine at compile time which vector to
17484 choose from, folding should return NULL_TREE. */
17486 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17487 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17489 tree inner_type
17490 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17491 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17493 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17494 vec_perm_builder builder (res_len, 4, 1);
17495 poly_uint64 mask_elems[] = {0, 2, 4, 6};
17496 builder_push_elems (builder, mask_elems);
17498 vec_perm_indices sel (builder, 2, res_len);
17499 const char *reason;
17500 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17502 ASSERT_TRUE (res == NULL_TREE);
17503 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17508 /* Test all input vectors. */
17510 static void
17511 test_all_nunits (machine_mode vmode)
17513 /* Test with 10 different inputs. */
17514 for (int i = 0; i < 10; i++)
17516 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17517 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17518 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17520 /* Case 1: mask = {0, ...} // (1, 1)
17521 res = { arg0[0], ... } // (1, 1) */
17523 vec_perm_builder builder (len, 1, 1);
17524 builder.quick_push (0);
17525 vec_perm_indices sel (builder, 2, len);
17526 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17527 tree expected_res[] = { ARG0(0) };
17528 validate_res (1, 1, res, expected_res);
17531 /* Case 2: mask = {len, ...} // (1, 1)
17532 res = { arg1[0], ... } // (1, 1) */
17534 vec_perm_builder builder (len, 1, 1);
17535 builder.quick_push (len);
17536 vec_perm_indices sel (builder, 2, len);
17537 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17539 tree expected_res[] = { ARG1(0) };
17540 validate_res (1, 1, res, expected_res);
17547 /* Test all vectors which contain at least 2 elements. */
17547 static void
17548 test_nunits_min_2 (machine_mode vmode)
17550 for (int i = 0; i < 10; i++)
17552 /* Case 1: mask = { 0, len, ... } // (2, 1)
17553 res = { arg0[0], arg1[0], ... } // (2, 1) */
17555 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17556 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17557 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17559 vec_perm_builder builder (len, 2, 1);
17560 poly_uint64 mask_elems[] = { 0, len };
17561 builder_push_elems (builder, mask_elems);
17563 vec_perm_indices sel (builder, 2, len);
17564 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17566 tree expected_res[] = { ARG0(0), ARG1(0) };
17567 validate_res (2, 1, res, expected_res);
17570 /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17571 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17573 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17574 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17575 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17577 vec_perm_builder builder (len, 2, 2);
17578 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17579 builder_push_elems (builder, mask_elems);
17581 vec_perm_indices sel (builder, 2, len);
17582 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17584 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17585 validate_res (2, 2, res, expected_res);
17588 /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17589 Test that the stepped sequence of the pattern selects from the
17590 same input pattern. Since the input vectors have npatterns = 2
17591 and step (a2 - a1) = 1, the step is not a multiple of npatterns
17592 in the input vector. So return NULL_TREE. */
17594 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
17595 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17596 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17598 vec_perm_builder builder (len, 1, 3);
17599 poly_uint64 mask_elems[] = { 0, 0, 1 };
17600 builder_push_elems (builder, mask_elems);
17602 vec_perm_indices sel (builder, 2, len);
17603 const char *reason;
17604 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17605 &reason);
17606 ASSERT_TRUE (res == NULL_TREE);
17607 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17610 /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17611 Test that the stepped sequence of the pattern selects from arg0.
17612 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17614 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17615 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17616 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17618 vec_perm_builder builder (len, 1, 3);
17619 poly_uint64 mask_elems[] = { len, 0, 1 };
17620 builder_push_elems (builder, mask_elems);
17622 vec_perm_indices sel (builder, 2, len);
17623 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17625 tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17626 validate_res (1, 3, res, expected_res);
17629 /* Case 6: PR111648 - a1 chooses its base element from input vector arg.
17630 In this case, ensure that arg has a natural stepped sequence
17631 to preserve arg's encoding.
17633 As a concrete example, consider:
17634 arg0: { -16, -9, -10, ... } // (1, 3)
17635 arg1: { -12, -5, -6, ... } // (1, 3)
17636 sel = { 0, len, len + 1, ... } // (1, 3)
17638 This will create res with following encoding:
17639 res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
17640 = { -16, -12, -5, ... }
17642 The step in the above encoding would be (-5) - (-12) = 7,
17643 and hence res[3] would be computed as -5 + 7 = 2
17644 instead of arg1[2], i.e., -6.
17645 Ensure that valid_mask_for_fold_vec_perm_cst returns false
17646 for this case. */
17648 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17649 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17650 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17652 vec_perm_builder builder (len, 1, 3);
17653 poly_uint64 mask_elems[] = { 0, len, len+1 };
17654 builder_push_elems (builder, mask_elems);
17656 vec_perm_indices sel (builder, 2, len);
17657 const char *reason;
17658 /* FIXME: It may happen that build_vec_cst_rand may build a natural
17659 stepped pattern, even if we didn't explicitly tell it to. So folding
17660 may not always fail, but if it does, ensure that's because arg1 does
17661 not have a natural stepped sequence (and not for some other reason). */
17662 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17663 if (res == NULL_TREE)
17664 ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
17667 /* Case 7: Same as Case 6, except that arg1 contains natural stepped
17668 sequence and thus folding should be valid for this case. */
17670 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17671 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17672 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17674 vec_perm_builder builder (len, 1, 3);
17675 poly_uint64 mask_elems[] = { 0, len, len+1 };
17676 builder_push_elems (builder, mask_elems);
17678 vec_perm_indices sel (builder, 2, len);
17679 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17681 tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
17682 validate_res (1, 3, res, expected_res);
17685 /* Case 8: Same as aarch64/sve/slp_3.c:
17686 arg0, arg1 are dup vectors.
17687 sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
17688 So res = { arg0[0], arg1[0], ... } // (2, 1)
17690 In this case, since the input vectors are dup, only the first two
17691 elements per pattern in sel are considered significant. */
17693 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17694 tree arg1 = build_vec_cst_rand (vmode, 1, 1);
17695 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17697 vec_perm_builder builder (len, 2, 3);
17698 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17699 builder_push_elems (builder, mask_elems);
17701 vec_perm_indices sel (builder, 2, len);
17702 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17704 tree expected_res[] = { ARG0(0), ARG1(0) };
17705 validate_res (2, 1, res, expected_res);
17712 /* Test all vectors which contain at least 4 elements. */
17712 static void
17713 test_nunits_min_4 (machine_mode vmode)
17715 for (int i = 0; i < 10; i++)
17717 /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17718 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17720 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17721 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17722 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17724 vec_perm_builder builder (len, 4, 1);
17725 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17726 builder_push_elems (builder, mask_elems);
17728 vec_perm_indices sel (builder, 2, len);
17729 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17731 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17732 validate_res (4, 1, res, expected_res);
17735 /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17736 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17738 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17739 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17740 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17742 vec_perm_builder builder (arg0_len, 1, 3);
17743 poly_uint64 mask_elems[] = {0, 1, 2};
17744 builder_push_elems (builder, mask_elems);
17746 vec_perm_indices sel (builder, 2, arg0_len);
17747 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17748 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17749 validate_res (1, 3, res, expected_res);
17752 /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17753 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17755 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17756 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17757 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17759 vec_perm_builder builder (len, 1, 3);
17760 poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17761 builder_push_elems (builder, mask_elems);
17763 vec_perm_indices sel (builder, 2, len);
17764 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17765 tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17766 validate_res (1, 3, res, expected_res);
17769 /* Case 4:
17770 sel = { len, 0, 2, ... } // (1, 3)
17771 This should return NULL because we cross the input vectors.
17772 To see why, assume:
17773 len = C + Cx
17774 a1 = 0
17775 S = 2
17776 esel = arg0_len / sel_npatterns = C + Cx
17777 ae = 0 + (esel - 2) * S
17778 = 0 + (C + Cx - 2) * 2
17779 = 2(C-2) + 2Cx
17781 For C >= 4:
17782 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17783 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17784 Since q1 != qe, we cross input vectors.
17785 So return NULL_TREE. */
17787 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17788 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17789 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17791 vec_perm_builder builder (arg0_len, 1, 3);
17792 poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17793 builder_push_elems (builder, mask_elems);
17795 vec_perm_indices sel (builder, 2, arg0_len);
17796 const char *reason;
17797 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17798 ASSERT_TRUE (res == NULL_TREE);
17799 ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17802 /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17803 mask = { 0, len, 1, len + 1, ...} // (2, 2)
17804 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17806 Note that fold_vec_perm_cst will set
17807 res_npatterns = max(4, max(4, 2)) = 4
17808 However after canonicalizing, we will end up with shape (2, 2). */
17810 tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17811 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17812 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17814 vec_perm_builder builder (len, 2, 2);
17815 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17816 builder_push_elems (builder, mask_elems);
17818 vec_perm_indices sel (builder, 2, len);
17819 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17820 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17821 validate_res (2, 2, res, expected_res);
17824 /* Case 6: Test a combination in sel where one pattern is a dup and
17825 the other is a stepped sequence.
17826 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17827 res = { arg0[0], arg0[0], arg0[0],
17828 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17830 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17831 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17832 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17834 vec_perm_builder builder (len, 2, 3);
17835 poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17836 builder_push_elems (builder, mask_elems);
17838 vec_perm_indices sel (builder, 2, len);
17839 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17841 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17842 ARG0(1), ARG0(0), ARG0(2) };
17843 validate_res (2, 3, res, expected_res);
17846 /* Case 7: PR111048: Check that we set arg_npatterns correctly,
17847 when arg0, arg1 and sel have different number of patterns.
17848 arg0 is of shape (1, 1)
17849 arg1 is of shape (4, 1)
17850 sel is of shape (2, 3) = {0, len, 1, len+1, 2, len+2, ...}
17852 In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17853 However,
17854 step = (len+2) - (len+1) = 1
17855 arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17856 Since step is not a multiple of arg_npatterns,
17857 valid_mask_for_fold_vec_perm_cst should return false,
17858 and thus fold_vec_perm_cst should return NULL_TREE. */
17860 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17861 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17862 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17864 vec_perm_builder builder (len, 2, 3);
17865 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17866 builder_push_elems (builder, mask_elems);
17868 vec_perm_indices sel (builder, 2, len);
17869 const char *reason;
17870 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17872 ASSERT_TRUE (res == NULL_TREE);
17873 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17876 /* Case 8: PR111754: When the input vector is not a stepped sequence,
17877 check that the result is not a stepped sequence either, even
17878 if sel has a stepped sequence. */
17880 tree arg0 = build_vec_cst_rand (vmode, 1, 2);
17881 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17883 vec_perm_builder builder (len, 1, 3);
17884 poly_uint64 mask_elems[] = { 0, 1, 2 };
17885 builder_push_elems (builder, mask_elems);
17887 vec_perm_indices sel (builder, 1, len);
17888 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);
17890 tree expected_res[] = { ARG0(0), ARG0(1) };
17891 validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
17894 /* Case 9: If sel doesn't contain a stepped sequence,
17895 check that the result has the same encoding as sel, irrespective
17896 of the shape of the input vectors. */
17898 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17899 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17900 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17902 vec_perm_builder builder (len, 1, 2);
17903 poly_uint64 mask_elems[] = { 0, len };
17904 builder_push_elems (builder, mask_elems);
17906 vec_perm_indices sel (builder, 2, len);
17907 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17909 tree expected_res[] = { ARG0(0), ARG1(0) };
17910 validate_res (sel.encoding ().npatterns (),
17911 sel.encoding ().nelts_per_pattern (), res, expected_res);
17916 /* Test all vectors which contain at least 8 elements. */
17918 static void
17919 test_nunits_min_8 (machine_mode vmode)
17921 for (int i = 0; i < 10; i++)
17923 /* Case 1: sel_npatterns (4) > input npatterns (2)
17924 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17925 res: { arg0[0], arg0[0], arg0[1], arg1[0],
17926 arg0[2], arg0[0], arg0[3], arg1[0],
17927 arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17929 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17930 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17931 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17933 vec_perm_builder builder(len, 4, 3);
17934 poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17935 4, 0, 5, len };
17936 builder_push_elems (builder, mask_elems);
17938 vec_perm_indices sel (builder, 2, len);
17939 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17941 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17942 ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17943 ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17944 validate_res (4, 3, res, expected_res);
17949 /* Test vectors for which nunits[0] <= 4. */
17951 static void
17952 test_nunits_max_4 (machine_mode vmode)
17954 /* Case 1: mask = {0, 4, ...} // (1, 2)
17955 This should return NULL_TREE because the index 4 may choose
17956 from either arg0 or arg1 depending on vector length. */
17958 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17959 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17960 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17962 vec_perm_builder builder (len, 1, 2);
17963 poly_uint64 mask_elems[] = {0, 4};
17964 builder_push_elems (builder, mask_elems);
17966 vec_perm_indices sel (builder, 2, len);
17967 const char *reason;
17968 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17969 ASSERT_TRUE (res == NULL_TREE);
17970 ASSERT_TRUE (reason != NULL);
17971 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17975 #undef ARG0
17976 #undef ARG1
17978 /* Return true if SIZE is of the form C + Cx and C is power of 2. */
17980 static bool
17981 is_simple_vla_size (poly_uint64 size)
17983 if (size.is_constant ()
17984 || !pow2p_hwi (size.coeffs[0]))
17985 return false;
17986 for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17987 if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17988 return false;
17989 return true;
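/* Illustrative: with two coefficients, a poly_uint64 with coeffs
   {4, 4}, i.e. 4 + 4x, is a simple VLA size; {4, 0} is rejected for
   being constant, and {6, 6} is rejected because 6 is not a power
   of two.  */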
17992 /* Execute fold_vec_perm_cst unit tests. */
17994 static void
17995 test ()
17997 machine_mode vnx4si_mode = E_VOIDmode;
17998 machine_mode v4si_mode = E_VOIDmode;
18000 machine_mode vmode;
18001 FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
18003 /* Obtain modes corresponding to VNx4SI and V4SI,
18004 to call mixed mode tests below.
18005 FIXME: Is there a better way to do this? */
18006 if (GET_MODE_INNER (vmode) == SImode)
18008 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
18009 if (is_simple_vla_size (nunits)
18010 && nunits.coeffs[0] == 4)
18011 vnx4si_mode = vmode;
18012 else if (known_eq (nunits, poly_uint64 (4)))
18013 v4si_mode = vmode;
18016 if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
18017 || !targetm.vector_mode_supported_p (vmode))
18018 continue;
18020 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
18021 test_all_nunits (vmode);
18022 if (nunits.coeffs[0] >= 2)
18023 test_nunits_min_2 (vmode);
18024 if (nunits.coeffs[0] >= 4)
18025 test_nunits_min_4 (vmode);
18026 if (nunits.coeffs[0] >= 8)
18027 test_nunits_min_8 (vmode);
18029 if (nunits.coeffs[0] <= 4)
18030 test_nunits_max_4 (vmode);
18033 if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
18034 && targetm.vector_mode_supported_p (vnx4si_mode)
18035 && targetm.vector_mode_supported_p (v4si_mode))
18037 test_vnx4si_v4si (vnx4si_mode, v4si_mode);
18038 test_v4si_vnx4si (v4si_mode, vnx4si_mode);
18041 } // end of test_fold_vec_perm_cst namespace
18043 /* Verify that various binary operations on vectors are folded
18044 correctly. */
18046 static void
18047 test_vector_folding ()
18049 tree inner_type = integer_type_node;
18050 tree type = build_vector_type (inner_type, 4);
18051 tree zero = build_zero_cst (type);
18052 tree one = build_one_cst (type);
18053 tree index = build_index_vector (type, 0, 1);
18055 /* Verify equality tests that return a scalar boolean result. */
18056 tree res_type = boolean_type_node;
18057 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
18058 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
18059 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
18060 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
18061 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
18062 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18063 index, one)));
18064 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
18065 index, index)));
18066 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18067 index, index)));
18070 /* Verify folding of VEC_DUPLICATE_EXPRs. */
18072 static void
18073 test_vec_duplicate_folding ()
18075 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
18076 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
18077 /* This will be 1 if VEC_MODE isn't a vector mode. */
18078 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
18080 tree type = build_vector_type (ssizetype, nunits);
18081 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
18082 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
18083 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
18086 /* Run all of the selftests within this file. */
18088 void
18089 fold_const_cc_tests ()
18091 test_arithmetic_folding ();
18092 test_vector_folding ();
18093 test_vec_duplicate_folding ();
18094 test_fold_vec_perm_cst::test ();
18097 } // namespace selftest
18099 #endif /* CHECKING_P */