/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#define INCLUDE_ALGORITHM
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"

/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding a C++ manifestly-constant-evaluated context; zero
   otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 equals to X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 equals to X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	  /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a>  */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int min = widest_int::from (r.lower_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (min == wi::to_widest (exp1))
		code = MAX_EXPR;
	    }
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 equals to X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 equals to X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	  /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MIN_RANGE<a>-1, a>  */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int max = widest_int::from (r.upper_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (max == wi::to_widest (exp1))
		code = MIN_EXPR;
	    }
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
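
/* Illustrative example (a sketch, not part of the original source): for a
   signed int X, where NaNs and signed zeros are irrelevant,

     (X <= 41) ? X : 42

   has CMP == LE_EXPR with EXP1 == EXP3 - 1, so CMP is canonicalized to
   LT_EXPR, and the function returns MIN_EXPR: the whole COND_EXPR is
   MIN (X, 42).  */
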
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
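
/* For example (illustrative only): with ARG1 = 12 and ARG2 = 4 this
   returns the constant 3; with ARG1 = 13 and ARG2 = 4 the division is
   inexact, so NULL_TREE is returned.  */
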
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
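
/* Usage sketch (illustrative only; the caller below is hypothetical, not
   part of this file).  Code that folds speculatively brackets the work
   with the deferral API so that -Wstrict-overflow only fires when the
   folded result is actually used:  */
#if 0
void
example_speculative_fold (tree type, tree op0, tree op1, gimple *stmt)
{
  fold_defer_overflow_warnings ();
  tree res = fold_binary (PLUS_EXPR, type, op0, op1);
  /* ISSUE is true only when RES will be used; CODE 0 means "use the
     deferred warning level".  */
  fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);
}
#endif
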
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
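
/* For example (illustrative only): sin is odd, so -sin(x) may be folded
   to sin(-x).  rint is odd under round-to-nearest, but with
   -frounding-math a dynamic rounding mode such as round-toward-+inf
   breaks the identity (rint(0.5) == 1 while -rint(-0.5) == 0), hence the
   !flag_rounding_math guard above.  */
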
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
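
/* For example (illustrative only): in a signed 32-bit type, -2147483648
   has only its sign bit set and there is no +2147483648, so negating it
   overflows; the function returns false for INT_MIN and true for every
   other signed value.  */
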
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
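
/* Illustrative example (not part of the original source): for signed,
   non-wrapping ints A and B, -(A - B) is cheap because it can become
   B - A with no new overflow, but -(A + B) is accepted only when one
   operand is itself cheaply negatable, since it must be rewritten as
   (-B) - A or (-A) - B.  */
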
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
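
/* Illustrative example (a sketch, not part of the original source):
   splitting IN = X + 3 with CODE == PLUS_EXPR stores 3 in *LITP and
   returns the variable part X; splitting IN = X - 3 stores 3 in
   *MINUS_LITP instead, and with NEGATE_P the positive and negative
   slots swap roles.  */
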
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
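
/* Usage sketch (illustrative only; the caller below is hypothetical).
   The helper reports overflow instead of deciding what to do about it:  */
#if 0
void
example_wide_int_add (const wide_int &a, const wide_int &b)
{
  wide_int sum;
  wi::overflow_type ovf;
  if (wide_int_binop (sum, PLUS_EXPR, a, b, SIGNED, &ovf)
      && ovf == wi::OVF_NONE)
    {
      /* SUM holds the exact signed result.  */
    }
}
#endif
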
/* Return true if we know which of ARG1 and ARG2 is smaller or equal,
   and store the minimum value in RES.  */
bool
can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
{
  if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
    {
      res = wi::to_poly_wide (arg1);
      return true;
    }
  else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
    {
      res = wi::to_poly_wide (arg2);
      return true;
    }

  return false;
}

/* Combine two poly ints ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    case MIN_EXPR:
      if (!can_min_p (arg1, arg2, res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
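
/* For example (illustrative only): adding the INTEGER_CSTs INT_MAX and 1
   in a signed type wraps in the wide-int arithmetic, so the constant comes
   back from force_fit_type with TREE_OVERFLOW set; callers such as
   size_binop pass OVERFLOWABLE == -1 to request the overflow flag even
   for unsigned sizetype calculations.  */
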
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
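
/* For example (illustrative only): shifts distribute over addition in
   their first operand, since (A + B) << C == (A << C) + (B << C) in
   modular arithmetic, but not in their second: A << (B + C) is
   (A << B) << C, not (A << B) + (A << C).  Hence LSHIFT_EXPR answers
   true only for OPNO == 1.  */
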
/* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
   is the other operand.  Try to use the value of OP to simplify the
   operation in one step, without having to process individual elements.  */
static tree
simplify_const_binop (tree_code code, tree op, tree other_op,
		      int index ATTRIBUTE_UNUSED)
{
  /* AND, IOR as well as XOR with a zerop can be simplified directly.  */
  if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
    {
      if (integer_zerop (other_op))
	{
	  if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
	    return op;
	  else if (code == BIT_AND_EXPR)
	    return other_op;
	}
    }

  return NULL_TREE;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  tree simplified;
  if ((simplified = simplify_const_binop (code, arg1, arg2, 0)))
    return simplified;

  if (commutative_tree_code (code)
      && (simplified = simplify_const_binop (code, arg2, arg1, 1)))
    return simplified;

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
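
/* Illustrative example (a sketch, not part of the original source): adding
   the stepped encoding { 1, 2, 3, ... } to the stepped encoding
   { 10, 20, 30, ... } can be done on the encoded elements alone, giving
   { 11, 22, 33, ... }, because PLUS_EXPR preserves the constant
   difference between consecutive elements that the stepped encoding
   relies on.  */
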
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

1890 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1891 Return zero if computing the constants is not possible. */
1893 tree
1894 const_unop (enum tree_code code, tree type, tree arg0)
1896 /* Don't perform the operation, other than NEGATE and ABS, if
1897 flag_signaling_nans is on and the operand is a signaling NaN. */
1898 if (TREE_CODE (arg0) == REAL_CST
1899 && HONOR_SNANS (arg0)
1900 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1901 && code != NEGATE_EXPR
1902 && code != ABS_EXPR
1903 && code != ABSU_EXPR)
1904 return NULL_TREE;
1906 switch (code)
1908 CASE_CONVERT:
1909 case FLOAT_EXPR:
1910 case FIX_TRUNC_EXPR:
1911 case FIXED_CONVERT_EXPR:
1912 return fold_convert_const (code, type, arg0);
1914 case ADDR_SPACE_CONVERT_EXPR:
1915 /* If the source address is 0, and the source address space
1916 cannot have a valid object at 0, fold to dest type null. */
1917 if (integer_zerop (arg0)
1918 && !(targetm.addr_space.zero_address_valid
1919 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1920 return fold_convert_const (code, type, arg0);
1921 break;
1923 case VIEW_CONVERT_EXPR:
1924 return fold_view_convert_expr (type, arg0);
1926 case NEGATE_EXPR:
1928 /* Can't call fold_negate_const directly here as that doesn't
1929 handle all cases and we might not be able to negate some
1930 constants. */
1931 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1932 if (tem && CONSTANT_CLASS_P (tem))
1933 return tem;
1934 break;
1937 case ABS_EXPR:
1938 case ABSU_EXPR:
1939 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1940 return fold_abs_const (arg0, type);
1941 break;
1943 case CONJ_EXPR:
1944 if (TREE_CODE (arg0) == COMPLEX_CST)
1946 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1947 TREE_TYPE (type));
1948 return build_complex (type, TREE_REALPART (arg0), ipart);
1950 break;
1952 case BIT_NOT_EXPR:
1953 if (TREE_CODE (arg0) == INTEGER_CST)
1954 return fold_not_const (arg0, type);
1955 else if (POLY_INT_CST_P (arg0))
1956 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1957 /* Perform BIT_NOT_EXPR on each element individually. */
1958 else if (TREE_CODE (arg0) == VECTOR_CST)
1960 tree elem;
1962 /* This can cope with stepped encodings because ~x == -1 - x. */
1963 tree_vector_builder elements;
1964 elements.new_unary_operation (type, arg0, true);
1965 unsigned int i, count = elements.encoded_nelts ();
1966 for (i = 0; i < count; ++i)
1968 elem = VECTOR_CST_ELT (arg0, i);
1969 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1970 if (elem == NULL_TREE)
1971 break;
1972 elements.quick_push (elem);
1974 if (i == count)
1975 return elements.build ();
1977 break;
1979 case TRUTH_NOT_EXPR:
1980 if (TREE_CODE (arg0) == INTEGER_CST)
1981 return constant_boolean_node (integer_zerop (arg0), type);
1982 break;
1984 case REALPART_EXPR:
1985 if (TREE_CODE (arg0) == COMPLEX_CST)
1986 return fold_convert (type, TREE_REALPART (arg0));
1987 break;
1989 case IMAGPART_EXPR:
1990 if (TREE_CODE (arg0) == COMPLEX_CST)
1991 return fold_convert (type, TREE_IMAGPART (arg0));
1992 break;
1994 case VEC_UNPACK_LO_EXPR:
1995 case VEC_UNPACK_HI_EXPR:
1996 case VEC_UNPACK_FLOAT_LO_EXPR:
1997 case VEC_UNPACK_FLOAT_HI_EXPR:
1998 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1999 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
2001 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
2002 enum tree_code subcode;
2004 if (TREE_CODE (arg0) != VECTOR_CST)
2005 return NULL_TREE;
2007 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
2008 return NULL_TREE;
2009 out_nelts = in_nelts / 2;
2010 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
2012 unsigned int offset = 0;
2013 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
2014 || code == VEC_UNPACK_FLOAT_LO_EXPR
2015 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
2016 offset = out_nelts;
2018 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
2019 subcode = NOP_EXPR;
2020 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
2021 || code == VEC_UNPACK_FLOAT_HI_EXPR)
2022 subcode = FLOAT_EXPR;
2023 else
2024 subcode = FIX_TRUNC_EXPR;
2026 tree_vector_builder elts (type, out_nelts, 1);
2027 for (i = 0; i < out_nelts; i++)
2029 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
2030 VECTOR_CST_ELT (arg0, i + offset));
2031 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
2032 return NULL_TREE;
2033 elts.quick_push (elt);
2036 return elts.build ();
2039 case VEC_DUPLICATE_EXPR:
2040 if (CONSTANT_CLASS_P (arg0))
2041 return build_vector_from_val (type, arg0);
2042 return NULL_TREE;
2044 default:
2045 break;
2048 return NULL_TREE;
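/* Illustrative sketch (not part of the original source): given the
   standard integer_type_node, const_unop folds a constant bitwise NOT
   entirely at compile time, e.g.

     tree five = build_int_cst (integer_type_node, 5);
     tree notf = const_unop (BIT_NOT_EXPR, integer_type_node, five);

   notf is then the INTEGER_CST -6 (~5 in two's complement), while a
   non-constant operand simply yields NULL_TREE.  */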
2051 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2052 indicates which particular sizetype to create. */
2054 tree
2055 size_int_kind (poly_int64 number, enum size_type_kind kind)
2057 return build_int_cst (sizetype_tab[(int) kind], number);
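/* For example (a sketch, not in the original source), the size_int,
   ssize_int and bitsize_int macros in tree.h funnel into this routine
   with the corresponding size_type_kind, so size_int (8) is just
   size_int_kind (8, stk_sizetype).  */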
2060 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2061 is a tree code. The type of the result is taken from the operands.
2062 Both must be equivalent integer types, ala int_binop_types_match_p.
2063 If the operands are constant, so is the result. */
2065 tree
2066 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2068 tree type = TREE_TYPE (arg0);
2070 if (arg0 == error_mark_node || arg1 == error_mark_node)
2071 return error_mark_node;
2073 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* Handle the special case of two poly_int constants faster. */
2077 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2079 /* And some specific cases even faster than that. */
2080 if (code == PLUS_EXPR)
2082 if (integer_zerop (arg0)
2083 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2084 return arg1;
2085 if (integer_zerop (arg1)
2086 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2087 return arg0;
2089 else if (code == MINUS_EXPR)
2091 if (integer_zerop (arg1)
2092 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2093 return arg0;
2095 else if (code == MULT_EXPR)
2097 if (integer_onep (arg0)
2098 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2099 return arg1;
2102 /* Handle general case of two integer constants. For sizetype
2103 constant calculations we always want to know about overflow,
2104 even in the unsigned case. */
2105 tree res = int_const_binop (code, arg0, arg1, -1);
2106 if (res != NULL_TREE)
2107 return res;
2110 return fold_build2_loc (loc, code, type, arg0, arg1);
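/* Illustrative sketch (not part of the original source): with two
   sizetype constants the fast path above folds immediately,

     tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   so sz is the sizetype INTEGER_CST 12, with overflow tracked even
   though sizetype is unsigned.  */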
2113 /* Given two values, either both of sizetype or both of bitsizetype,
2114 compute the difference between the two values. Return the value
2115 in signed type corresponding to the type of the operands. */
2117 tree
2118 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2120 tree type = TREE_TYPE (arg0);
2121 tree ctype;
2123 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2124 TREE_TYPE (arg1)));
2126 /* If the type is already signed, just do the simple thing. */
2127 if (!TYPE_UNSIGNED (type))
2128 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2130 if (type == sizetype)
2131 ctype = ssizetype;
2132 else if (type == bitsizetype)
2133 ctype = sbitsizetype;
2134 else
2135 ctype = signed_type_for (type);
2137 /* If either operand is not a constant, do the conversions to the signed
2138 type and subtract. The hardware will do the right thing with any
2139 overflow in the subtraction. */
2140 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2141 return size_binop_loc (loc, MINUS_EXPR,
2142 fold_convert_loc (loc, ctype, arg0),
2143 fold_convert_loc (loc, ctype, arg1));
2145 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2146 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2147 overflow) and negate (which can't either). Special-case a result
2148 of zero while we're here. */
2149 if (tree_int_cst_equal (arg0, arg1))
2150 return build_int_cst (ctype, 0);
2151 else if (tree_int_cst_lt (arg1, arg0))
2152 return fold_convert_loc (loc, ctype,
2153 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2154 else
2155 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2156 fold_convert_loc (loc, ctype,
2157 size_binop_loc (loc,
2158 MINUS_EXPR,
2159 arg1, arg0)));
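/* Illustrative sketch (not part of the original source): because the
   result type is the signed counterpart, the difference can be
   negative even for unsigned sizetype inputs, e.g.

     tree d = size_diffop (size_int (4), size_int (12));

   yields the ssizetype INTEGER_CST -8, computed as -(12 - 4) so that
   the subtraction itself never wraps.  */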
2162 /* A subroutine of fold_convert_const handling conversions of an
2163 INTEGER_CST to another integer type. */
2165 static tree
2166 fold_convert_const_int_from_int (tree type, const_tree arg1)
2168 /* Given an integer constant, make new constant with new type,
2169 appropriately sign-extended or truncated. Use widest_int
2170 so that any extension is done according to ARG1's type. */
2171 tree arg1_type = TREE_TYPE (arg1);
2172 unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2173 return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2174 TYPE_SIGN (arg1_type)),
2175 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2176 TREE_OVERFLOW (arg1));
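/* For instance (a sketch, not in the original source), narrowing the
   INTEGER_CST 300 to signed char keeps the low bits, giving 44
   (300 mod 256), and force_fit_type marks TREE_OVERFLOW on the new
   node rather than refusing the conversion, since the original value
   does not fit.  */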
2179 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2180 to an integer type. */
2182 static tree
2183 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2185 bool overflow = false;
2186 tree t;
2188 /* The following code implements the floating point to integer
2189 conversion rules required by the Java Language Specification,
2190 that IEEE NaNs are mapped to zero and values that overflow
2191 the target precision saturate, i.e. values greater than
2192 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2193 are mapped to INT_MIN. These semantics are allowed by the
2194 C and C++ standards that simply state that the behavior of
2195 FP-to-integer conversion is unspecified upon overflow. */
2197 wide_int val;
2198 REAL_VALUE_TYPE r;
2199 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2201 switch (code)
2203 case FIX_TRUNC_EXPR:
2204 real_trunc (&r, VOIDmode, &x);
2205 break;
2207 default:
2208 gcc_unreachable ();
2211 /* If R is NaN, return zero and show we have an overflow. */
2212 if (REAL_VALUE_ISNAN (r))
2214 overflow = true;
2215 val = wi::zero (TYPE_PRECISION (type));
2218 /* See if R is less than the lower bound or greater than the
2219 upper bound. */
2221 if (! overflow)
2223 tree lt = TYPE_MIN_VALUE (type);
2224 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2225 if (real_less (&r, &l))
2227 overflow = true;
2228 val = wi::to_wide (lt);
2232 if (! overflow)
2234 tree ut = TYPE_MAX_VALUE (type);
2235 if (ut)
2237 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2238 if (real_less (&u, &r))
2240 overflow = true;
2241 val = wi::to_wide (ut);
2246 if (! overflow)
2247 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2249 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2250 return t;
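/* Illustrative sketch (not part of the original source): truncating
   the REAL_CST 3.75 to int yields 3, and -2.5 yields -2 (rounding
   toward zero); a NaN input folds to 0, and 1.0e30 converted to a
   32-bit int saturates at INT_MAX, both with TREE_OVERFLOW set as
   described above.  */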
2253 /* A subroutine of fold_convert_const handling conversions of a
2254 FIXED_CST to an integer type. */
2256 static tree
2257 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2259 tree t;
2260 double_int temp, temp_trunc;
2261 scalar_mode mode;
2263 /* Right shift FIXED_CST to temp by fbit. */
2264 temp = TREE_FIXED_CST (arg1).data;
2265 mode = TREE_FIXED_CST (arg1).mode;
2266 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2268 temp = temp.rshift (GET_MODE_FBIT (mode),
2269 HOST_BITS_PER_DOUBLE_INT,
2270 SIGNED_FIXED_POINT_MODE_P (mode));
2272 /* Left shift temp to temp_trunc by fbit. */
2273 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2274 HOST_BITS_PER_DOUBLE_INT,
2275 SIGNED_FIXED_POINT_MODE_P (mode));
2277 else
2279 temp = double_int_zero;
2280 temp_trunc = double_int_zero;
2283 /* If FIXED_CST is negative, we need to round the value toward 0.
2284 We do this by adding 1 to temp when the discarded fractional bits are nonzero. */
2285 if (SIGNED_FIXED_POINT_MODE_P (mode)
2286 && temp_trunc.is_negative ()
2287 && TREE_FIXED_CST (arg1).data != temp_trunc)
2288 temp += double_int_one;
2290 /* Given a fixed-point constant, make new constant with new type,
2291 appropriately sign-extended or truncated. */
2292 t = force_fit_type (type, temp, -1,
2293 (temp.is_negative ()
2294 && (TYPE_UNSIGNED (type)
2295 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2296 | TREE_OVERFLOW (arg1));
2298 return t;
2301 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2302 to another floating point type. */
2304 static tree
2305 fold_convert_const_real_from_real (tree type, const_tree arg1)
2307 REAL_VALUE_TYPE value;
2308 tree t;
2310 /* If the underlying modes are the same, simply treat the conversion
2311 as a copy and rebuild the constant from the TREE_REAL_CST
2312 information and the given type. */
2313 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2315 t = build_real (type, TREE_REAL_CST (arg1));
2316 return t;
2319 /* Don't perform the operation if flag_signaling_nans is on
2320 and the operand is a signaling NaN. */
2321 if (HONOR_SNANS (arg1)
2322 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2323 return NULL_TREE;
2325 /* With flag_rounding_math we should respect the current rounding mode
2326 unless the conversion is exact. */
2327 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2328 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2329 return NULL_TREE;
2331 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2332 t = build_real (type, value);
2334 /* If converting an infinity or NAN to a representation that doesn't
2335 have one, set the overflow bit so that we can produce some kind of
2336 error message at the appropriate point if necessary. It's not the
2337 most user-friendly message, but it's better than nothing. */
2338 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2339 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2340 TREE_OVERFLOW (t) = 1;
2341 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2342 && !MODE_HAS_NANS (TYPE_MODE (type)))
2343 TREE_OVERFLOW (t) = 1;
2344 /* Regular overflow: the conversion produced an infinity in a mode
2345 that can't represent it. */
2346 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2347 && REAL_VALUE_ISINF (value)
2348 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2349 TREE_OVERFLOW (t) = 1;
2350 else
2351 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2352 return t;
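/* For example (a sketch, not in the original source), converting the
   double REAL_CST 1.5 to float is exact and folds unconditionally,
   whereas converting double 0.1 under -frounding-math is rejected
   (NULL_TREE) because the truncation is inexact and the runtime
   rounding mode must be respected.  */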
2355 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2356 to a floating point type. */
2358 static tree
2359 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2361 REAL_VALUE_TYPE value;
2362 tree t;
2364 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2365 &TREE_FIXED_CST (arg1));
2366 t = build_real (type, value);
2368 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2369 return t;
2372 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2373 to another fixed-point type. */
2375 static tree
2376 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2378 FIXED_VALUE_TYPE value;
2379 tree t;
2380 bool overflow_p;
2382 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2383 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2384 t = build_fixed (type, value);
2386 /* Propagate overflow flags. */
2387 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 return t;
2392 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2393 to a fixed-point type. */
2395 static tree
2396 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2398 FIXED_VALUE_TYPE value;
2399 tree t;
2400 bool overflow_p;
2401 double_int di;
2403 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2405 di.low = TREE_INT_CST_ELT (arg1, 0);
2406 if (TREE_INT_CST_NUNITS (arg1) == 1)
2407 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2408 else
2409 di.high = TREE_INT_CST_ELT (arg1, 1);
2411 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2412 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2413 TYPE_SATURATING (type));
2414 t = build_fixed (type, value);
2416 /* Propagate overflow flags. */
2417 if (overflow_p | TREE_OVERFLOW (arg1))
2418 TREE_OVERFLOW (t) = 1;
2419 return t;
2422 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2423 to a fixed-point type. */
2425 static tree
2426 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2428 FIXED_VALUE_TYPE value;
2429 tree t;
2430 bool overflow_p;
2432 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2433 &TREE_REAL_CST (arg1),
2434 TYPE_SATURATING (type));
2435 t = build_fixed (type, value);
2437 /* Propagate overflow flags. */
2438 if (overflow_p | TREE_OVERFLOW (arg1))
2439 TREE_OVERFLOW (t) = 1;
2440 return t;
2443 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2444 type TYPE. If no simplification can be done return NULL_TREE. */
2446 static tree
2447 fold_convert_const (enum tree_code code, tree type, tree arg1)
2449 tree arg_type = TREE_TYPE (arg1);
2450 if (arg_type == type)
2451 return arg1;
2453 /* We can't widen types, since the runtime value could overflow the
2454 original type before being extended to the new type. */
2455 if (POLY_INT_CST_P (arg1)
2456 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2457 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2458 return build_poly_int_cst (type,
2459 poly_wide_int::from (poly_int_cst_value (arg1),
2460 TYPE_PRECISION (type),
2461 TYPE_SIGN (arg_type)));
2463 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2464 || TREE_CODE (type) == OFFSET_TYPE)
2466 if (TREE_CODE (arg1) == INTEGER_CST)
2467 return fold_convert_const_int_from_int (type, arg1);
2468 else if (TREE_CODE (arg1) == REAL_CST)
2469 return fold_convert_const_int_from_real (code, type, arg1);
2470 else if (TREE_CODE (arg1) == FIXED_CST)
2471 return fold_convert_const_int_from_fixed (type, arg1);
2473 else if (SCALAR_FLOAT_TYPE_P (type))
2475 if (TREE_CODE (arg1) == INTEGER_CST)
2477 tree res = build_real_from_int_cst (type, arg1);
2478 /* Avoid the folding if flag_rounding_math is on and the
2479 conversion is not exact. */
2480 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2482 bool fail = false;
2483 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2484 TYPE_PRECISION (TREE_TYPE (arg1)));
2485 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2486 return NULL_TREE;
2488 return res;
2490 else if (TREE_CODE (arg1) == REAL_CST)
2491 return fold_convert_const_real_from_real (type, arg1);
2492 else if (TREE_CODE (arg1) == FIXED_CST)
2493 return fold_convert_const_real_from_fixed (type, arg1);
2495 else if (FIXED_POINT_TYPE_P (type))
2497 if (TREE_CODE (arg1) == FIXED_CST)
2498 return fold_convert_const_fixed_from_fixed (type, arg1);
2499 else if (TREE_CODE (arg1) == INTEGER_CST)
2500 return fold_convert_const_fixed_from_int (type, arg1);
2501 else if (TREE_CODE (arg1) == REAL_CST)
2502 return fold_convert_const_fixed_from_real (type, arg1);
2504 else if (VECTOR_TYPE_P (type))
2506 if (TREE_CODE (arg1) == VECTOR_CST
2507 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2509 tree elttype = TREE_TYPE (type);
2510 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2511 /* We can't handle steps directly when extending, since the
2512 values need to wrap at the original precision first. */
2513 bool step_ok_p
2514 = (INTEGRAL_TYPE_P (elttype)
2515 && INTEGRAL_TYPE_P (arg1_elttype)
2516 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2517 tree_vector_builder v;
2518 if (!v.new_unary_operation (type, arg1, step_ok_p))
2519 return NULL_TREE;
2520 unsigned int len = v.encoded_nelts ();
2521 for (unsigned int i = 0; i < len; ++i)
2523 tree elt = VECTOR_CST_ELT (arg1, i);
2524 tree cvt = fold_convert_const (code, elttype, elt);
2525 if (cvt == NULL_TREE)
2526 return NULL_TREE;
2527 v.quick_push (cvt);
2529 return v.build ();
2532 return NULL_TREE;
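/* Illustrative sketch (not part of the original source) of the
   dispatch above:

     fold_convert_const (FLOAT_EXPR, double_type_node,
			 build_int_cst (integer_type_node, 3));

   returns the REAL_CST 3.0 (via build_real_from_int_cst), while an
   unsupported combination simply returns NULL_TREE.  */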
2535 /* Construct a vector of zero elements of vector type TYPE. */
2537 static tree
2538 build_zero_vector (tree type)
2540 tree t;
2542 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2543 return build_vector_from_val (type, t);
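/* E.g. (a sketch, not in the original source), for a four-element int
   vector type this produces the VECTOR_CST { 0, 0, 0, 0 } by
   converting integer_zero_node once and duplicating it.  */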
2546 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2548 bool
2549 fold_convertible_p (const_tree type, const_tree arg)
2551 const_tree orig = TREE_TYPE (arg);
2553 if (type == orig)
2554 return true;
2556 if (TREE_CODE (arg) == ERROR_MARK
2557 || TREE_CODE (type) == ERROR_MARK
2558 || TREE_CODE (orig) == ERROR_MARK)
2559 return false;
2561 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2562 return true;
2564 switch (TREE_CODE (type))
2566 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2567 case POINTER_TYPE: case REFERENCE_TYPE:
2568 case OFFSET_TYPE:
2569 return (INTEGRAL_TYPE_P (orig)
2570 || (POINTER_TYPE_P (orig)
2571 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2572 || TREE_CODE (orig) == OFFSET_TYPE);
2574 case REAL_TYPE:
2575 case FIXED_POINT_TYPE:
2576 case VOID_TYPE:
2577 return TREE_CODE (type) == TREE_CODE (orig);
2579 case VECTOR_TYPE:
2580 return (VECTOR_TYPE_P (orig)
2581 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2582 TYPE_VECTOR_SUBPARTS (orig))
2583 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2585 default:
2586 return false;
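/* Illustrative sketch (not part of the original source):
   fold_convertible_p (unsigned_type_node, <int expression>) is true,
   since any integral type converts to an integer type with a NOP,
   while an int-to-float conversion is rejected because it needs a
   real FLOAT_EXPR rather than a NOP_EXPR.  */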
2590 /* Convert expression ARG to type TYPE. Used by the middle-end for
2591 simple conversions in preference to calling the front-end's convert. */
2593 tree
2594 fold_convert_loc (location_t loc, tree type, tree arg)
2596 tree orig = TREE_TYPE (arg);
2597 tree tem;
2599 if (type == orig)
2600 return arg;
2602 if (TREE_CODE (arg) == ERROR_MARK
2603 || TREE_CODE (type) == ERROR_MARK
2604 || TREE_CODE (orig) == ERROR_MARK)
2605 return error_mark_node;
2607 switch (TREE_CODE (type))
2609 case POINTER_TYPE:
2610 case REFERENCE_TYPE:
2611 /* Handle conversions between pointers to different address spaces. */
2612 if (POINTER_TYPE_P (orig)
2613 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2614 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2615 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2616 /* fall through */
2618 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2619 case OFFSET_TYPE: case BITINT_TYPE:
2620 if (TREE_CODE (arg) == INTEGER_CST)
2622 tem = fold_convert_const (NOP_EXPR, type, arg);
2623 if (tem != NULL_TREE)
2624 return tem;
2626 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2627 || TREE_CODE (orig) == OFFSET_TYPE)
2628 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2629 if (TREE_CODE (orig) == COMPLEX_TYPE)
2630 return fold_convert_loc (loc, type,
2631 fold_build1_loc (loc, REALPART_EXPR,
2632 TREE_TYPE (orig), arg));
2633 gcc_assert (VECTOR_TYPE_P (orig)
2634 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2637 case REAL_TYPE:
2638 if (TREE_CODE (arg) == INTEGER_CST)
2640 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2641 if (tem != NULL_TREE)
2642 return tem;
2644 else if (TREE_CODE (arg) == REAL_CST)
2646 tem = fold_convert_const (NOP_EXPR, type, arg);
2647 if (tem != NULL_TREE)
2648 return tem;
2650 else if (TREE_CODE (arg) == FIXED_CST)
2652 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2653 if (tem != NULL_TREE)
2654 return tem;
2657 switch (TREE_CODE (orig))
2659 case INTEGER_TYPE: case BITINT_TYPE:
2660 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2661 case POINTER_TYPE: case REFERENCE_TYPE:
2662 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2664 case REAL_TYPE:
2665 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2667 case FIXED_POINT_TYPE:
2668 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2670 case COMPLEX_TYPE:
2671 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 return fold_convert_loc (loc, type, tem);
2674 default:
2675 gcc_unreachable ();
2678 case FIXED_POINT_TYPE:
2679 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2680 || TREE_CODE (arg) == REAL_CST)
2682 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2683 if (tem != NULL_TREE)
2684 goto fold_convert_exit;
2687 switch (TREE_CODE (orig))
2689 case FIXED_POINT_TYPE:
2690 case INTEGER_TYPE:
2691 case ENUMERAL_TYPE:
2692 case BOOLEAN_TYPE:
2693 case REAL_TYPE:
2694 case BITINT_TYPE:
2695 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2697 case COMPLEX_TYPE:
2698 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2699 return fold_convert_loc (loc, type, tem);
2701 default:
2702 gcc_unreachable ();
2705 case COMPLEX_TYPE:
2706 switch (TREE_CODE (orig))
2708 case INTEGER_TYPE: case BITINT_TYPE:
2709 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2710 case POINTER_TYPE: case REFERENCE_TYPE:
2711 case REAL_TYPE:
2712 case FIXED_POINT_TYPE:
2713 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2714 fold_convert_loc (loc, TREE_TYPE (type), arg),
2715 fold_convert_loc (loc, TREE_TYPE (type),
2716 integer_zero_node));
2717 case COMPLEX_TYPE:
2719 tree rpart, ipart;
2721 if (TREE_CODE (arg) == COMPLEX_EXPR)
2723 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2724 TREE_OPERAND (arg, 0));
2725 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2726 TREE_OPERAND (arg, 1));
2727 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2730 arg = save_expr (arg);
2731 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2732 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2733 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2734 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2735 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2738 default:
2739 gcc_unreachable ();
2742 case VECTOR_TYPE:
2743 if (integer_zerop (arg))
2744 return build_zero_vector (type);
2745 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2746 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2747 || VECTOR_TYPE_P (orig));
2748 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2750 case VOID_TYPE:
2751 tem = fold_ignored_result (arg);
2752 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2754 default:
2755 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2756 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2757 gcc_unreachable ();
2759 fold_convert_exit:
2760 tem = protected_set_expr_location_unshare (tem, loc);
2761 return tem;
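/* For example (a sketch, not in the original source),
   fold_convert (double_type_node, integer_one_node) folds directly to
   the REAL_CST 1.0, whereas converting a non-constant int operand
   builds a FLOAT_EXPR for later folding.  */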
2764 /* Return false if expr can be assumed not to be an lvalue, true
2765 otherwise. */
2767 static bool
2768 maybe_lvalue_p (const_tree x)
2770 /* We only need to wrap lvalue tree codes. */
2771 switch (TREE_CODE (x))
2773 case VAR_DECL:
2774 case PARM_DECL:
2775 case RESULT_DECL:
2776 case LABEL_DECL:
2777 case FUNCTION_DECL:
2778 case SSA_NAME:
2779 case COMPOUND_LITERAL_EXPR:
2781 case COMPONENT_REF:
2782 case MEM_REF:
2783 case INDIRECT_REF:
2784 case ARRAY_REF:
2785 case ARRAY_RANGE_REF:
2786 case BIT_FIELD_REF:
2787 case OBJ_TYPE_REF:
2789 case REALPART_EXPR:
2790 case IMAGPART_EXPR:
2791 case PREINCREMENT_EXPR:
2792 case PREDECREMENT_EXPR:
2793 case SAVE_EXPR:
2794 case TRY_CATCH_EXPR:
2795 case WITH_CLEANUP_EXPR:
2796 case COMPOUND_EXPR:
2797 case MODIFY_EXPR:
2798 case TARGET_EXPR:
2799 case COND_EXPR:
2800 case BIND_EXPR:
2801 case VIEW_CONVERT_EXPR:
2802 break;
2804 default:
2805 /* Assume the worst for front-end tree codes. */
2806 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2807 break;
2808 return false;
2811 return true;
2814 /* Return an expr equal to X but certainly not valid as an lvalue. */
2816 tree
2817 non_lvalue_loc (location_t loc, tree x)
2819 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2820 us. */
2821 if (in_gimple_form)
2822 return x;
2824 if (! maybe_lvalue_p (x))
2825 return x;
2826 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
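/* For instance (a sketch, not in the original source), wrapping a
   VAR_DECL yields NON_LVALUE_EXPR <v> so the result cannot be
   assigned to, while constants such as INTEGER_CSTs are returned
   unchanged because maybe_lvalue_p already rejects them.  */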
2829 /* Given a tree comparison code, return the code that is the logical inverse.
2830 It is generally not safe to do this for floating-point comparisons, except
2831 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2832 ERROR_MARK in this case. */
2834 enum tree_code
2835 invert_tree_comparison (enum tree_code code, bool honor_nans)
2837 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2838 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2839 return ERROR_MARK;
2841 switch (code)
2843 case EQ_EXPR:
2844 return NE_EXPR;
2845 case NE_EXPR:
2846 return EQ_EXPR;
2847 case GT_EXPR:
2848 return honor_nans ? UNLE_EXPR : LE_EXPR;
2849 case GE_EXPR:
2850 return honor_nans ? UNLT_EXPR : LT_EXPR;
2851 case LT_EXPR:
2852 return honor_nans ? UNGE_EXPR : GE_EXPR;
2853 case LE_EXPR:
2854 return honor_nans ? UNGT_EXPR : GT_EXPR;
2855 case LTGT_EXPR:
2856 return UNEQ_EXPR;
2857 case UNEQ_EXPR:
2858 return LTGT_EXPR;
2859 case UNGT_EXPR:
2860 return LE_EXPR;
2861 case UNGE_EXPR:
2862 return LT_EXPR;
2863 case UNLT_EXPR:
2864 return GE_EXPR;
2865 case UNLE_EXPR:
2866 return GT_EXPR;
2867 case ORDERED_EXPR:
2868 return UNORDERED_EXPR;
2869 case UNORDERED_EXPR:
2870 return ORDERED_EXPR;
2871 default:
2872 gcc_unreachable ();
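/* Illustrative sketch (not part of the original source): under
   -fno-trapping-math, invert_tree_comparison (LT_EXPR, true) is
   UNGE_EXPR, since !(x < y) must also hold when the operands are
   unordered; with honor_nans false it is plain GE_EXPR; and with
   trapping math (the default) it punts with ERROR_MARK instead.  */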
2876 /* Similar, but return the comparison that results if the operands are
2877 swapped. This is safe for floating-point. */
2879 enum tree_code
2880 swap_tree_comparison (enum tree_code code)
2882 switch (code)
2884 case EQ_EXPR:
2885 case NE_EXPR:
2886 case ORDERED_EXPR:
2887 case UNORDERED_EXPR:
2888 case LTGT_EXPR:
2889 case UNEQ_EXPR:
2890 return code;
2891 case GT_EXPR:
2892 return LT_EXPR;
2893 case GE_EXPR:
2894 return LE_EXPR;
2895 case LT_EXPR:
2896 return GT_EXPR;
2897 case LE_EXPR:
2898 return GE_EXPR;
2899 case UNGT_EXPR:
2900 return UNLT_EXPR;
2901 case UNGE_EXPR:
2902 return UNLE_EXPR;
2903 case UNLT_EXPR:
2904 return UNGT_EXPR;
2905 case UNLE_EXPR:
2906 return UNGE_EXPR;
2907 default:
2908 gcc_unreachable ();
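/* E.g. (a sketch, not in the original source),
   swap_tree_comparison (GT_EXPR) is LT_EXPR because x > y and y < x
   are the same test, and symmetric codes such as EQ_EXPR or
   UNORDERED_EXPR map to themselves.  */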
2913 /* Convert a comparison tree code from an enum tree_code representation
2914 into a compcode bit-based encoding. This function is the inverse of
2915 compcode_to_comparison. */
2917 static enum comparison_code
2918 comparison_to_compcode (enum tree_code code)
2920 switch (code)
2922 case LT_EXPR:
2923 return COMPCODE_LT;
2924 case EQ_EXPR:
2925 return COMPCODE_EQ;
2926 case LE_EXPR:
2927 return COMPCODE_LE;
2928 case GT_EXPR:
2929 return COMPCODE_GT;
2930 case NE_EXPR:
2931 return COMPCODE_NE;
2932 case GE_EXPR:
2933 return COMPCODE_GE;
2934 case ORDERED_EXPR:
2935 return COMPCODE_ORD;
2936 case UNORDERED_EXPR:
2937 return COMPCODE_UNORD;
2938 case UNLT_EXPR:
2939 return COMPCODE_UNLT;
2940 case UNEQ_EXPR:
2941 return COMPCODE_UNEQ;
2942 case UNLE_EXPR:
2943 return COMPCODE_UNLE;
2944 case UNGT_EXPR:
2945 return COMPCODE_UNGT;
2946 case LTGT_EXPR:
2947 return COMPCODE_LTGT;
2948 case UNGE_EXPR:
2949 return COMPCODE_UNGE;
2950 default:
2951 gcc_unreachable ();
2955 /* Convert a compcode bit-based encoding of a comparison operator back
2956 to GCC's enum tree_code representation. This function is the
2957 inverse of comparison_to_compcode. */
2959 static enum tree_code
2960 compcode_to_comparison (enum comparison_code code)
2962 switch (code)
2964 case COMPCODE_LT:
2965 return LT_EXPR;
2966 case COMPCODE_EQ:
2967 return EQ_EXPR;
2968 case COMPCODE_LE:
2969 return LE_EXPR;
2970 case COMPCODE_GT:
2971 return GT_EXPR;
2972 case COMPCODE_NE:
2973 return NE_EXPR;
2974 case COMPCODE_GE:
2975 return GE_EXPR;
2976 case COMPCODE_ORD:
2977 return ORDERED_EXPR;
2978 case COMPCODE_UNORD:
2979 return UNORDERED_EXPR;
2980 case COMPCODE_UNLT:
2981 return UNLT_EXPR;
2982 case COMPCODE_UNEQ:
2983 return UNEQ_EXPR;
2984 case COMPCODE_UNLE:
2985 return UNLE_EXPR;
2986 case COMPCODE_UNGT:
2987 return UNGT_EXPR;
2988 case COMPCODE_LTGT:
2989 return LTGT_EXPR;
2990 case COMPCODE_UNGE:
2991 return UNGE_EXPR;
2992 default:
2993 gcc_unreachable ();
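/* Illustrative sketch (not part of the original source) of the bit
   encoding these two routines translate: COMPCODE_LT, COMPCODE_EQ,
   COMPCODE_GT and COMPCODE_UNORD each occupy one bit, so e.g.
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD, which is
   what lets combine_comparisons below merge tests with plain & and |.  */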
2997 /* Return true if COND1 tests the opposite condition of COND2. */
2999 bool
3000 inverse_conditions_p (const_tree cond1, const_tree cond2)
3002 return (COMPARISON_CLASS_P (cond1)
3003 && COMPARISON_CLASS_P (cond2)
3004 && (invert_tree_comparison
3005 (TREE_CODE (cond1),
3006 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3007 && operand_equal_p (TREE_OPERAND (cond1, 0),
3008 TREE_OPERAND (cond2, 0), 0)
3009 && operand_equal_p (TREE_OPERAND (cond1, 1),
3010 TREE_OPERAND (cond2, 1), 0));
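/* E.g. (a sketch, not in the original source), for integer operands
   x < y and x >= y test inverse conditions, so inverse_conditions_p
   returns true for that pair; for floats with NaNs it defers to the
   more cautious invert_tree_comparison logic above.  */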
3013 /* Return a tree for the comparison which is the combination of
3014 doing the AND or OR (depending on CODE) of the two operations LCODE
3015 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3016 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3017 if this makes the transformation invalid. */
3019 tree
3020 combine_comparisons (location_t loc,
3021 enum tree_code code, enum tree_code lcode,
3022 enum tree_code rcode, tree truth_type,
3023 tree ll_arg, tree lr_arg)
3025 bool honor_nans = HONOR_NANS (ll_arg);
3026 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3027 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3028 int compcode;
3030 switch (code)
3032 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3033 compcode = lcompcode & rcompcode;
3034 break;
3036 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3037 compcode = lcompcode | rcompcode;
3038 break;
3040 default:
3041 return NULL_TREE;
3044 if (!honor_nans)
3046 /* Eliminate unordered comparisons, as well as LTGT and ORD
3047 which are not used unless the mode has NaNs. */
3048 compcode &= ~COMPCODE_UNORD;
3049 if (compcode == COMPCODE_LTGT)
3050 compcode = COMPCODE_NE;
3051 else if (compcode == COMPCODE_ORD)
3052 compcode = COMPCODE_TRUE;
3054 else if (flag_trapping_math)
3056 /* Check that the original operation and the optimized ones will trap
3057 under the same condition. */
3058 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3059 && (lcompcode != COMPCODE_EQ)
3060 && (lcompcode != COMPCODE_ORD);
3061 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3062 && (rcompcode != COMPCODE_EQ)
3063 && (rcompcode != COMPCODE_ORD);
3064 bool trap = (compcode & COMPCODE_UNORD) == 0
3065 && (compcode != COMPCODE_EQ)
3066 && (compcode != COMPCODE_ORD);
3068 /* In a short-circuited boolean expression the LHS might be
3069 such that the RHS, if evaluated, will never trap. For
3070 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3071 if neither x nor y is NaN. (This is a mixed blessing: for
3072 example, the expression above will never trap, hence
3073 optimizing it to x < y would be invalid). */
3074 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3075 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3076 rtrap = false;
3078 /* If the comparison was short-circuited, and only the RHS
3079 trapped, we may now generate a spurious trap. */
3080 if (rtrap && !ltrap
3081 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3082 return NULL_TREE;
3084 /* If we changed the conditions that cause a trap, we lose. */
3085 if ((ltrap || rtrap) != trap)
3086 return NULL_TREE;
3089 if (compcode == COMPCODE_TRUE)
3090 return constant_boolean_node (true, truth_type);
3091 else if (compcode == COMPCODE_FALSE)
3092 return constant_boolean_node (false, truth_type);
3093 else
3095 enum tree_code tcode;
3097 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3098 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
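/* For example (a sketch, not in the original source), combining
   x < y || x == y with TRUTH_ORIF_EXPR ORs COMPCODE_LT with
   COMPCODE_EQ, giving COMPCODE_LE, so the whole expression folds to
   the single comparison x <= y (trap behavior permitting).  */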
3102 /* Return nonzero if two operands (typically of the same tree node)
3103 are necessarily equal. FLAGS modifies behavior as follows:
3105 If OEP_ONLY_CONST is set, only return nonzero for constants.
3106 This function tests whether the operands are indistinguishable;
3107 it does not test whether they are equal using C's == operation.
3108 The distinction is important for IEEE floating point, because
3109 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3110 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3112 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3113 even though it may hold multiple values during a function.
3114 This is because a GCC tree node guarantees that nothing else is
3115 executed between the evaluation of its "operands" (which may often
3116 be evaluated in arbitrary order). Hence if the operands themselves
3117 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3118 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3119 unset means assuming isochronic (or instantaneous) tree equivalence.
3120 Unless comparing arbitrary expression trees, such as from different
3121 statements, this flag can usually be left unset.
3123 If OEP_PURE_SAME is set, then pure functions with identical arguments
3124 are considered the same. It is used when the caller has other ways
3125 to ensure that global memory is unchanged in between.
3127 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3128 not values of expressions.
3130 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3131 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3133 If OEP_BITWISE is set, then require the values to be bitwise identical
3134 rather than simply numerically equal. Do not take advantage of things
3135 like math-related flags or undefined behavior; only return true for
3136 values that are provably bitwise identical in all circumstances.
3138 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3139 any operand with side effects. This is unnecessarily conservative in the
3140 case we know that arg0 and arg1 are in disjoint code paths (such as in
3141 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3142 addresses with TREE_CONSTANT flag set so we know that &var == &var
3143 even if var is volatile. */
3145 bool
3146 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3147 unsigned int flags)
3149 bool r;
3150 if (verify_hash_value (arg0, arg1, flags, &r))
3151 return r;
3153 STRIP_ANY_LOCATION_WRAPPER (arg0);
3154 STRIP_ANY_LOCATION_WRAPPER (arg1);
3156 /* If either is ERROR_MARK, they aren't equal. */
3157 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3158 || TREE_TYPE (arg0) == error_mark_node
3159 || TREE_TYPE (arg1) == error_mark_node)
3160 return false;
3162 /* Similarly, if either does not have a type (like a template id),
3163 they aren't equal. */
3164 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3165 return false;
3167 /* Bitwise identity makes no sense if the values have different layouts. */
3168 if ((flags & OEP_BITWISE)
3169 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3170 return false;
3172 /* We cannot consider pointers to different address spaces as equal. */
3173 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3174 && POINTER_TYPE_P (TREE_TYPE (arg1))
3175 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3176 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3177 return false;
3179 /* Check equality of integer constants before bailing out due to
3180 precision differences. */
3181 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3183 /* Address of INTEGER_CST is not defined; check that we did not forget
3184 to drop the OEP_ADDRESS_OF flags. */
3185 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3186 return tree_int_cst_equal (arg0, arg1);
3189 if (!(flags & OEP_ADDRESS_OF))
3191 /* If both types don't have the same signedness, then we can't consider
3192 them equal. We must check this before the STRIP_NOPS calls
3193 because they may change the signedness of the arguments. As pointers
3194 strictly don't have a signedness, require either two pointers or
3195 two non-pointers as well. */
3196 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3197 || POINTER_TYPE_P (TREE_TYPE (arg0))
3198 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3199 return false;
3201 /* If both types don't have the same precision, then it is not safe
3202 to strip NOPs. */
3203 if (element_precision (TREE_TYPE (arg0))
3204 != element_precision (TREE_TYPE (arg1)))
3205 return false;
3207 STRIP_NOPS (arg0);
3208 STRIP_NOPS (arg1);
3210 #if 0
3211 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3212 sanity check once the issue is solved. */
3213 else
3214 /* Addresses of conversions and SSA_NAMEs (and many other things)
3215 are not defined. Check that we did not forget to drop the
3216 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3217 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3218 && TREE_CODE (arg0) != SSA_NAME);
3219 #endif
3221 /* In case both args are comparisons but with different comparison
3222 code, try to swap the comparison operands of one arg to produce
3223 a match and compare that variant. */
3224 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3225 && COMPARISON_CLASS_P (arg0)
3226 && COMPARISON_CLASS_P (arg1))
3228 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3230 if (TREE_CODE (arg0) == swap_code)
3231 return operand_equal_p (TREE_OPERAND (arg0, 0),
3232 TREE_OPERAND (arg1, 1), flags)
3233 && operand_equal_p (TREE_OPERAND (arg0, 1),
3234 TREE_OPERAND (arg1, 0), flags);
3237 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3239 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3240 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3242 else if (flags & OEP_ADDRESS_OF)
3244 /* If we are interested in comparing addresses ignore
3245 MEM_REF wrappings of the base that can appear just for
3246 TBAA reasons. */
3247 if (TREE_CODE (arg0) == MEM_REF
3248 && DECL_P (arg1)
3249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3250 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3251 && integer_zerop (TREE_OPERAND (arg0, 1)))
3252 return true;
3253 else if (TREE_CODE (arg1) == MEM_REF
3254 && DECL_P (arg0)
3255 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3256 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3257 && integer_zerop (TREE_OPERAND (arg1, 1)))
3258 return true;
3259 return false;
3261 else
3262 return false;
3265 /* When not checking addresses, this is needed for conversions and for
3266 COMPONENT_REF. Might as well play it safe and always test this. */
3267 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3268 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3269 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3270 && !(flags & OEP_ADDRESS_OF)))
3271 return false;
3273 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3274 We don't care about side effects in that case because the SAVE_EXPR
3275 takes care of that for us. In all other cases, two expressions are
3276 equal if they have no side effects. If we have two identical
3277 expressions with side effects that should be treated the same due
3278 to the only side effects being identical SAVE_EXPR's, that will
3279 be detected in the recursive calls below.
3280 If we are taking an invariant address of two identical objects
3281 they are necessarily equal as well. */
3282 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3283 && (TREE_CODE (arg0) == SAVE_EXPR
3284 || (flags & OEP_MATCH_SIDE_EFFECTS)
3285 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3286 return true;
3288 /* Next handle constant cases, those for which we can return true even
3289 if ONLY_CONST is set. */
3290 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3291 switch (TREE_CODE (arg0))
3293 case INTEGER_CST:
3294 return tree_int_cst_equal (arg0, arg1);
3296 case FIXED_CST:
3297 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3298 TREE_FIXED_CST (arg1));
3300 case REAL_CST:
3301 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3302 return true;
3304 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3306 /* If we do not distinguish between signed and unsigned zero,
3307 consider them equal. */
3308 if (real_zerop (arg0) && real_zerop (arg1))
3309 return true;
3311 return false;
3313 case VECTOR_CST:
3315 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3316 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3317 return false;
3319 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3320 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3321 return false;
3323 unsigned int count = vector_cst_encoded_nelts (arg0);
3324 for (unsigned int i = 0; i < count; ++i)
3325 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3326 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3327 return false;
3328 return true;
3331 case COMPLEX_CST:
3332 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3333 flags)
3334 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3335 flags));
3337 case STRING_CST:
3338 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3339 && ! memcmp (TREE_STRING_POINTER (arg0),
3340 TREE_STRING_POINTER (arg1),
3341 TREE_STRING_LENGTH (arg0)));
3343 case ADDR_EXPR:
3344 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3345 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3346 flags | OEP_ADDRESS_OF
3347 | OEP_MATCH_SIDE_EFFECTS);
3348 case CONSTRUCTOR:
3350 /* In GIMPLE empty constructors are allowed in initializers of
3351 aggregates. */
3352 if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3353 return true;
3355 /* See sem_variable::equals in ipa-icf for a similar approach. */
3356 tree typ0 = TREE_TYPE (arg0);
3357 tree typ1 = TREE_TYPE (arg1);
3359 if (TREE_CODE (typ0) != TREE_CODE (typ1))
3360 return false;
3361 else if (TREE_CODE (typ0) == ARRAY_TYPE)
3363 /* For arrays, check that the sizes all match. */
3364 const HOST_WIDE_INT siz0 = int_size_in_bytes (typ0);
3365 if (TYPE_MODE (typ0) != TYPE_MODE (typ1)
3366 || siz0 < 0
3367 || siz0 != int_size_in_bytes (typ1))
3368 return false;
3370 else if (!types_compatible_p (typ0, typ1))
3371 return false;
3373 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3374 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3375 if (vec_safe_length (v0) != vec_safe_length (v1))
3376 return false;
3378 /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3379 of the CONSTRUCTOR referenced indirectly. */
3380 flags &= ~OEP_ADDRESS_OF;
3382 for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3384 constructor_elt *c0 = &(*v0)[idx];
3385 constructor_elt *c1 = &(*v1)[idx];
3387 /* Check that the values are the same... */
3388 if (c0->value != c1->value
3389 && !operand_equal_p (c0->value, c1->value, flags))
3390 return false;
3392 /* ... and that they apply to the same field! */
3393 if (c0->index != c1->index
3394 && (TREE_CODE (typ0) == ARRAY_TYPE
3395 ? !operand_equal_p (c0->index, c1->index, flags)
3396 : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3397 DECL_FIELD_OFFSET (c1->index),
3398 flags)
3399 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3400 DECL_FIELD_BIT_OFFSET (c1->index),
3401 flags)))
3402 return false;
3405 return true;
3408 default:
3409 break;
3412 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3413 two instances of undefined behavior will give identical results. */
3414 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3415 return false;
3417 /* Define macros to test an operand from arg0 and arg1 for equality and a
3418 variant that allows null and views null as being different from any
3419 non-null value. In the latter case, if either is null, then both
3420 must be; otherwise, do the normal comparison. */
3421 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3422 TREE_OPERAND (arg1, N), flags)
3424 #define OP_SAME_WITH_NULL(N) \
3425 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3426 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3428 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3430 case tcc_unary:
3431 /* Two conversions are equal only if signedness and modes match. */
3432 switch (TREE_CODE (arg0))
3434 CASE_CONVERT:
3435 case FIX_TRUNC_EXPR:
3436 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3437 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3438 return false;
3439 break;
3440 default:
3441 break;
3444 return OP_SAME (0);
3447 case tcc_comparison:
3448 case tcc_binary:
3449 if (OP_SAME (0) && OP_SAME (1))
3450 return true;
3452 /* For commutative ops, allow the other order. */
3453 return (commutative_tree_code (TREE_CODE (arg0))
3454 && operand_equal_p (TREE_OPERAND (arg0, 0),
3455 TREE_OPERAND (arg1, 1), flags)
3456 && operand_equal_p (TREE_OPERAND (arg0, 1),
3457 TREE_OPERAND (arg1, 0), flags));
3459 case tcc_reference:
3460 /* If either of the pointer (or reference) expressions we are
3461 dereferencing contains a side effect, these cannot be equal,
3462 but their addresses can be. */
3463 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3464 && (TREE_SIDE_EFFECTS (arg0)
3465 || TREE_SIDE_EFFECTS (arg1)))
3466 return false;
3468 switch (TREE_CODE (arg0))
3470 case INDIRECT_REF:
3471 if (!(flags & OEP_ADDRESS_OF))
3473 if (TYPE_ALIGN (TREE_TYPE (arg0))
3474 != TYPE_ALIGN (TREE_TYPE (arg1)))
3475 return false;
3476 /* Verify that the access types are compatible. */
3477 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3478 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3479 return false;
3481 flags &= ~OEP_ADDRESS_OF;
3482 return OP_SAME (0);
3484 case IMAGPART_EXPR:
3485 /* Require the same offset. */
3486 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3487 TYPE_SIZE (TREE_TYPE (arg1)),
3488 flags & ~OEP_ADDRESS_OF))
3489 return false;
3491 /* Fallthru. */
3492 case REALPART_EXPR:
3493 case VIEW_CONVERT_EXPR:
3494 return OP_SAME (0);
3496 case TARGET_MEM_REF:
3497 case MEM_REF:
3498 if (!(flags & OEP_ADDRESS_OF))
3500 /* Require equal access sizes. */
3501 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3502 && (!TYPE_SIZE (TREE_TYPE (arg0))
3503 || !TYPE_SIZE (TREE_TYPE (arg1))
3504 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3505 TYPE_SIZE (TREE_TYPE (arg1)),
3506 flags)))
3507 return false;
3508 /* Verify that access happens in similar types. */
3509 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3510 return false;
3511 /* Verify that accesses are TBAA compatible. */
3512 if (!alias_ptr_types_compatible_p
3513 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3514 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3515 || (MR_DEPENDENCE_CLIQUE (arg0)
3516 != MR_DEPENDENCE_CLIQUE (arg1))
3517 || (MR_DEPENDENCE_BASE (arg0)
3518 != MR_DEPENDENCE_BASE (arg1)))
3519 return false;
3520 /* Verify that alignment is compatible. */
3521 if (TYPE_ALIGN (TREE_TYPE (arg0))
3522 != TYPE_ALIGN (TREE_TYPE (arg1)))
3523 return false;
3525 flags &= ~OEP_ADDRESS_OF;
3526 return (OP_SAME (0) && OP_SAME (1)
3527 /* TARGET_MEM_REFs require equal extra operands. */
3528 && (TREE_CODE (arg0) != TARGET_MEM_REF
3529 || (OP_SAME_WITH_NULL (2)
3530 && OP_SAME_WITH_NULL (3)
3531 && OP_SAME_WITH_NULL (4))));
3533 case ARRAY_REF:
3534 case ARRAY_RANGE_REF:
3535 if (!OP_SAME (0))
3536 return false;
3537 flags &= ~OEP_ADDRESS_OF;
3538 /* First compare the array index by value when it is constant, as the
3539 indexes may have different types but the same value here. */
3540 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3541 TREE_OPERAND (arg1, 1))
3542 || OP_SAME (1))
3543 && OP_SAME_WITH_NULL (2)
3544 && OP_SAME_WITH_NULL (3)
3545 /* Compare low bound and element size as with OEP_ADDRESS_OF
3546 we have to account for the offset of the ref. */
3547 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3548 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3549 || (operand_equal_p (array_ref_low_bound
3550 (CONST_CAST_TREE (arg0)),
3551 array_ref_low_bound
3552 (CONST_CAST_TREE (arg1)), flags)
3553 && operand_equal_p (array_ref_element_size
3554 (CONST_CAST_TREE (arg0)),
3555 array_ref_element_size
3556 (CONST_CAST_TREE (arg1)),
3557 flags))));
3559 case COMPONENT_REF:
3560 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3561 may be NULL when we're called to compare MEM_EXPRs. */
3562 if (!OP_SAME_WITH_NULL (0))
3563 return false;
3565 bool compare_address = flags & OEP_ADDRESS_OF;
3567 /* Most of the time we only need to compare FIELD_DECLs for equality.
3568 However, when determining the address, look at the actual offsets:
3569 these may match for unions and unshared record types. */
3570 flags &= ~OEP_ADDRESS_OF;
3571 if (!OP_SAME (1))
3573 if (compare_address
3574 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3576 tree field0 = TREE_OPERAND (arg0, 1);
3577 tree field1 = TREE_OPERAND (arg1, 1);
3579 /* Non-FIELD_DECL operands can appear in C++ templates. */
3580 if (TREE_CODE (field0) != FIELD_DECL
3581 || TREE_CODE (field1) != FIELD_DECL
3582 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3583 DECL_FIELD_OFFSET (field1), flags)
3584 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3585 DECL_FIELD_BIT_OFFSET (field1),
3586 flags))
3587 return false;
3589 else
3590 return false;
3593 return OP_SAME_WITH_NULL (2);
3595 case BIT_FIELD_REF:
3596 if (!OP_SAME (0))
3597 return false;
3598 flags &= ~OEP_ADDRESS_OF;
3599 return OP_SAME (1) && OP_SAME (2);
3601 default:
3602 return false;
3605 case tcc_expression:
3606 switch (TREE_CODE (arg0))
3608 case ADDR_EXPR:
3609 /* Be sure we pass the right ADDRESS_OF flag. */
3610 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3611 return operand_equal_p (TREE_OPERAND (arg0, 0),
3612 TREE_OPERAND (arg1, 0),
3613 flags | OEP_ADDRESS_OF);
3615 case TRUTH_NOT_EXPR:
3616 return OP_SAME (0);
3618 case TRUTH_ANDIF_EXPR:
3619 case TRUTH_ORIF_EXPR:
3620 return OP_SAME (0) && OP_SAME (1);
3622 case WIDEN_MULT_PLUS_EXPR:
3623 case WIDEN_MULT_MINUS_EXPR:
3624 if (!OP_SAME (2))
3625 return false;
3626 /* The multiplication operands are commutative. */
3627 /* FALLTHRU */
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632 if (OP_SAME (0) && OP_SAME (1))
3633 return true;
3635 /* Otherwise take into account this is a commutative operation. */
3636 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3637 TREE_OPERAND (arg1, 1), flags)
3638 && operand_equal_p (TREE_OPERAND (arg0, 1),
3639 TREE_OPERAND (arg1, 0), flags));
3641 case COND_EXPR:
3642 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3643 return false;
3644 flags &= ~OEP_ADDRESS_OF;
3645 return OP_SAME (0);
3647 case BIT_INSERT_EXPR:
3648 /* BIT_INSERT_EXPR has the type precision of op1 as an implicit
3649 operand. Need to check to make sure they are the same. */
3650 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3651 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3652 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3653 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3654 return false;
3655 /* FALLTHRU */
3657 case VEC_COND_EXPR:
3658 case DOT_PROD_EXPR:
3659 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3661 case MODIFY_EXPR:
3662 case INIT_EXPR:
3663 case COMPOUND_EXPR:
3664 case PREDECREMENT_EXPR:
3665 case PREINCREMENT_EXPR:
3666 case POSTDECREMENT_EXPR:
3667 case POSTINCREMENT_EXPR:
3668 if (flags & OEP_LEXICOGRAPHIC)
3669 return OP_SAME (0) && OP_SAME (1);
3670 return false;
3672 case CLEANUP_POINT_EXPR:
3673 case EXPR_STMT:
3674 case SAVE_EXPR:
3675 if (flags & OEP_LEXICOGRAPHIC)
3676 return OP_SAME (0);
3677 return false;
3679 case OBJ_TYPE_REF:
3680 /* Virtual table reference. */
3681 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3682 OBJ_TYPE_REF_EXPR (arg1), flags))
3683 return false;
3684 flags &= ~OEP_ADDRESS_OF;
3685 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3686 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3687 return false;
3688 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3689 OBJ_TYPE_REF_OBJECT (arg1), flags))
3690 return false;
3691 if (virtual_method_call_p (arg0))
3693 if (!virtual_method_call_p (arg1))
3694 return false;
3695 return types_same_for_odr (obj_type_ref_class (arg0),
3696 obj_type_ref_class (arg1));
3698 return false;
3700 default:
3701 return false;
3704 case tcc_vl_exp:
3705 switch (TREE_CODE (arg0))
3707 case CALL_EXPR:
3708 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3709 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3710 /* If the CALL_EXPRs are not both internal calls or both normal
3711 function calls, then they are not equal. */
3712 return false;
3713 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3715 /* If the CALL_EXPRs call different internal functions, then they
3716 are not equal. */
3717 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3718 return false;
3720 else
3722 /* If the CALL_EXPRs call different functions, then they are not
3723 equal. */
3724 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3725 flags))
3726 return false;
3729 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3731 unsigned int cef = call_expr_flags (arg0);
3732 if (flags & OEP_PURE_SAME)
3733 cef &= ECF_CONST | ECF_PURE;
3734 else
3735 cef &= ECF_CONST;
3736 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3737 return false;
3740 /* Now see if all the arguments are the same. */
3742 const_call_expr_arg_iterator iter0, iter1;
3743 const_tree a0, a1;
3744 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3745 a1 = first_const_call_expr_arg (arg1, &iter1);
3746 a0 && a1;
3747 a0 = next_const_call_expr_arg (&iter0),
3748 a1 = next_const_call_expr_arg (&iter1))
3749 if (! operand_equal_p (a0, a1, flags))
3750 return false;
3752 /* If we get here and both argument lists are exhausted
3753 then the CALL_EXPRs are equal. */
3754 return ! (a0 || a1);
3756 default:
3757 return false;
3760 case tcc_declaration:
3761 /* Consider __builtin_sqrt equal to sqrt. */
3762 if (TREE_CODE (arg0) == FUNCTION_DECL)
3763 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3764 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3765 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3766 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3768 if (DECL_P (arg0)
3769 && (flags & OEP_DECL_NAME)
3770 && (flags & OEP_LEXICOGRAPHIC))
3772 /* Consider decls with the same name equal. The caller needs
3773 to make sure they refer to the same entity (such as a function
3774 formal parameter). */
3775 tree a0name = DECL_NAME (arg0);
3776 tree a1name = DECL_NAME (arg1);
3777 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3778 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3779 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3781 return false;
3783 case tcc_exceptional:
3784 if (TREE_CODE (arg0) == CONSTRUCTOR)
3786 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3787 return false;
3789 /* In GIMPLE constructors are used only to build vectors from
3790 elements. Individual elements in the constructor must be
3791 indexed in increasing order and form an initial sequence.
3793 We make no effort to compare nonconstant ones in GENERIC. */
3794 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3795 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3796 return false;
3798 /* Be sure that the constructed vectors have the same representation.
3799 We have only tested that the element precisions and modes match.
3800 Vectors may be BLKmode, so also check that the numbers of
3801 parts match. */
3802 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3803 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3804 return false;
3806 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3807 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3808 unsigned int len = vec_safe_length (v0);
3810 if (len != vec_safe_length (v1))
3811 return false;
3813 for (unsigned int i = 0; i < len; i++)
3815 constructor_elt *c0 = &(*v0)[i];
3816 constructor_elt *c1 = &(*v1)[i];
3818 if (!operand_equal_p (c0->value, c1->value, flags)
3819 /* In GIMPLE the indexes can be either NULL or matching i.
3820 Double check this so we won't get false
3821 positives for GENERIC. */
3822 || (c0->index
3823 && (TREE_CODE (c0->index) != INTEGER_CST
3824 || compare_tree_int (c0->index, i)))
3825 || (c1->index
3826 && (TREE_CODE (c1->index) != INTEGER_CST
3827 || compare_tree_int (c1->index, i))))
3828 return false;
3830 return true;
3832 else if (TREE_CODE (arg0) == STATEMENT_LIST
3833 && (flags & OEP_LEXICOGRAPHIC))
3835 /* Compare the STATEMENT_LISTs. */
3836 tree_stmt_iterator tsi1, tsi2;
3837 tree body1 = CONST_CAST_TREE (arg0);
3838 tree body2 = CONST_CAST_TREE (arg1);
3839 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3840 tsi_next (&tsi1), tsi_next (&tsi2))
3842 /* The lists don't have the same number of statements. */
3843 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3844 return false;
3845 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3846 return true;
3847 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3848 flags & (OEP_LEXICOGRAPHIC
3849 | OEP_NO_HASH_CHECK)))
3850 return false;
3853 return false;
3855 case tcc_statement:
3856 switch (TREE_CODE (arg0))
3858 case RETURN_EXPR:
3859 if (flags & OEP_LEXICOGRAPHIC)
3860 return OP_SAME_WITH_NULL (0);
3861 return false;
3862 case DEBUG_BEGIN_STMT:
3863 if (flags & OEP_LEXICOGRAPHIC)
3864 return true;
3865 return false;
3866 default:
3867 return false;
3870 default:
3871 return false;
3874 #undef OP_SAME
3875 #undef OP_SAME_WITH_NULL
3878 /* Generate a hash value for an expression. This can be used iteratively
3879 by passing a previous result as the HSTATE argument. */
3881 void
3882 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3883 unsigned int flags)
3885 int i;
3886 enum tree_code code;
3887 enum tree_code_class tclass;
3889 if (t == NULL_TREE || t == error_mark_node)
3891 hstate.merge_hash (0);
3892 return;
3895 STRIP_ANY_LOCATION_WRAPPER (t);
3897 if (!(flags & OEP_ADDRESS_OF))
3898 STRIP_NOPS (t);
3900 code = TREE_CODE (t);
3902 switch (code)
3904 /* Alas, constants aren't shared, so we can't rely on pointer
3905 identity. */
3906 case VOID_CST:
3907 hstate.merge_hash (0);
3908 return;
3909 case INTEGER_CST:
3910 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3911 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3912 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3913 return;
3914 case REAL_CST:
3916 unsigned int val2;
3917 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3918 val2 = rvc_zero;
3919 else
3920 val2 = real_hash (TREE_REAL_CST_PTR (t));
3921 hstate.merge_hash (val2);
3922 return;
3924 case FIXED_CST:
3926 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3927 hstate.merge_hash (val2);
3928 return;
3930 case STRING_CST:
3931 hstate.add ((const void *) TREE_STRING_POINTER (t),
3932 TREE_STRING_LENGTH (t));
3933 return;
3934 case COMPLEX_CST:
3935 hash_operand (TREE_REALPART (t), hstate, flags);
3936 hash_operand (TREE_IMAGPART (t), hstate, flags);
3937 return;
3938 case VECTOR_CST:
3940 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3941 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3942 unsigned int count = vector_cst_encoded_nelts (t);
3943 for (unsigned int i = 0; i < count; ++i)
3944 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3945 return;
3947 case SSA_NAME:
3948 /* SSA names are equal only by identity, so hashing the version suffices. */
3949 hstate.add_hwi (SSA_NAME_VERSION (t));
3950 return;
3951 case PLACEHOLDER_EXPR:
3952 /* The node itself doesn't matter. */
3953 return;
3954 case BLOCK:
3955 case OMP_CLAUSE:
3956 /* Ignore. */
3957 return;
3958 case TREE_LIST:
3959 /* A list of expressions, for a CALL_EXPR or as the elements of a
3960 VECTOR_CST. */
3961 for (; t; t = TREE_CHAIN (t))
3962 hash_operand (TREE_VALUE (t), hstate, flags);
3963 return;
3964 case CONSTRUCTOR:
3966 unsigned HOST_WIDE_INT idx;
3967 tree field, value;
3968 flags &= ~OEP_ADDRESS_OF;
3969 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3972 /* In GIMPLE the indexes can be either NULL or matching i. */
3973 if (field == NULL_TREE)
3974 field = bitsize_int (idx);
3975 if (TREE_CODE (field) == FIELD_DECL)
3977 hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
3978 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
3980 else
3981 hash_operand (field, hstate, flags);
3982 hash_operand (value, hstate, flags);
3984 return;
3986 case STATEMENT_LIST:
3988 tree_stmt_iterator i;
3989 for (i = tsi_start (CONST_CAST_TREE (t));
3990 !tsi_end_p (i); tsi_next (&i))
3991 hash_operand (tsi_stmt (i), hstate, flags);
3992 return;
3994 case TREE_VEC:
3995 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3996 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3997 return;
3998 case IDENTIFIER_NODE:
3999 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
4000 return;
4001 case FUNCTION_DECL:
4002 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
4003 Otherwise nodes that compare equal according to operand_equal_p might
4004 get different hash codes. However, don't do this for machine specific
4005 or front end builtins, since the function code is overloaded in those
4006 cases. */
4007 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
4008 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
4010 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
4011 code = TREE_CODE (t);
4013 /* FALL THROUGH */
4014 default:
4015 if (POLY_INT_CST_P (t))
4017 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
4018 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
4019 return;
4021 tclass = TREE_CODE_CLASS (code);
4023 if (tclass == tcc_declaration)
4025 /* DECLs have a unique ID. */
4026 hstate.add_hwi (DECL_UID (t));
4028 else if (tclass == tcc_comparison && !commutative_tree_code (code))
4030 /* For comparisons that can be swapped, use the lower
4031 tree code. */
4032 enum tree_code ccode = swap_tree_comparison (code);
4033 if (code < ccode)
4034 ccode = code;
4035 hstate.add_object (ccode);
4036 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
4037 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
4039 else if (CONVERT_EXPR_CODE_P (code))
4041 /* NOP_EXPR and CONVERT_EXPR are considered equal by
4042 operand_equal_p. */
4043 enum tree_code ccode = NOP_EXPR;
4044 hstate.add_object (ccode);
4046 /* Don't hash the type, that can lead to having nodes which
4047 compare equal according to operand_equal_p, but which
4048 have different hash codes. Make sure to include signedness
4049 in the hash computation. */
4050 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4051 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4053 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
4054 else if (code == MEM_REF
4055 && (flags & OEP_ADDRESS_OF) != 0
4056 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
4057 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
4058 && integer_zerop (TREE_OPERAND (t, 1)))
4059 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
4060 hstate, flags);
4061 /* Don't ICE on FE specific trees, or their arguments etc.
4062 during operand_equal_p hash verification. */
4063 else if (!IS_EXPR_CODE_CLASS (tclass))
4064 gcc_assert (flags & OEP_HASH_CHECK);
4065 else
4067 unsigned int sflags = flags;
4069 hstate.add_object (code);
4071 switch (code)
4073 case ADDR_EXPR:
4074 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4075 flags |= OEP_ADDRESS_OF;
4076 sflags = flags;
4077 break;
4079 case INDIRECT_REF:
4080 case MEM_REF:
4081 case TARGET_MEM_REF:
4082 flags &= ~OEP_ADDRESS_OF;
4083 sflags = flags;
4084 break;
4086 case COMPONENT_REF:
4087 if (sflags & OEP_ADDRESS_OF)
4089 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4090 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
4091 hstate, flags & ~OEP_ADDRESS_OF);
4092 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
4093 hstate, flags & ~OEP_ADDRESS_OF);
4094 return;
4096 break;
4097 case ARRAY_REF:
4098 case ARRAY_RANGE_REF:
4099 case BIT_FIELD_REF:
4100 sflags &= ~OEP_ADDRESS_OF;
4101 break;
4103 case COND_EXPR:
4104 flags &= ~OEP_ADDRESS_OF;
4105 break;
4107 case WIDEN_MULT_PLUS_EXPR:
4108 case WIDEN_MULT_MINUS_EXPR:
4110 /* The multiplication operands are commutative. */
4111 inchash::hash one, two;
4112 hash_operand (TREE_OPERAND (t, 0), one, flags);
4113 hash_operand (TREE_OPERAND (t, 1), two, flags);
4114 hstate.add_commutative (one, two);
4115 hash_operand (TREE_OPERAND (t, 2), two, flags);
4116 return;
4119 case CALL_EXPR:
4120 if (CALL_EXPR_FN (t) == NULL_TREE)
4121 hstate.add_int (CALL_EXPR_IFN (t));
4122 break;
4124 case TARGET_EXPR:
4125 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4126 Usually different TARGET_EXPRs should just use
4127 different temporaries in their slots. */
4128 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4129 return;
4131 case OBJ_TYPE_REF:
4132 /* Virtual table reference. */
4133 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4134 flags &= ~OEP_ADDRESS_OF;
4135 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4136 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4137 if (!virtual_method_call_p (t))
4138 return;
4139 if (tree c = obj_type_ref_class (t))
4141 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4142 /* We compute mangled names only when free_lang_data is run.
4143 In that case we can hash precisely. */
4144 if (TREE_CODE (c) == TYPE_DECL
4145 && DECL_ASSEMBLER_NAME_SET_P (c))
4146 hstate.add_object
4147 (IDENTIFIER_HASH_VALUE
4148 (DECL_ASSEMBLER_NAME (c)));
4150 return;
4151 default:
4152 break;
4155 /* Don't hash the type, that can lead to having nodes which
4156 compare equal according to operand_equal_p, but which
4157 have different hash codes. */
4158 if (code == NON_LVALUE_EXPR)
4160 /* Make sure to include signedness in the hash computation. */
4161 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4162 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4165 else if (commutative_tree_code (code))
4167 /* It's a commutative expression. We want to hash it the same
4168 however it appears. We do this by first hashing both operands
4169 and then rehashing based on the order of their independent
4170 hashes. */
4171 inchash::hash one, two;
4172 hash_operand (TREE_OPERAND (t, 0), one, flags);
4173 hash_operand (TREE_OPERAND (t, 1), two, flags);
4174 hstate.add_commutative (one, two);
4176 else
4177 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4178 hash_operand (TREE_OPERAND (t, i), hstate,
4179 i == 0 ? flags : sflags);
4181 return;
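/* Illustrative aside, not part of fold-const.cc: the commutative case
   above mixes the two operand hashes order-insensitively, so e.g.
   MULT_EXPR <a, b> and MULT_EXPR <b, a> hash alike.  A minimal
   stand-alone analogue in plain C++; hash_commutative is a
   hypothetical helper, not a GCC API.  */

#include <cstdint>
#include <utility>

static uint64_t
hash_commutative (uint64_t one, uint64_t two)
{
  /* Sort the pair before mixing so the result does not depend on the
     operand order, mirroring inchash::hash::add_commutative.  */
  if (one > two)
    std::swap (one, two);
  return one * UINT64_C (0x9e3779b97f4a7c15) ^ two;
}

/* hash_commutative (ha, hb) == hash_commutative (hb, ha) for any pair,
   which is exactly the property the hash/equality contract needs.  */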
4185 bool
4186 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4187 unsigned int flags, bool *ret)
4189 /* When checking and unless comparing DECL names, verify that if
4190 the outermost operand_equal_p call returns non-zero then ARG0
4191 and ARG1 have the same hash value. */
4192 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4194 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4196 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4198 inchash::hash hstate0 (0), hstate1 (0);
4199 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4200 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4201 hashval_t h0 = hstate0.end ();
4202 hashval_t h1 = hstate1.end ();
4203 gcc_assert (h0 == h1);
4205 *ret = true;
4207 else
4208 *ret = false;
4210 return true;
4213 return false;
4217 static operand_compare default_compare_instance;
4219 /* Convenience wrapper around the operand_compare class, because usually we
4220 do not need to play with the valueizer. */
4222 bool
4223 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4225 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4228 namespace inchash
4231 /* Generate a hash value for an expression. This can be used iteratively
4232 by passing a previous result as the HSTATE argument.
4234 This function is intended to produce the same hash for expressions which
4235 would compare equal using operand_equal_p. */
4236 void
4237 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4239 default_compare_instance.hash_operand (t, hstate, flags);
4244 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4245 with a different signedness or a narrower precision. */
4247 static bool
4248 operand_equal_for_comparison_p (tree arg0, tree arg1)
4250 if (operand_equal_p (arg0, arg1, 0))
4251 return true;
4253 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4254 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4255 return false;
4257 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4258 and see if the inner values are the same. This removes any
4259 signedness comparison, which doesn't matter here. */
4260 tree op0 = arg0;
4261 tree op1 = arg1;
4262 STRIP_NOPS (op0);
4263 STRIP_NOPS (op1);
4264 if (operand_equal_p (op0, op1, 0))
4265 return true;
4267 /* Discard a single widening conversion from ARG1 and see if the inner
4268 value is the same as ARG0. */
4269 if (CONVERT_EXPR_P (arg1)
4270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4271 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4272 < TYPE_PRECISION (TREE_TYPE (arg1))
4273 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4274 return true;
4276 return false;
4279 /* See if ARG is an expression that is either a comparison or is performing
4280 arithmetic on comparisons. The comparisons must only be comparing
4281 two different values, which will be stored in *CVAL1 and *CVAL2; if
4282 they are nonzero it means that some operands have already been found.
4283 No variables may be used anywhere else in the expression except in the
4284 comparisons.
4286 If this is true, return true. Otherwise, return false. */
4288 static bool
4289 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4291 enum tree_code code = TREE_CODE (arg);
4292 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4294 /* We can handle some of the tcc_expression cases here. */
4295 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4296 tclass = tcc_unary;
4297 else if (tclass == tcc_expression
4298 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4299 || code == COMPOUND_EXPR))
4300 tclass = tcc_binary;
4302 switch (tclass)
4304 case tcc_unary:
4305 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4307 case tcc_binary:
4308 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4309 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4311 case tcc_constant:
4312 return true;
4314 case tcc_expression:
4315 if (code == COND_EXPR)
4316 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4317 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4318 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4319 return false;
4321 case tcc_comparison:
4322 /* First see if we can handle the first operand, then the second. For
4323 the second operand, we know *CVAL1 can't be zero. It must be that
4324 one side of the comparison is each of the values; test for the
4325 case where this isn't true by failing if the two operands
4326 are the same. */
4328 if (operand_equal_p (TREE_OPERAND (arg, 0),
4329 TREE_OPERAND (arg, 1), 0))
4330 return false;
4332 if (*cval1 == 0)
4333 *cval1 = TREE_OPERAND (arg, 0);
4334 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4336 else if (*cval2 == 0)
4337 *cval2 = TREE_OPERAND (arg, 0);
4338 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4340 else
4341 return false;
4343 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4345 else if (*cval2 == 0)
4346 *cval2 = TREE_OPERAND (arg, 1);
4347 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4349 else
4350 return false;
4352 return true;
4354 default:
4355 return false;
4359 /* ARG is a tree that is known to contain just arithmetic operations and
4360 comparisons. Evaluate the operations in the tree substituting NEW0 for
4361 any occurrence of OLD0 as an operand of a comparison and likewise for
4362 NEW1 and OLD1. */
4364 static tree
4365 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4366 tree old1, tree new1)
4368 tree type = TREE_TYPE (arg);
4369 enum tree_code code = TREE_CODE (arg);
4370 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4372 /* We can handle some of the tcc_expression cases here. */
4373 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4374 tclass = tcc_unary;
4375 else if (tclass == tcc_expression
4376 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4377 tclass = tcc_binary;
4379 switch (tclass)
4381 case tcc_unary:
4382 return fold_build1_loc (loc, code, type,
4383 eval_subst (loc, TREE_OPERAND (arg, 0),
4384 old0, new0, old1, new1));
4386 case tcc_binary:
4387 return fold_build2_loc (loc, code, type,
4388 eval_subst (loc, TREE_OPERAND (arg, 0),
4389 old0, new0, old1, new1),
4390 eval_subst (loc, TREE_OPERAND (arg, 1),
4391 old0, new0, old1, new1));
4393 case tcc_expression:
4394 switch (code)
4396 case SAVE_EXPR:
4397 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4398 old1, new1);
4400 case COMPOUND_EXPR:
4401 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4402 old1, new1);
4404 case COND_EXPR:
4405 return fold_build3_loc (loc, code, type,
4406 eval_subst (loc, TREE_OPERAND (arg, 0),
4407 old0, new0, old1, new1),
4408 eval_subst (loc, TREE_OPERAND (arg, 1),
4409 old0, new0, old1, new1),
4410 eval_subst (loc, TREE_OPERAND (arg, 2),
4411 old0, new0, old1, new1));
4412 default:
4413 break;
4415 /* Fall through - ??? */
4417 case tcc_comparison:
4419 tree arg0 = TREE_OPERAND (arg, 0);
4420 tree arg1 = TREE_OPERAND (arg, 1);
4422 /* We need to check both for exact equality and tree equality. The
4423 former will be true if the operand has a side-effect. In that
4424 case, we know the operand occurred exactly once. */
4426 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4427 arg0 = new0;
4428 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4429 arg0 = new1;
4431 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4432 arg1 = new0;
4433 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4434 arg1 = new1;
4436 return fold_build2_loc (loc, code, type, arg0, arg1);
4439 default:
4440 return arg;
4444 /* Return a tree for the case when the result of an expression is RESULT
4445 converted to TYPE and OMITTED was previously an operand of the expression
4446 but is now not needed (e.g., we folded OMITTED * 0).
4448 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4449 the conversion of RESULT to TYPE. */
4451 tree
4452 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4454 tree t = fold_convert_loc (loc, type, result);
4456 /* If the resulting operand is an empty statement, just return the omitted
4457 statement casted to void. */
4458 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4459 return build1_loc (loc, NOP_EXPR, void_type_node,
4460 fold_ignored_result (omitted));
4462 if (TREE_SIDE_EFFECTS (omitted))
4463 return build2_loc (loc, COMPOUND_EXPR, type,
4464 fold_ignored_result (omitted), t);
4466 return non_lvalue_loc (loc, t);
4469 /* Return a tree for the case when the result of an expression is RESULT
4470 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4471 of the expression but are now not needed.
4473 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4474 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4475 evaluated before OMITTED2. Otherwise, if neither has side effects,
4476 just do the conversion of RESULT to TYPE. */
4478 tree
4479 omit_two_operands_loc (location_t loc, tree type, tree result,
4480 tree omitted1, tree omitted2)
4482 tree t = fold_convert_loc (loc, type, result);
4484 if (TREE_SIDE_EFFECTS (omitted2))
4485 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4486 if (TREE_SIDE_EFFECTS (omitted1))
4487 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4489 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
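/* Illustrative aside, not part of fold-const.cc: at the source level the
   COMPOUND_EXPRs built above behave like the C comma operator, e.g.
   folding something of the shape "f () * g () * 0" to 0 while keeping
   both calls and their order.  demo_omit_two_operands is a hypothetical
   stand-alone sketch.  */

#include <cstdio>

static int f (void) { std::puts ("f"); return 1; }
static int g (void) { std::puts ("g"); return 2; }

static int
demo_omit_two_operands (void)
{
  /* The folded form: OMITTED1 (f ()) is evaluated first, then OMITTED2
     (g ()), then the converted RESULT (0) is the value.  */
  return (f (), (g (), 0));
}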
4493 /* Return a simplified tree node for the truth-negation of ARG. This
4494 never alters ARG itself. We assume that ARG is an operation that
4495 returns a truth value (0 or 1).
4497 FIXME: one would think we would fold the result, but it causes
4498 problems with the dominator optimizer. */
4500 static tree
4501 fold_truth_not_expr (location_t loc, tree arg)
4503 tree type = TREE_TYPE (arg);
4504 enum tree_code code = TREE_CODE (arg);
4505 location_t loc1, loc2;
4507 /* If this is a comparison, we can simply invert it, except for
4508 floating-point non-equality comparisons, in which case we just
4509 enclose a TRUTH_NOT_EXPR around what we have. */
4511 if (TREE_CODE_CLASS (code) == tcc_comparison)
4513 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4514 if (FLOAT_TYPE_P (op_type)
4515 && flag_trapping_math
4516 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4517 && code != NE_EXPR && code != EQ_EXPR)
4518 return NULL_TREE;
4520 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4521 if (code == ERROR_MARK)
4522 return NULL_TREE;
4524 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4525 TREE_OPERAND (arg, 1));
4526 copy_warning (ret, arg);
4527 return ret;
4530 switch (code)
4532 case INTEGER_CST:
4533 return constant_boolean_node (integer_zerop (arg), type);
4535 case TRUTH_AND_EXPR:
4536 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4537 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4538 return build2_loc (loc, TRUTH_OR_EXPR, type,
4539 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4540 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4542 case TRUTH_OR_EXPR:
4543 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4544 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4545 return build2_loc (loc, TRUTH_AND_EXPR, type,
4546 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4547 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4549 case TRUTH_XOR_EXPR:
4550 /* Here we can invert either operand. We invert the first operand
4551 unless the second operand is a TRUTH_NOT_EXPR in which case our
4552 result is the XOR of the first operand with the inside of the
4553 negation of the second operand. */
4555 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4556 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4557 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4558 else
4559 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4560 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4561 TREE_OPERAND (arg, 1));
4563 case TRUTH_ANDIF_EXPR:
4564 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4565 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4566 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4567 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4568 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4570 case TRUTH_ORIF_EXPR:
4571 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4572 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4573 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4574 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4575 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4577 case TRUTH_NOT_EXPR:
4578 return TREE_OPERAND (arg, 0);
4580 case COND_EXPR:
4582 tree arg1 = TREE_OPERAND (arg, 1);
4583 tree arg2 = TREE_OPERAND (arg, 2);
4585 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4586 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4588 /* A COND_EXPR may have a throw as one operand, which
4589 then has void type. Just leave void operands
4590 as they are. */
4591 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4592 VOID_TYPE_P (TREE_TYPE (arg1))
4593 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4594 VOID_TYPE_P (TREE_TYPE (arg2))
4595 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4598 case COMPOUND_EXPR:
4599 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4600 return build2_loc (loc, COMPOUND_EXPR, type,
4601 TREE_OPERAND (arg, 0),
4602 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4604 case NON_LVALUE_EXPR:
4605 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4606 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4608 CASE_CONVERT:
4609 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4610 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4612 /* fall through */
4614 case FLOAT_EXPR:
4615 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4616 return build1_loc (loc, TREE_CODE (arg), type,
4617 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4619 case BIT_AND_EXPR:
4620 if (!integer_onep (TREE_OPERAND (arg, 1)))
4621 return NULL_TREE;
4622 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4624 case SAVE_EXPR:
4625 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4627 case CLEANUP_POINT_EXPR:
4628 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4629 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4630 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4632 default:
4633 return NULL_TREE;
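/* Illustrative aside, not part of fold-const.cc: naively flipping LT to
   GE is wrong for floating point because both are false on a NaN, which
   is why invert_tree_comparison consults HONOR_NANS and why trapping
   math bails out above.  The logical cases are De Morgan's laws.
   demo_truth_not is a hypothetical sketch assuming IEEE doubles.  */

#include <cassert>
#include <cmath>

static void
demo_truth_not (void)
{
  double q = std::nan ("");
  /* Both orderings are false for a NaN operand, so !(q < 1.0) is not
     equivalent to q >= 1.0; the correct inversion of LT is UNGE.  */
  assert (!(q < 1.0) && !(q >= 1.0));

  /* TRUTH_AND_EXPR/TRUTH_OR_EXPR negation is De Morgan.  */
  for (int a = 0; a <= 1; ++a)
    for (int b = 0; b <= 1; ++b)
      assert (!(a && b) == (!a || !b) && !(a || b) == (!a && !b));
}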
4637 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4638 assume that ARG is an operation that returns a truth value (0 or 1
4639 for scalars, 0 or -1 for vectors). Return the folded expression if
4640 folding is successful. Otherwise, return NULL_TREE. */
4642 static tree
4643 fold_invert_truthvalue (location_t loc, tree arg)
4645 tree type = TREE_TYPE (arg);
4646 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4647 ? BIT_NOT_EXPR
4648 : TRUTH_NOT_EXPR,
4649 type, arg);
4652 /* Return a simplified tree node for the truth-negation of ARG. This
4653 never alters ARG itself. We assume that ARG is an operation that
4654 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4656 tree
4657 invert_truthvalue_loc (location_t loc, tree arg)
4659 if (TREE_CODE (arg) == ERROR_MARK)
4660 return arg;
4662 tree type = TREE_TYPE (arg);
4663 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4664 ? BIT_NOT_EXPR
4665 : TRUTH_NOT_EXPR,
4666 type, arg);
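/* Illustrative aside, not part of fold-const.cc: vector truth values are
   0 / -1 per lane, so the BIT_NOT_EXPR chosen above is an exact logical
   negation there, while scalar 0 / 1 booleans need TRUTH_NOT_EXPR.  A
   hypothetical one-lane sketch: */

#include <cassert>

static void
demo_vector_truth_not (void)
{
  int lane_true = -1, lane_false = 0;  /* one lane of a boolean vector */
  assert (~lane_true == lane_false && ~lane_false == lane_true);
  /* For a scalar 0/1 boolean, bitwise NOT would be wrong: ~1 == -2.  */
}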
4669 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4670 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4671 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4672 is the original memory reference used to preserve the alias set of
4673 the access. */
4675 static tree
4676 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4677 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4678 int unsignedp, int reversep)
4680 tree result, bftype;
4682 /* Attempt not to lose the access path if possible. */
4683 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4685 tree ninner = TREE_OPERAND (orig_inner, 0);
4686 machine_mode nmode;
4687 poly_int64 nbitsize, nbitpos;
4688 tree noffset;
4689 int nunsignedp, nreversep, nvolatilep = 0;
4690 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4691 &noffset, &nmode, &nunsignedp,
4692 &nreversep, &nvolatilep);
4693 if (base == inner
4694 && noffset == NULL_TREE
4695 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4696 && !reversep
4697 && !nreversep
4698 && !nvolatilep)
4700 inner = ninner;
4701 bitpos -= nbitpos;
4705 alias_set_type iset = get_alias_set (orig_inner);
4706 if (iset == 0 && get_alias_set (inner) != iset)
4707 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4708 build_fold_addr_expr (inner),
4709 build_int_cst (ptr_type_node, 0));
4711 if (known_eq (bitpos, 0) && !reversep)
4713 tree size = TYPE_SIZE (TREE_TYPE (inner));
4714 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4715 || POINTER_TYPE_P (TREE_TYPE (inner)))
4716 && tree_fits_shwi_p (size)
4717 && tree_to_shwi (size) == bitsize)
4718 return fold_convert_loc (loc, type, inner);
4721 bftype = type;
4722 if (TYPE_PRECISION (bftype) != bitsize
4723 || TYPE_UNSIGNED (bftype) == !unsignedp)
4724 bftype = build_nonstandard_integer_type (bitsize, 0);
4726 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4727 bitsize_int (bitsize), bitsize_int (bitpos));
4728 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4730 if (bftype != type)
4731 result = fold_convert_loc (loc, type, result);
4733 return result;
4736 /* Optimize a bit-field compare.
4738 There are two cases: First is a compare against a constant and the
4739 second is a comparison of two items where the fields are at the same
4740 bit position relative to the start of a chunk (byte, halfword, word)
4741 large enough to contain it. In these cases we can avoid the shift
4742 implicit in bitfield extractions.
4744 For constants, we emit a compare of the shifted constant with the
4745 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4746 compared. For two fields at the same position, we do the ANDs with the
4747 similar mask and compare the result of the ANDs.
4749 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4750 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4751 are the left and right operands of the comparison, respectively.
4753 If the optimization described above can be done, we return the resulting
4754 tree. Otherwise we return zero. */
4756 static tree
4757 optimize_bit_field_compare (location_t loc, enum tree_code code,
4758 tree compare_type, tree lhs, tree rhs)
4760 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4761 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4762 tree type = TREE_TYPE (lhs);
4763 tree unsigned_type;
4764 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4765 machine_mode lmode, rmode;
4766 scalar_int_mode nmode;
4767 int lunsignedp, runsignedp;
4768 int lreversep, rreversep;
4769 int lvolatilep = 0, rvolatilep = 0;
4770 tree linner, rinner = NULL_TREE;
4771 tree mask;
4772 tree offset;
4774 /* Get all the information about the extractions being done. If the bit size
4775 is the same as the size of the underlying object, we aren't doing an
4776 extraction at all and so can do nothing. We also don't want to
4777 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4778 then will no longer be able to replace it. */
4779 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4780 &lunsignedp, &lreversep, &lvolatilep);
4781 if (linner == lhs
4782 || !known_size_p (plbitsize)
4783 || !plbitsize.is_constant (&lbitsize)
4784 || !plbitpos.is_constant (&lbitpos)
4785 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4786 || offset != 0
4787 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4788 || lvolatilep)
4789 return 0;
4791 if (const_p)
4792 rreversep = lreversep;
4793 else
4795 /* If this is not a constant, we can only do something if bit positions,
4796 sizes, signedness and storage order are the same. */
4797 rinner
4798 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4799 &runsignedp, &rreversep, &rvolatilep);
4801 if (rinner == rhs
4802 || maybe_ne (lbitpos, rbitpos)
4803 || maybe_ne (lbitsize, rbitsize)
4804 || lunsignedp != runsignedp
4805 || lreversep != rreversep
4806 || offset != 0
4807 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4808 || rvolatilep)
4809 return 0;
4812 /* Honor the C++ memory model and mimic what RTL expansion does. */
4813 poly_uint64 bitstart = 0;
4814 poly_uint64 bitend = 0;
4815 if (TREE_CODE (lhs) == COMPONENT_REF)
4817 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4818 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4819 return 0;
4822 /* See if we can find a mode to refer to this field. We should be able to,
4823 but fail if we can't. */
4824 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4825 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4826 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4827 TYPE_ALIGN (TREE_TYPE (rinner))),
4828 BITS_PER_WORD, false, &nmode))
4829 return 0;
4831 /* Set signed and unsigned types of the precision of this mode for the
4832 shifts below. */
4833 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4835 /* Compute the bit position and size for the new reference and our offset
4836 within it. If the new reference is the same size as the original, we
4837 won't optimize anything, so return zero. */
4838 nbitsize = GET_MODE_BITSIZE (nmode);
4839 nbitpos = lbitpos & ~ (nbitsize - 1);
4840 lbitpos -= nbitpos;
4841 if (nbitsize == lbitsize)
4842 return 0;
4844 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4845 lbitpos = nbitsize - lbitsize - lbitpos;
4847 /* Make the mask to be used against the extracted field. */
4848 mask = build_int_cst_type (unsigned_type, -1);
4849 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4850 mask = const_binop (RSHIFT_EXPR, mask,
4851 size_int (nbitsize - lbitsize - lbitpos));
4853 if (! const_p)
4855 if (nbitpos < 0)
4856 return 0;
4858 /* If not comparing with constant, just rework the comparison
4859 and return. */
4860 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4861 nbitsize, nbitpos, 1, lreversep);
4862 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4863 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4864 nbitsize, nbitpos, 1, rreversep);
4865 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4866 return fold_build2_loc (loc, code, compare_type, t1, t2);
4869 /* Otherwise, we are handling the constant case. See if the constant is too
4870 big for the field. Warn and return a tree for 0 (false) if so. We do
4871 this not only for its own sake, but to avoid having to test for this
4872 error case below. If we didn't, we might generate wrong code.
4874 For unsigned fields, the constant shifted right by the field length should
4875 be all zero. For signed fields, the high-order bits should agree with
4876 the sign bit. */
4878 if (lunsignedp)
4880 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4882 warning (0, "comparison is always %d due to width of bit-field",
4883 code == NE_EXPR);
4884 return constant_boolean_node (code == NE_EXPR, compare_type);
4887 else
4889 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4890 if (tem != 0 && tem != -1)
4892 warning (0, "comparison is always %d due to width of bit-field",
4893 code == NE_EXPR);
4894 return constant_boolean_node (code == NE_EXPR, compare_type);
4898 if (nbitpos < 0)
4899 return 0;
4901 /* Single-bit compares should always be against zero. */
4902 if (lbitsize == 1 && ! integer_zerop (rhs))
4904 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4905 rhs = build_int_cst (type, 0);
4908 /* Make a new bitfield reference, shift the constant over the
4909 appropriate number of bits and mask it with the computed mask
4910 (in case this was a signed field). If we changed it, make a new one. */
4911 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4912 nbitsize, nbitpos, 1, lreversep);
4914 rhs = const_binop (BIT_AND_EXPR,
4915 const_binop (LSHIFT_EXPR,
4916 fold_convert_loc (loc, unsigned_type, rhs),
4917 size_int (lbitpos)),
4918 mask);
4920 lhs = build2_loc (loc, code, compare_type,
4921 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4922 return lhs;
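/* Illustrative aside, not part of fold-const.cc: the constant case above
   rewrites a bit-field compare into one AND and one compare on the
   containing word, with no shift to extract the field.  The same
   arithmetic on a plain uint32_t word holding a 3-bit field at bit 4;
   demo_bit_field_compare is a hypothetical sketch.  */

#include <cassert>
#include <cstdint>

static void
demo_bit_field_compare (void)
{
  const unsigned nbitsize = 32, lbitsize = 3, lbitpos = 4;
  /* Two shifts build LBITSIZE ones at position LBITPOS, mirroring the
     LSHIFT_EXPR/RSHIFT_EXPR mask construction above.  */
  uint32_t mask = (~UINT32_C (0) << (nbitsize - lbitsize))
                  >> (nbitsize - lbitsize - lbitpos);  /* 0x70 */
  uint32_t word = 0xdeadbe50;  /* field value: (0x50 >> 4) & 7 == 5 */
  uint32_t rhs = 5;
  /* "field == 5" becomes: compare the masked word against the constant
     shifted into the field's position.  */
  assert ((word & mask) == ((rhs << lbitpos) & mask));
}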
4925 /* Subroutine for fold_truth_andor_1: decode a field reference.
4927 If EXP is a comparison reference, we return the innermost reference.
4929 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4930 set to the starting bit number.
4932 If the innermost field can be completely contained in a mode-sized
4933 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4935 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4936 otherwise it is not changed.
4938 *PUNSIGNEDP is set to the signedness of the field.
4940 *PREVERSEP is set to the storage order of the field.
4942 *PMASK is set to the mask used. This is either contained in a
4943 BIT_AND_EXPR or derived from the width of the field.
4945 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4947 Return 0 if this is not a component reference or is one that we can't
4948 do anything with. */
4950 static tree
4951 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4952 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4953 int *punsignedp, int *preversep, int *pvolatilep,
4954 tree *pmask, tree *pand_mask)
4956 tree exp = *exp_;
4957 tree outer_type = 0;
4958 tree and_mask = 0;
4959 tree mask, inner, offset;
4960 tree unsigned_type;
4961 unsigned int precision;
4963 /* All the optimizations using this function assume integer fields.
4964 There are problems with FP fields since the type_for_size call
4965 below can fail for, e.g., XFmode. */
4966 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4967 return NULL_TREE;
4969 /* We are interested in the bare arrangement of bits, so strip everything
4970 that doesn't affect the machine mode. However, record the type of the
4971 outermost expression if it may matter below. */
4972 if (CONVERT_EXPR_P (exp)
4973 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4974 outer_type = TREE_TYPE (exp);
4975 STRIP_NOPS (exp);
4977 if (TREE_CODE (exp) == BIT_AND_EXPR)
4979 and_mask = TREE_OPERAND (exp, 1);
4980 exp = TREE_OPERAND (exp, 0);
4981 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4982 if (TREE_CODE (and_mask) != INTEGER_CST)
4983 return NULL_TREE;
4986 poly_int64 poly_bitsize, poly_bitpos;
4987 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4988 pmode, punsignedp, preversep, pvolatilep);
4989 if ((inner == exp && and_mask == 0)
4990 || !poly_bitsize.is_constant (pbitsize)
4991 || !poly_bitpos.is_constant (pbitpos)
4992 || *pbitsize < 0
4993 || offset != 0
4994 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4995 /* Reject out-of-bound accesses (PR79731). */
4996 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4997 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4998 *pbitpos + *pbitsize) < 0))
4999 return NULL_TREE;
5001 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
5002 if (unsigned_type == NULL_TREE)
5003 return NULL_TREE;
5005 *exp_ = exp;
5007 /* If the number of bits in the reference is the same as the bitsize of
5008 the outer type, then the outer type gives the signedness. Otherwise
5009 (in case of a small bitfield) the signedness is unchanged. */
5010 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
5011 *punsignedp = TYPE_UNSIGNED (outer_type);
5013 /* Compute the mask to access the bitfield. */
5014 precision = TYPE_PRECISION (unsigned_type);
5016 mask = build_int_cst_type (unsigned_type, -1);
5018 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5019 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5021 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
5022 if (and_mask != 0)
5023 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
5024 fold_convert_loc (loc, unsigned_type, and_mask), mask);
5026 *pmask = mask;
5027 *pand_mask = and_mask;
5028 return inner;
5031 /* Return true if MASK represents a mask of SIZE ones in the low-order
5032 bit positions and MASK's type is SIGNED. */
5034 static bool
5035 all_ones_mask_p (const_tree mask, unsigned int size)
5037 tree type = TREE_TYPE (mask);
5038 unsigned int precision = TYPE_PRECISION (type);
5040 /* If this function returns true when the type of the mask is
5041 UNSIGNED, then there will be errors. In particular see
5042 gcc.c-torture/execute/990326-1.c. There does not appear to be
5043 any documentation paper trail as to why this is so. But the
5044 pre-wide-int code worked with that restriction and it has been
5045 preserved here. */
5046 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
5047 return false;
5049 return wi::mask (size, false, precision) == wi::to_wide (mask);
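/* Illustrative aside, not part of fold-const.cc: the double shift used in
   decode_field_reference builds the mask of *PBITSIZE low-order ones
   that wi::mask denotes above.  A stand-alone check at a fixed 32-bit
   precision; demo_low_order_mask is a hypothetical sketch.  */

#include <cassert>
#include <cstdint>

static void
demo_low_order_mask (unsigned size)  /* requires 0 < size < 32 */
{
  const unsigned precision = 32;  /* precision of the mask's type */
  uint32_t mask = ~UINT32_C (0);
  mask <<= precision - size;      /* keep SIZE ones at the top... */
  mask >>= precision - size;      /* ...then shift them to the bottom */
  assert (mask == (UINT32_C (1) << size) - 1);
}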
5052 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
5053 represents the sign bit of EXP's type. If EXP represents a sign
5054 or zero extension, also test VAL against the unextended type.
5055 The return value is the (sub)expression whose sign bit is VAL,
5056 or NULL_TREE otherwise. */
5058 tree
5059 sign_bit_p (tree exp, const_tree val)
5061 int width;
5062 tree t;
5064 /* Tree EXP must have an integral type. */
5065 t = TREE_TYPE (exp);
5066 if (! INTEGRAL_TYPE_P (t))
5067 return NULL_TREE;
5069 /* Tree VAL must be an integer constant. */
5070 if (TREE_CODE (val) != INTEGER_CST
5071 || TREE_OVERFLOW (val))
5072 return NULL_TREE;
5074 width = TYPE_PRECISION (t);
5075 if (wi::only_sign_bit_p (wi::to_wide (val), width))
5076 return exp;
5078 /* Handle extension from a narrower type. */
5079 if (TREE_CODE (exp) == NOP_EXPR
5080 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5081 return sign_bit_p (TREE_OPERAND (exp, 0), val);
5083 return NULL_TREE;
5086 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5087 operand is simple enough to be evaluated unconditionally. */
5089 static bool
5090 simple_operand_p (const_tree exp)
5092 /* Strip any conversions that don't change the machine mode. */
5093 STRIP_NOPS (exp);
5095 return (CONSTANT_CLASS_P (exp)
5096 || TREE_CODE (exp) == SSA_NAME
5097 || (DECL_P (exp)
5098 && ! TREE_ADDRESSABLE (exp)
5099 && ! TREE_THIS_VOLATILE (exp)
5100 && ! DECL_NONLOCAL (exp)
5101 /* Don't regard global variables as simple. They may be
5102 allocated in ways unknown to the compiler (shared memory,
5103 #pragma weak, etc). */
5104 && ! TREE_PUBLIC (exp)
5105 && ! DECL_EXTERNAL (exp)
5106 /* Weakrefs are not safe to read, since they can be NULL.
5107 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5108 have DECL_WEAK flag set. */
5109 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5110 /* Loading a static variable is unduly expensive, but global
5111 registers aren't expensive. */
5112 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5115 /* Determine if an operand is simple enough to be evaluated unconditionally.
5116 In addition to simple_operand_p, we assume that comparisons, conversions,
5117 and logic-not operations are simple, if their operands are simple, too. */
5119 bool
5120 simple_condition_p (tree exp)
5122 enum tree_code code;
5124 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5125 return false;
5127 while (CONVERT_EXPR_P (exp))
5128 exp = TREE_OPERAND (exp, 0);
5130 code = TREE_CODE (exp);
5132 if (TREE_CODE_CLASS (code) == tcc_comparison)
5133 return (simple_operand_p (TREE_OPERAND (exp, 0))
5134 && simple_operand_p (TREE_OPERAND (exp, 1)));
5136 if (code == TRUTH_NOT_EXPR)
5137 return simple_condition_p (TREE_OPERAND (exp, 0));
5139 return simple_operand_p (exp);
5143 /* The following functions are subroutines to fold_range_test and allow it to
5144 try to change a logical combination of comparisons into a range test.
5146 For example, both
5147 X == 2 || X == 3 || X == 4 || X == 5
5148 and
5149 X >= 2 && X <= 5
5150 are converted to
5151 (unsigned) (X - 2) <= 3
5153 We describe each set of comparisons as being either inside or outside
5154 a range, using a variable named like IN_P, and then describe the
5155 range with a lower and upper bound. If one of the bounds is omitted,
5156 it represents either the highest or lowest value of the type.
5158 In the comments below, we represent a range by two numbers in brackets
5159 preceded by a "+" to designate being inside that range, or a "-" to
5160 designate being outside that range, so the condition can be inverted by
5161 flipping the prefix. An omitted bound is represented by a "-". For
5162 example, "- [-, 10]" means being outside the range starting at the lowest
5163 possible value and ending at 10, in other words, being greater than 10.
5164 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5165 always false.
5167 We set up things so that the missing bounds are handled in a consistent
5168 manner so neither a missing bound nor "true" and "false" need to be
5169 handled using a special case. */
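/* Illustrative aside, not part of fold-const.cc: the rewrite quoted above
   can be checked exhaustively; demo_range_test is a hypothetical
   sketch.  */

#include <cassert>

static void
demo_range_test (void)
{
  for (int x = -1000; x <= 1000; ++x)
    {
      bool ored = x == 2 || x == 3 || x == 4 || x == 5;
      bool ranged = x >= 2 && x <= 5;
      /* The unsigned subtraction sends every value below 2 far above 3,
         so a single compare covers both bounds.  */
      bool folded = (unsigned) (x - 2) <= 3;
      assert (ored == ranged && ranged == folded);
    }
}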
5171 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5172 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5173 and UPPER1_P are nonzero if the respective argument is an upper bound
5174 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5175 must be specified for a comparison. ARG1 will be converted to ARG0's
5176 type if both are specified. */
5178 static tree
5179 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5180 tree arg1, int upper1_p)
5182 tree tem;
5183 int result;
5184 int sgn0, sgn1;
5186 /* If neither arg represents infinity, do the normal operation.
5187 Else, if not a comparison, return infinity. Else handle the special
5188 comparison rules. Note that most of the cases below won't occur, but
5189 are handled for consistency. */
5191 if (arg0 != 0 && arg1 != 0)
5193 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5194 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5195 STRIP_NOPS (tem);
5196 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5199 if (TREE_CODE_CLASS (code) != tcc_comparison)
5200 return 0;
5202 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5203 for neither. In real maths, we cannot assume open-ended ranges are
5204 the same. But this is computer arithmetic, where numbers are finite.
5205 We can therefore model every unbounded range as bounded by
5206 a value Z, Z being greater than any representable number. This permits
5207 us to treat unbounded ranges as equal. */
5208 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5209 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5210 switch (code)
5212 case EQ_EXPR:
5213 result = sgn0 == sgn1;
5214 break;
5215 case NE_EXPR:
5216 result = sgn0 != sgn1;
5217 break;
5218 case LT_EXPR:
5219 result = sgn0 < sgn1;
5220 break;
5221 case LE_EXPR:
5222 result = sgn0 <= sgn1;
5223 break;
5224 case GT_EXPR:
5225 result = sgn0 > sgn1;
5226 break;
5227 case GE_EXPR:
5228 result = sgn0 >= sgn1;
5229 break;
5230 default:
5231 gcc_unreachable ();
5234 return constant_boolean_node (result, type);
5237 /* Helper routine for make_range. Perform one step for it, return
5238 new expression if the loop should continue or NULL_TREE if it should
5239 stop. */
5241 tree
5242 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5243 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5244 bool *strict_overflow_p)
5246 tree arg0_type = TREE_TYPE (arg0);
5247 tree n_low, n_high, low = *p_low, high = *p_high;
5248 int in_p = *p_in_p, n_in_p;
5250 switch (code)
5252 case TRUTH_NOT_EXPR:
5253 /* We can only do something if the range is testing for zero. */
5254 if (low == NULL_TREE || high == NULL_TREE
5255 || ! integer_zerop (low) || ! integer_zerop (high))
5256 return NULL_TREE;
5257 *p_in_p = ! in_p;
5258 return arg0;
5260 case EQ_EXPR: case NE_EXPR:
5261 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5262 /* We can only do something if the range is testing for zero
5263 and if the second operand is an integer constant. Note that
5264 saying something is "in" the range we make is done by
5265 complementing IN_P, since it is already set for the initial case
5266 of being not equal to zero; "out" means leaving it alone.
5267 if (low == NULL_TREE || high == NULL_TREE
5268 || ! integer_zerop (low) || ! integer_zerop (high)
5269 || TREE_CODE (arg1) != INTEGER_CST)
5270 return NULL_TREE;
5272 switch (code)
5274 case NE_EXPR: /* - [c, c] */
5275 low = high = arg1;
5276 break;
5277 case EQ_EXPR: /* + [c, c] */
5278 in_p = ! in_p, low = high = arg1;
5279 break;
5280 case GT_EXPR: /* - [-, c] */
5281 low = 0, high = arg1;
5282 break;
5283 case GE_EXPR: /* + [c, -] */
5284 in_p = ! in_p, low = arg1, high = 0;
5285 break;
5286 case LT_EXPR: /* - [c, -] */
5287 low = arg1, high = 0;
5288 break;
5289 case LE_EXPR: /* + [-, c] */
5290 in_p = ! in_p, low = 0, high = arg1;
5291 break;
5292 default:
5293 gcc_unreachable ();
5296 /* If this is an unsigned comparison, we also know that EXP is
5297 greater than or equal to zero. We base the range tests we make
5298 on that fact, so we record it here so we can parse existing
5299 range tests. We test arg0_type since often the return type
5300 of, e.g. EQ_EXPR, is boolean. */
5301 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5303 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5304 in_p, low, high, 1,
5305 build_int_cst (arg0_type, 0),
5306 NULL_TREE))
5307 return NULL_TREE;
5309 in_p = n_in_p, low = n_low, high = n_high;
5311 /* If the high bound is missing, but we have a nonzero low
5312 bound, reverse the range so it goes from zero to the low bound
5313 minus 1. */
5314 if (high == 0 && low && ! integer_zerop (low))
5316 in_p = ! in_p;
5317 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5318 build_int_cst (TREE_TYPE (low), 1), 0);
5319 low = build_int_cst (arg0_type, 0);
5323 *p_low = low;
5324 *p_high = high;
5325 *p_in_p = in_p;
5326 return arg0;
5328 case NEGATE_EXPR:
5329 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5330 low and high are non-NULL, then normalize will DTRT. */
5331 if (!TYPE_UNSIGNED (arg0_type)
5332 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5334 if (low == NULL_TREE)
5335 low = TYPE_MIN_VALUE (arg0_type);
5336 if (high == NULL_TREE)
5337 high = TYPE_MAX_VALUE (arg0_type);
5340 /* (-x) IN [a,b] -> x in [-b, -a] */
5341 n_low = range_binop (MINUS_EXPR, exp_type,
5342 build_int_cst (exp_type, 0),
5343 0, high, 1);
5344 n_high = range_binop (MINUS_EXPR, exp_type,
5345 build_int_cst (exp_type, 0),
5346 0, low, 0);
5347 if (n_high != 0 && TREE_OVERFLOW (n_high))
5348 return NULL_TREE;
5349 goto normalize;
5351 case BIT_NOT_EXPR:
5352 /* ~ X -> -X - 1 */
5353 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5354 build_int_cst (exp_type, 1));
5356 case PLUS_EXPR:
5357 case MINUS_EXPR:
5358 if (TREE_CODE (arg1) != INTEGER_CST)
5359 return NULL_TREE;
5361 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5362 move a constant to the other side. */
5363 if (!TYPE_UNSIGNED (arg0_type)
5364 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5365 return NULL_TREE;
5367 /* If EXP is signed, any overflow in the computation is undefined,
5368 so we don't worry about it so long as our computations on
5369 the bounds don't overflow. For unsigned, overflow is defined
5370 and this is exactly the right thing. */
5371 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5372 arg0_type, low, 0, arg1, 0);
5373 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5374 arg0_type, high, 1, arg1, 0);
5375 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5376 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5377 return NULL_TREE;
5379 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5380 *strict_overflow_p = true;
5382 normalize:
5383 /* Check for an unsigned range which has wrapped around the maximum
5384 value thus making n_high < n_low, and normalize it. */
5385 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5387 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5388 build_int_cst (TREE_TYPE (n_high), 1), 0);
5389 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5390 build_int_cst (TREE_TYPE (n_low), 1), 0);
5392 /* If the range is of the form +/- [ x+1, x ], we won't
5393 be able to normalize it. But then, it represents the
5394 whole range or the empty set, so make it
5395 +/- [ -, - ]. */
5396 if (tree_int_cst_equal (n_low, low)
5397 && tree_int_cst_equal (n_high, high))
5398 low = high = 0;
5399 else
5400 in_p = ! in_p;
5402 else
5403 low = n_low, high = n_high;
5405 *p_low = low;
5406 *p_high = high;
5407 *p_in_p = in_p;
5408 return arg0;
5410 CASE_CONVERT:
5411 case NON_LVALUE_EXPR:
5412 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5413 return NULL_TREE;
5415 if (! INTEGRAL_TYPE_P (arg0_type)
5416 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5417 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5418 return NULL_TREE;
5420 n_low = low, n_high = high;
5422 if (n_low != 0)
5423 n_low = fold_convert_loc (loc, arg0_type, n_low);
5425 if (n_high != 0)
5426 n_high = fold_convert_loc (loc, arg0_type, n_high);
5428 /* If we're converting arg0 from an unsigned type, to exp,
5429 a signed type, we will be doing the comparison as unsigned.
5430 The tests above have already verified that LOW and HIGH
5431 are both positive.
5433 So we have to ensure that we will handle large unsigned
5434 values the same way that the current signed bounds treat
5435 negative values. */
5437 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5439 tree high_positive;
5440 tree equiv_type;
5441 /* For fixed-point modes, we need to pass the saturating flag
5442 as the 2nd parameter. */
5443 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5444 equiv_type
5445 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5446 TYPE_SATURATING (arg0_type));
5447 else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5448 equiv_type = arg0_type;
5449 else
5450 equiv_type
5451 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5453 /* A range without an upper bound is, naturally, unbounded.
5454 Since convert would have cropped a very large value, use
5455 the max value for the destination type. */
5456 high_positive
5457 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5458 : TYPE_MAX_VALUE (arg0_type);
5460 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5461 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5462 fold_convert_loc (loc, arg0_type,
5463 high_positive),
5464 build_int_cst (arg0_type, 1));
5466 /* If the low bound is specified, "and" the range with the
5467 range for which the original unsigned value will be
5468 positive. */
5469 if (low != 0)
5471 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5472 1, fold_convert_loc (loc, arg0_type,
5473 integer_zero_node),
5474 high_positive))
5475 return NULL_TREE;
5477 in_p = (n_in_p == in_p);
5479 else
5481 /* Otherwise, "or" the range with the range of the input
5482 that will be interpreted as negative. */
5483 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5484 1, fold_convert_loc (loc, arg0_type,
5485 integer_zero_node),
5486 high_positive))
5487 return NULL_TREE;
5489 in_p = (in_p != n_in_p);
5493 /* Otherwise, if we are converting arg0 from signed type, to exp,
5494 an unsigned type, we will do the comparison as signed. If
5495 high is non-NULL, we punt above if it doesn't fit in the signed
5496 type, so if we get through here, +[-, high] or +[low, high] are
5497 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5498 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5499 high is not, the +[low, -] range is equivalent to union of
5500 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5501 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5502 low being 0, which should be treated as [-, -]. */
5503 else if (TYPE_UNSIGNED (exp_type)
5504 && !TYPE_UNSIGNED (arg0_type)
5505 && low
5506 && !high)
5508 if (integer_zerop (low))
5509 n_low = NULL_TREE;
5510 else
5512 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5513 n_low, build_int_cst (arg0_type, -1));
5514 n_low = build_zero_cst (arg0_type);
5515 in_p = !in_p;
5519 *p_low = n_low;
5520 *p_high = n_high;
5521 *p_in_p = in_p;
5522 return arg0;
5524 default:
5525 return NULL_TREE;
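/* Illustrative aside, not part of fold-const.cc: the PLUS_EXPR/MINUS_EXPR
   case above moves the constant across the bounds, and "normalize" flips
   IN_P when the adjusted unsigned bounds wrap.  An exhaustive uint8_t
   check; demo_make_range_step is a hypothetical sketch.  */

#include <cassert>
#include <cstdint>

static void
demo_make_range_step (void)
{
  for (unsigned v = 0; v < 256; ++v)
    {
      uint8_t x = v;
      /* (x - 2) in [0, 3]  <==>  x in [2, 5]: bounds shifted by +2.  */
      assert (((uint8_t) (x - 2) <= 3) == (x >= 2 && x <= 5));
      /* (x + 2) in [0, 1]  <==>  x in [254, 255]: the shifted bounds
         [-2, -1] wrap, so the range normalizes to "- [0, 253]".  */
      assert (((uint8_t) (x + 2) <= 1) == !(x <= 253));
    }
}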
5529 /* Given EXP, a logical expression, set the range it is testing into
5530 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5531 actually being tested. *PLOW and *PHIGH will be made of the same
5532 type as the returned expression. If EXP is not a comparison, we
5533 will most likely not be returning a useful value and range. Set
5534 *STRICT_OVERFLOW_P to true if the return value is only valid
5535 because signed overflow is undefined; otherwise, do not change
5536 *STRICT_OVERFLOW_P. */
5538 tree
5539 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5540 bool *strict_overflow_p)
5542 enum tree_code code;
5543 tree arg0, arg1 = NULL_TREE;
5544 tree exp_type, nexp;
5545 int in_p;
5546 tree low, high;
5547 location_t loc = EXPR_LOCATION (exp);
5549 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5550 and see if we can refine the range. Some of the cases below may not
5551 happen, but it doesn't seem worth worrying about this. We "continue"
5552 the outer loop when we've changed something; otherwise we "break"
5553 the switch, which will "break" the while. */
5555 in_p = 0;
5556 low = high = build_int_cst (TREE_TYPE (exp), 0);
5558 while (1)
5560 code = TREE_CODE (exp);
5561 exp_type = TREE_TYPE (exp);
5562 arg0 = NULL_TREE;
5564 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5566 if (TREE_OPERAND_LENGTH (exp) > 0)
5567 arg0 = TREE_OPERAND (exp, 0);
5568 if (TREE_CODE_CLASS (code) == tcc_binary
5569 || TREE_CODE_CLASS (code) == tcc_comparison
5570 || (TREE_CODE_CLASS (code) == tcc_expression
5571 && TREE_OPERAND_LENGTH (exp) > 1))
5572 arg1 = TREE_OPERAND (exp, 1);
5574 if (arg0 == NULL_TREE)
5575 break;
5577 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5578 &high, &in_p, strict_overflow_p);
5579 if (nexp == NULL_TREE)
5580 break;
5581 exp = nexp;
5584 /* If EXP is a constant, we can evaluate whether this is true or false. */
5585 if (TREE_CODE (exp) == INTEGER_CST)
5587 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5588 exp, 0, low, 0))
5589 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5590 exp, 1, high, 1)));
5591 low = high = 0;
5592 exp = 0;
5595 *pin_p = in_p, *plow = low, *phigh = high;
5596 return exp;
5599 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5600 a bitwise check, i.e. when
5601 LOW == 0xXX...X00...0
5602 HIGH == 0xXX...X11...1.
5603 Return the corresponding mask in MASK and the stem in VALUE. */
5605 static bool
5606 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5607 tree *value)
5609 if (TREE_CODE (low) != INTEGER_CST
5610 || TREE_CODE (high) != INTEGER_CST)
5611 return false;
5613 unsigned prec = TYPE_PRECISION (type);
5614 wide_int lo = wi::to_wide (low, prec);
5615 wide_int hi = wi::to_wide (high, prec);
5617 wide_int end_mask = lo ^ hi;
5618 if ((end_mask & (end_mask + 1)) != 0
5619 || (lo & end_mask) != 0)
5620 return false;
5622 wide_int stem_mask = ~end_mask;
5623 wide_int stem = lo & stem_mask;
5624 if (stem != (hi & stem_mask))
5625 return false;
5627 *mask = wide_int_to_tree (type, stem_mask);
5628 *value = wide_int_to_tree (type, stem);
5630 return true;
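/* Illustrative sketch, not part of GCC: the shape maskable_range_p
   recognizes, with hypothetical constants.  For LOW == 0x20 and
   HIGH == 0x3F, LOW ^ HIGH == 0x1F is a mask of trailing ones and
   LOW has those bits clear, so the range check becomes a single
   masked equality against the stem 0x20.  */
static int
maskable_range_example (unsigned x)
{
  return (x & ~0x1Fu) == 0x20u;	/* same as x >= 0x20 && x <= 0x3F */
}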
5633 /* Helper routine for build_range_check and match.pd. Return the type to
5634 perform the check or NULL if it shouldn't be optimized. */
5636 tree
5637 range_check_type (tree etype)
5639 /* First make sure that arithmetic in this type is valid, then make sure
5640 that it wraps around. */
5641 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5642 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5644 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5646 tree utype, minv, maxv;
5648 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5649 for the type in question, as we rely on this here. */
5650 utype = unsigned_type_for (etype);
5651 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5652 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5653 build_int_cst (TREE_TYPE (maxv), 1), 1);
5654 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5656 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5657 minv, 1, maxv, 1)))
5658 etype = utype;
5659 else
5660 return NULL_TREE;
5662 else if (POINTER_TYPE_P (etype)
5663 || TREE_CODE (etype) == OFFSET_TYPE
5664 /* Right now all BITINT_TYPEs satisfy
5665 (unsigned) max + 1 == (unsigned) min, so no need to verify
5666 that like for INTEGER_TYPEs. */
5667 || TREE_CODE (etype) == BITINT_TYPE)
5668 etype = unsigned_type_for (etype);
5669 return etype;
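/* Illustrative sketch, not part of GCC: the wrap-around property
   range_check_type verifies before using the unsigned type, spelled
   out for a 32-bit two's complement int (an assumption of this
   example).  */
static int
range_check_type_example (void)
{
  unsigned umax_plus_one = 0x7FFFFFFFu + 1u;	/* (unsigned) INT_MAX + 1 */
  unsigned umin = 0x80000000u;			/* (unsigned) INT_MIN */
  return umax_plus_one == umin;			/* nonzero: utype is usable */
}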
5672 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5673 type, TYPE, return an expression to test if EXP is in (or out of, depending
5674 on IN_P) the range. Return 0 if the test couldn't be created. */
5676 tree
5677 build_range_check (location_t loc, tree type, tree exp, int in_p,
5678 tree low, tree high)
5680 tree etype = TREE_TYPE (exp), mask, value;
5682 /* Disable this optimization for function pointer expressions
5683 on targets that require function pointer canonicalization. */
5684 if (targetm.have_canonicalize_funcptr_for_compare ()
5685 && POINTER_TYPE_P (etype)
5686 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5687 return NULL_TREE;
5689 if (! in_p)
5691 value = build_range_check (loc, type, exp, 1, low, high);
5692 if (value != 0)
5693 return invert_truthvalue_loc (loc, value);
5695 return 0;
5698 if (low == 0 && high == 0)
5699 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5701 if (low == 0)
5702 return fold_build2_loc (loc, LE_EXPR, type, exp,
5703 fold_convert_loc (loc, etype, high));
5705 if (high == 0)
5706 return fold_build2_loc (loc, GE_EXPR, type, exp,
5707 fold_convert_loc (loc, etype, low));
5709 if (operand_equal_p (low, high, 0))
5710 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5711 fold_convert_loc (loc, etype, low));
5713 if (TREE_CODE (exp) == BIT_AND_EXPR
5714 && maskable_range_p (low, high, etype, &mask, &value))
5715 return fold_build2_loc (loc, EQ_EXPR, type,
5716 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5717 exp, mask),
5718 value);
5720 if (integer_zerop (low))
5722 if (! TYPE_UNSIGNED (etype))
5724 etype = unsigned_type_for (etype);
5725 high = fold_convert_loc (loc, etype, high);
5726 exp = fold_convert_loc (loc, etype, exp);
5728 return build_range_check (loc, type, exp, 1, 0, high);
5731 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5732 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5734 int prec = TYPE_PRECISION (etype);
5736 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5738 if (TYPE_UNSIGNED (etype))
5740 tree signed_etype = signed_type_for (etype);
5741 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5742 etype
5743 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5744 else
5745 etype = signed_etype;
5746 exp = fold_convert_loc (loc, etype, exp);
5748 return fold_build2_loc (loc, GT_EXPR, type, exp,
5749 build_int_cst (etype, 0));
5753 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5754 This requires wrap-around arithmetic for the type of the expression. */
5755 etype = range_check_type (etype);
5756 if (etype == NULL_TREE)
5757 return NULL_TREE;
5759 high = fold_convert_loc (loc, etype, high);
5760 low = fold_convert_loc (loc, etype, low);
5761 exp = fold_convert_loc (loc, etype, exp);
5763 value = const_binop (MINUS_EXPR, high, low);
5765 if (value != 0 && !TREE_OVERFLOW (value))
5766 return build_range_check (loc, type,
5767 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5768 1, build_int_cst (etype, 0), value);
5770 return 0;
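/* Illustrative sketch, not part of GCC: the single unsigned
   comparison that build_range_check produces for a two-sided range
   test.  The function name is hypothetical; the subtraction must be
   done in a type with wrap-around arithmetic, hence the casts.  */
static int
build_range_check_example (int c)
{
  /* c >= 'a' && c <= 'z' becomes (unsigned) (c - 'a') <= 'z' - 'a'.  */
  return (unsigned) c - 'a' <= (unsigned) ('z' - 'a');
}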
5773 /* Return the predecessor of VAL in its type, handling the infinite case. */
5775 static tree
5776 range_predecessor (tree val)
5778 tree type = TREE_TYPE (val);
5780 if (INTEGRAL_TYPE_P (type)
5781 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5782 return 0;
5783 else
5784 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5785 build_int_cst (TREE_TYPE (val), 1), 0);
5788 /* Return the successor of VAL in its type, handling the infinite case. */
5790 static tree
5791 range_successor (tree val)
5793 tree type = TREE_TYPE (val);
5795 if (INTEGRAL_TYPE_P (type)
5796 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5797 return 0;
5798 else
5799 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5800 build_int_cst (TREE_TYPE (val), 1), 0);
5803 /* Given two ranges, see if we can merge them into one. Return 1 if we
5804 can, 0 if we can't. Set the output range into the specified parameters. */
5806 bool
5807 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5808 tree high0, int in1_p, tree low1, tree high1)
5810 bool no_overlap;
5811 int subset;
5812 int temp;
5813 tree tem;
5814 int in_p;
5815 tree low, high;
5816 int lowequal = ((low0 == 0 && low1 == 0)
5817 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5818 low0, 0, low1, 0)));
5819 int highequal = ((high0 == 0 && high1 == 0)
5820 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5821 high0, 1, high1, 1)));
5823 /* Make range 0 be the range that starts first, or ends last if they
5824 start at the same value. Swap them if range 0 isn't that range. */
5825 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5826 low0, 0, low1, 0))
5827 || (lowequal
5828 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5829 high1, 1, high0, 1))))
5831 temp = in0_p, in0_p = in1_p, in1_p = temp;
5832 tem = low0, low0 = low1, low1 = tem;
5833 tem = high0, high0 = high1, high1 = tem;
5836 /* If the second range is != high1 where high1 is the maximum value of
5837 the type, try first merging with the < high1 range. */
5838 if (low1
5839 && high1
5840 && TREE_CODE (low1) == INTEGER_CST
5841 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5842 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5843 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5844 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5845 && operand_equal_p (low1, high1, 0))
5847 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5848 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5849 !in1_p, NULL_TREE, range_predecessor (low1)))
5850 return true;
5851 /* Similarly for the second range != low1 where low1 is the minimum value
5852 of the type, try first merging with the > low1 range. */
5853 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5854 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5855 !in1_p, range_successor (low1), NULL_TREE))
5856 return true;
5859 /* Now flag two cases, whether the ranges are disjoint or whether the
5860 second range is totally subsumed in the first. Note that the tests
5861 below are simplified by the ones above. */
5862 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5863 high0, 1, low1, 0));
5864 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5865 high1, 1, high0, 1));
5867 /* We now have four cases, depending on whether we are including or
5868 excluding the two ranges. */
5869 if (in0_p && in1_p)
5871 /* If they don't overlap, the result is false. If the second range
5872 is a subset it is the result. Otherwise, the range is from the start
5873 of the second to the end of the first. */
5874 if (no_overlap)
5875 in_p = 0, low = high = 0;
5876 else if (subset)
5877 in_p = 1, low = low1, high = high1;
5878 else
5879 in_p = 1, low = low1, high = high0;
5882 else if (in0_p && ! in1_p)
5884 /* If they don't overlap, the result is the first range. If they are
5885 equal, the result is false. If the second range is a subset of the
5886 first, and the ranges begin at the same place, we go from just after
5887 the end of the second range to the end of the first. If the second
5888 range is not a subset of the first, or if it is a subset and both
5889 ranges end at the same place, the range starts at the start of the
5890 first range and ends just before the second range.
5891 Otherwise, we can't describe this as a single range. */
5892 if (no_overlap)
5893 in_p = 1, low = low0, high = high0;
5894 else if (lowequal && highequal)
5895 in_p = 0, low = high = 0;
5896 else if (subset && lowequal)
5898 low = range_successor (high1);
5899 high = high0;
5900 in_p = 1;
5901 if (low == 0)
5903 /* We are in the weird situation where high0 > high1 but
5904 high1 has no successor. Punt. */
5905 return 0;
5908 else if (! subset || highequal)
5910 low = low0;
5911 high = range_predecessor (low1);
5912 in_p = 1;
5913 if (high == 0)
5915 /* low0 < low1 but low1 has no predecessor. Punt. */
5916 return 0;
5919 else
5920 return 0;
5923 else if (! in0_p && in1_p)
5925 /* If they don't overlap, the result is the second range. If the second
5926 is a subset of the first, the result is false. Otherwise,
5927 the range starts just after the first range and ends at the
5928 end of the second. */
5929 if (no_overlap)
5930 in_p = 1, low = low1, high = high1;
5931 else if (subset || highequal)
5932 in_p = 0, low = high = 0;
5933 else
5935 low = range_successor (high0);
5936 high = high1;
5937 in_p = 1;
5938 if (low == 0)
5940 /* high1 > high0 but high0 has no successor. Punt. */
5941 return 0;
5946 else
5948 /* The case where we are excluding both ranges. Here the complex case
5949 is if they don't overlap. In that case, the only time we have a
5950 range is if they are adjacent. If the second is a subset of the
5951 first, the result is the first. Otherwise, the range to exclude
5952 starts at the beginning of the first range and ends at the end of the
5953 second. */
5954 if (no_overlap)
5956 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5957 range_successor (high0),
5958 1, low1, 0)))
5959 in_p = 0, low = low0, high = high1;
5960 else
5962 /* Canonicalize - [min, x] into - [-, x]. */
5963 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5964 switch (TREE_CODE (TREE_TYPE (low0)))
5966 case ENUMERAL_TYPE:
5967 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5968 GET_MODE_BITSIZE
5969 (TYPE_MODE (TREE_TYPE (low0)))))
5970 break;
5971 /* FALLTHROUGH */
5972 case INTEGER_TYPE:
5973 if (tree_int_cst_equal (low0,
5974 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5975 low0 = 0;
5976 break;
5977 case POINTER_TYPE:
5978 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5979 && integer_zerop (low0))
5980 low0 = 0;
5981 break;
5982 default:
5983 break;
5986 /* Canonicalize - [x, max] into - [x, -]. */
5987 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5988 switch (TREE_CODE (TREE_TYPE (high1)))
5990 case ENUMERAL_TYPE:
5991 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5992 GET_MODE_BITSIZE
5993 (TYPE_MODE (TREE_TYPE (high1)))))
5994 break;
5995 /* FALLTHROUGH */
5996 case INTEGER_TYPE:
5997 if (tree_int_cst_equal (high1,
5998 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5999 high1 = 0;
6000 break;
6001 case POINTER_TYPE:
6002 if (TYPE_UNSIGNED (TREE_TYPE (high1))
6003 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
6004 high1, 1,
6005 build_int_cst (TREE_TYPE (high1), 1),
6006 1)))
6007 high1 = 0;
6008 break;
6009 default:
6010 break;
6013 /* The ranges might also be adjacent between the maximum and
6014 minimum values of the given type. For
6015 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
6016 return + [x + 1, y - 1]. */
6017 if (low0 == 0 && high1 == 0)
6019 low = range_successor (high0);
6020 high = range_predecessor (low1);
6021 if (low == 0 || high == 0)
6022 return 0;
6024 in_p = 1;
6026 else
6027 return 0;
6030 else if (subset)
6031 in_p = 0, low = low0, high = high0;
6032 else
6033 in_p = 0, low = low0, high = high1;
6036 *pin_p = in_p, *plow = low, *phigh = high;
6037 return 1;
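/* Illustrative sketch, not part of GCC: one merge_ranges outcome.
   In "c < '0' || c > '9'" both ranges are excluded, and together
   they cover everything outside ['0', '9'], so the pair merges into
   a single range test.  The function name is hypothetical.  */
static int
merge_ranges_example (int c)
{
  return c < '0' || c > '9';	/* merges to !((unsigned) c - '0' <= 9) */
}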
6041 /* Subroutine of fold, looking inside expressions of the form
6042 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6043 are the three operands of the COND_EXPR. This function is
6044 being used also to optimize A op B ? C : A, by reversing the
6045 comparison first.
6047 Return a folded expression whose code is not a COND_EXPR
6048 anymore, or NULL_TREE if no folding opportunity is found. */
6050 static tree
6051 fold_cond_expr_with_comparison (location_t loc, tree type,
6052 enum tree_code comp_code,
6053 tree arg00, tree arg01, tree arg1, tree arg2)
6055 tree arg1_type = TREE_TYPE (arg1);
6056 tree tem;
6058 STRIP_NOPS (arg1);
6059 STRIP_NOPS (arg2);
6061 /* If we have A op 0 ? A : -A, consider applying the following
6062 transformations:
6064 A == 0? A : -A same as -A
6065 A != 0? A : -A same as A
6066 A >= 0? A : -A same as abs (A)
6067 A > 0? A : -A same as abs (A)
6068 A <= 0? A : -A same as -abs (A)
6069 A < 0? A : -A same as -abs (A)
6071 None of these transformations work for modes with signed
6072 zeros. If A is +/-0, the first two transformations will
6073 change the sign of the result (from +0 to -0, or vice
6074 versa). The last four will fix the sign of the result,
6075 even though the original expressions could be positive or
6076 negative, depending on the sign of A.
6078 Note that all these transformations are correct if A is
6079 NaN, since the two alternatives (A and -A) are also NaNs. */
6080 if (!HONOR_SIGNED_ZEROS (type)
6081 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6082 ? real_zerop (arg01)
6083 : integer_zerop (arg01))
6084 && ((TREE_CODE (arg2) == NEGATE_EXPR
6085 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6086 /* In the case that A is of the form X-Y, '-A' (arg2) may
6087 have already been folded to Y-X, check for that. */
6088 || (TREE_CODE (arg1) == MINUS_EXPR
6089 && TREE_CODE (arg2) == MINUS_EXPR
6090 && operand_equal_p (TREE_OPERAND (arg1, 0),
6091 TREE_OPERAND (arg2, 1), 0)
6092 && operand_equal_p (TREE_OPERAND (arg1, 1),
6093 TREE_OPERAND (arg2, 0), 0))))
6094 switch (comp_code)
6096 case EQ_EXPR:
6097 case UNEQ_EXPR:
6098 tem = fold_convert_loc (loc, arg1_type, arg1);
6099 return fold_convert_loc (loc, type, negate_expr (tem));
6100 case NE_EXPR:
6101 case LTGT_EXPR:
6102 return fold_convert_loc (loc, type, arg1);
6103 case UNGE_EXPR:
6104 case UNGT_EXPR:
6105 if (flag_trapping_math)
6106 break;
6107 /* Fall through. */
6108 case GE_EXPR:
6109 case GT_EXPR:
6110 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6111 break;
6112 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6113 return fold_convert_loc (loc, type, tem);
6114 case UNLE_EXPR:
6115 case UNLT_EXPR:
6116 if (flag_trapping_math)
6117 break;
6118 /* FALLTHRU */
6119 case LE_EXPR:
6120 case LT_EXPR:
6121 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6122 break;
6123 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6124 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6126 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6127 is not: it invokes UB both in abs and in the negation of it.
6128 So, use ABSU_EXPR instead. */
6129 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6130 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6131 tem = negate_expr (tem);
6132 return fold_convert_loc (loc, type, tem);
6134 else
6136 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6137 return negate_expr (fold_convert_loc (loc, type, tem));
6139 default:
6140 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6141 break;
6144 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6145 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6146 both transformations are correct when A is NaN: A != 0
6147 is then true, and A == 0 is false. */
6149 if (!HONOR_SIGNED_ZEROS (type)
6150 && integer_zerop (arg01) && integer_zerop (arg2))
6152 if (comp_code == NE_EXPR)
6153 return fold_convert_loc (loc, type, arg1);
6154 else if (comp_code == EQ_EXPR)
6155 return build_zero_cst (type);
6158 /* Try some transformations of A op B ? A : B.
6160 A == B? A : B same as B
6161 A != B? A : B same as A
6162 A >= B? A : B same as max (A, B)
6163 A > B? A : B same as max (B, A)
6164 A <= B? A : B same as min (A, B)
6165 A < B? A : B same as min (B, A)
6167 As above, these transformations don't work in the presence
6168 of signed zeros. For example, if A and B are zeros of
6169 opposite sign, the first two transformations will change
6170 the sign of the result. In the last four, the original
6171 expressions give different results for (A=+0, B=-0) and
6172 (A=-0, B=+0), but the transformed expressions do not.
6174 The first two transformations are correct if either A or B
6175 is a NaN. In the first transformation, the condition will
6176 be false, and B will indeed be chosen. In the case of the
6177 second transformation, the condition A != B will be true,
6178 and A will be chosen.
6180 The conversions to max() and min() are not correct if B is
6181 a number and A is not. The conditions in the original
6182 expressions will be false, so all four give B. The min()
6183 and max() versions would give a NaN instead. */
6184 if (!HONOR_SIGNED_ZEROS (type)
6185 && operand_equal_for_comparison_p (arg01, arg2)
6186 /* Avoid these transformations if the COND_EXPR may be used
6187 as an lvalue in the C++ front-end. PR c++/19199. */
6188 && (in_gimple_form
6189 || VECTOR_TYPE_P (type)
6190 || (! lang_GNU_CXX ()
6191 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6192 || ! maybe_lvalue_p (arg1)
6193 || ! maybe_lvalue_p (arg2)))
6195 tree comp_op0 = arg00;
6196 tree comp_op1 = arg01;
6197 tree comp_type = TREE_TYPE (comp_op0);
6199 switch (comp_code)
6201 case EQ_EXPR:
6202 return fold_convert_loc (loc, type, arg2);
6203 case NE_EXPR:
6204 return fold_convert_loc (loc, type, arg1);
6205 case LE_EXPR:
6206 case LT_EXPR:
6207 case UNLE_EXPR:
6208 case UNLT_EXPR:
6209 /* In C++ a ?: expression can be an lvalue, so put the
6210 operand which will be used if they are equal first
6211 so that we can convert this back to the
6212 corresponding COND_EXPR. */
6213 if (!HONOR_NANS (arg1))
6215 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6216 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6217 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6218 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6219 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6220 comp_op1, comp_op0);
6221 return fold_convert_loc (loc, type, tem);
6223 break;
6224 case GE_EXPR:
6225 case GT_EXPR:
6226 case UNGE_EXPR:
6227 case UNGT_EXPR:
6228 if (!HONOR_NANS (arg1))
6230 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6231 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6232 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6233 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6234 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6235 comp_op1, comp_op0);
6236 return fold_convert_loc (loc, type, tem);
6238 break;
6239 case UNEQ_EXPR:
6240 if (!HONOR_NANS (arg1))
6241 return fold_convert_loc (loc, type, arg2);
6242 break;
6243 case LTGT_EXPR:
6244 if (!HONOR_NANS (arg1))
6245 return fold_convert_loc (loc, type, arg1);
6246 break;
6247 default:
6248 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6249 break;
6253 return NULL_TREE;
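/* Illustrative sketch, not part of GCC: two of the A op B ? A : C
   shapes handled above, in plain C with hypothetical function
   names.  For floating types these folds are additionally guarded
   by the signed zero and NaN checks in the code above.  */
static int
cond_abs_example (int a)
{
  return a >= 0 ? a : -a;	/* folded to ABS_EXPR <a> */
}

static int
cond_min_example (int a, int b)
{
  return a < b ? a : b;		/* folded to MIN_EXPR <b, a> */
}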
6258 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6259 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6260 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6261 false) >= 2)
6262 #endif
6264 /* EXP is some logical combination of boolean tests. See if we can
6265 merge it into some range test. Return the new tree if so. */
6267 static tree
6268 fold_range_test (location_t loc, enum tree_code code, tree type,
6269 tree op0, tree op1)
6271 int or_op = (code == TRUTH_ORIF_EXPR
6272 || code == TRUTH_OR_EXPR);
6273 int in0_p, in1_p, in_p;
6274 tree low0, low1, low, high0, high1, high;
6275 bool strict_overflow_p = false;
6276 tree tem, lhs, rhs;
6277 const char * const warnmsg = G_("assuming signed overflow does not occur "
6278 "when simplifying range test");
6280 if (!INTEGRAL_TYPE_P (type))
6281 return 0;
6283 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6284 /* If op0 is known true or false and this is a short-circuiting
6285 operation we must not merge with op1 since that makes side-effects
6286 unconditional. So special-case this. */
6287 if (!lhs
6288 && ((code == TRUTH_ORIF_EXPR && in0_p)
6289 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6290 return op0;
6291 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6293 /* If this is an OR operation, invert both sides; we will invert
6294 again at the end. */
6295 if (or_op)
6296 in0_p = ! in0_p, in1_p = ! in1_p;
6298 /* If both expressions are the same, if we can merge the ranges, and we
6299 can build the range test, return it or it inverted. If one of the
6300 ranges is always true or always false, consider it to be the same
6301 expression as the other. */
6302 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6303 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6304 in1_p, low1, high1)
6305 && (tem = (build_range_check (loc, type,
6306 lhs != 0 ? lhs
6307 : rhs != 0 ? rhs : integer_zero_node,
6308 in_p, low, high))) != 0)
6310 if (strict_overflow_p)
6311 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6312 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6315 /* On machines where the branch cost is expensive, if this is a
6316 short-circuited branch and the underlying object on both sides
6317 is the same, make a non-short-circuit operation. */
6318 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6319 if (param_logical_op_non_short_circuit != -1)
6320 logical_op_non_short_circuit
6321 = param_logical_op_non_short_circuit;
6322 if (logical_op_non_short_circuit
6323 && !sanitize_coverage_p ()
6324 && lhs != 0 && rhs != 0
6325 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6326 && operand_equal_p (lhs, rhs, 0))
6328 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6329 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6330 which case we can't do this. */
6331 if (simple_operand_p (lhs))
6332 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6333 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6334 type, op0, op1);
6336 else if (!lang_hooks.decls.global_bindings_p ()
6337 && !CONTAINS_PLACEHOLDER_P (lhs))
6339 tree common = save_expr (lhs);
6341 if ((lhs = build_range_check (loc, type, common,
6342 or_op ? ! in0_p : in0_p,
6343 low0, high0)) != 0
6344 && (rhs = build_range_check (loc, type, common,
6345 or_op ? ! in1_p : in1_p,
6346 low1, high1)) != 0)
6348 if (strict_overflow_p)
6349 fold_overflow_warning (warnmsg,
6350 WARN_STRICT_OVERFLOW_COMPARISON);
6351 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6353 type, lhs, rhs);
6358 return 0;
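/* Illustrative sketch, not part of GCC: what fold_range_test buys
   when branches are expensive.  The short-circuit && below merges
   into one branch-free range check.  The function name is
   hypothetical.  */
static int
fold_range_test_example (int a)
{
  return a >= 0 && a <= 10;	/* merged into (unsigned) a <= 10 */
}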
6361 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6362 bit value. Arrange things so the extra bits will be set to zero if and
6363 only if C is signed-extended to its full width. If MASK is nonzero,
6364 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6366 static tree
6367 unextend (tree c, int p, int unsignedp, tree mask)
6369 tree type = TREE_TYPE (c);
6370 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6371 tree temp;
6373 if (p == modesize || unsignedp)
6374 return c;
6376 /* We work by getting just the sign bit into the low-order bit, then
6377 into the high-order bit, then sign-extend. We then XOR that value
6378 with C. */
6379 temp = build_int_cst (TREE_TYPE (c),
6380 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6382 /* We must use a signed type in order to get an arithmetic right shift.
6383 However, we must also avoid introducing accidental overflows, so that
6384 a subsequent call to integer_zerop will work. Hence we must
6385 do the type conversion here. At this point, the constant is either
6386 zero or one, and the conversion to a signed type can never overflow.
6387 We could get an overflow if this conversion is done anywhere else. */
6388 if (TYPE_UNSIGNED (type))
6389 temp = fold_convert (signed_type_for (type), temp);
6391 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6392 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6393 if (mask != 0)
6394 temp = const_binop (BIT_AND_EXPR, temp,
6395 fold_convert (TREE_TYPE (c), mask));
6396 /* If necessary, convert the type back to match the type of C. */
6397 if (TYPE_UNSIGNED (type))
6398 temp = fold_convert (type, temp);
6400 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
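/* Illustrative sketch, not part of GCC: unextend's effect computed
   on a host 32-bit unsigned word, for 0 < P < 32 (the real code
   works on trees and handles the optional MASK as well).  The extra
   bits of the result are zero if and only if C already held the
   sign-extended P-bit pattern.  */
static unsigned
unextend_example (unsigned c, int p)
{
  unsigned sign = (c >> (p - 1)) & 1u;		/* sign bit of the P-bit field */
  unsigned extra = sign ? ~0u << p : 0u;	/* the bits above the field */
  return c ^ extra;
}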
6403 /* For an expression that has the form
6404 (A && B) || ~B
6405 or
6406 (A || B) && ~B,
6407 we can drop one of the inner expressions and simplify to
6408 A || ~B
6409 or
6410 A && ~B
6411 LOC is the location of the resulting expression. OP is the inner
6412 logical operation; the left-hand side in the examples above, while CMPOP
6413 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6414 removing a condition that guards another, as in
6415 (A != NULL && A->...) || A == NULL
6416 which we must not transform. If RHS_ONLY is true, only eliminate the
6417 right-most operand of the inner logical operation. */
6419 static tree
6420 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6421 bool rhs_only)
6423 tree type = TREE_TYPE (cmpop);
6424 enum tree_code code = TREE_CODE (cmpop);
6425 enum tree_code truthop_code = TREE_CODE (op);
6426 tree lhs = TREE_OPERAND (op, 0);
6427 tree rhs = TREE_OPERAND (op, 1);
6428 tree orig_lhs = lhs, orig_rhs = rhs;
6429 enum tree_code rhs_code = TREE_CODE (rhs);
6430 enum tree_code lhs_code = TREE_CODE (lhs);
6431 enum tree_code inv_code;
6433 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6434 return NULL_TREE;
6436 if (TREE_CODE_CLASS (code) != tcc_comparison)
6437 return NULL_TREE;
6439 if (rhs_code == truthop_code)
6441 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6442 if (newrhs != NULL_TREE)
6444 rhs = newrhs;
6445 rhs_code = TREE_CODE (rhs);
6448 if (lhs_code == truthop_code && !rhs_only)
6450 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6451 if (newlhs != NULL_TREE)
6453 lhs = newlhs;
6454 lhs_code = TREE_CODE (lhs);
6458 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6459 if (inv_code == rhs_code
6460 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6461 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6462 return lhs;
6463 if (!rhs_only && inv_code == lhs_code
6464 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6465 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6466 return rhs;
6467 if (rhs != orig_rhs || lhs != orig_lhs)
6468 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6469 lhs, rhs);
6470 return NULL_TREE;
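/* Illustrative sketch, not part of GCC: the simplification above in
   plain C, with a hypothetical function name.  */
static int
opposite_arm_example (int a, int b)
{
  /* (a < 0 && b != 0) || b == 0 simplifies to a < 0 || b == 0,
     because b != 0 is the inverse of the other arm.  By contrast,
     (p != 0 && *p > 0) || p == 0 must keep the p != 0 guard, which
     is what RHS_ONLY protects.  */
  return (a < 0 && b != 0) || b == 0;
}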
6473 /* Find ways of folding logical expressions of LHS and RHS:
6474 Try to merge two comparisons to the same innermost item.
6475 Look for range tests like "ch >= '0' && ch <= '9'".
6476 Look for combinations of simple terms on machines with expensive branches
6477 and evaluate the RHS unconditionally.
6479 For example, if we have p->a == 2 && p->b == 4 and we can make an
6480 object large enough to span both A and B, we can do this with a comparison
6481 against the object ANDed with a mask.
6483 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6484 operations to do this with one comparison.
6486 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6487 function and the one above.
6489 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6490 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6492 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6493 two operands.
6495 We return the simplified tree or 0 if no optimization is possible. */
6497 static tree
6498 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6499 tree lhs, tree rhs)
6501 /* If this is the "or" of two comparisons, we can do something if
6502 the comparisons are NE_EXPR. If this is the "and", we can do something
6503 if the comparisons are EQ_EXPR. I.e.,
6504 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6506 WANTED_CODE is this operation code. For single bit fields, we can
6507 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6508 comparison for one-bit fields. */
6510 enum tree_code wanted_code;
6511 enum tree_code lcode, rcode;
6512 tree ll_arg, lr_arg, rl_arg, rr_arg;
6513 tree ll_inner, lr_inner, rl_inner, rr_inner;
6514 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6515 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6516 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6517 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6518 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6519 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6520 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6521 scalar_int_mode lnmode, rnmode;
6522 tree ll_mask, lr_mask, rl_mask, rr_mask;
6523 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6524 tree l_const, r_const;
6525 tree lntype, rntype, result;
6526 HOST_WIDE_INT first_bit, end_bit;
6527 int volatilep;
6529 /* Start by getting the comparison codes. Fail if anything is volatile.
6530 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6531 it were surrounded with a NE_EXPR. */
6533 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6534 return 0;
6536 lcode = TREE_CODE (lhs);
6537 rcode = TREE_CODE (rhs);
6539 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6541 lhs = build2 (NE_EXPR, truth_type, lhs,
6542 build_int_cst (TREE_TYPE (lhs), 0));
6543 lcode = NE_EXPR;
6546 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6548 rhs = build2 (NE_EXPR, truth_type, rhs,
6549 build_int_cst (TREE_TYPE (rhs), 0));
6550 rcode = NE_EXPR;
6553 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6554 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6555 return 0;
6557 ll_arg = TREE_OPERAND (lhs, 0);
6558 lr_arg = TREE_OPERAND (lhs, 1);
6559 rl_arg = TREE_OPERAND (rhs, 0);
6560 rr_arg = TREE_OPERAND (rhs, 1);
6562 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6563 if (simple_operand_p (ll_arg)
6564 && simple_operand_p (lr_arg))
6566 if (operand_equal_p (ll_arg, rl_arg, 0)
6567 && operand_equal_p (lr_arg, rr_arg, 0))
6569 result = combine_comparisons (loc, code, lcode, rcode,
6570 truth_type, ll_arg, lr_arg);
6571 if (result)
6572 return result;
6574 else if (operand_equal_p (ll_arg, rr_arg, 0)
6575 && operand_equal_p (lr_arg, rl_arg, 0))
6577 result = combine_comparisons (loc, code, lcode,
6578 swap_tree_comparison (rcode),
6579 truth_type, ll_arg, lr_arg);
6580 if (result)
6581 return result;
6585 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6586 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6588 /* If the RHS can be evaluated unconditionally and its operands are
6589 simple, it wins to evaluate the RHS unconditionally on machines
6590 with expensive branches. In this case, this isn't a comparison
6591 that can be merged. */
6593 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6594 false) >= 2
6595 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6596 && simple_operand_p (rl_arg)
6597 && simple_operand_p (rr_arg))
6599 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6600 if (code == TRUTH_OR_EXPR
6601 && lcode == NE_EXPR && integer_zerop (lr_arg)
6602 && rcode == NE_EXPR && integer_zerop (rr_arg)
6603 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6604 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6605 return build2_loc (loc, NE_EXPR, truth_type,
6606 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6607 ll_arg, rl_arg),
6608 build_int_cst (TREE_TYPE (ll_arg), 0));
6610 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6611 if (code == TRUTH_AND_EXPR
6612 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6613 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6614 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6615 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6616 return build2_loc (loc, EQ_EXPR, truth_type,
6617 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6618 ll_arg, rl_arg),
6619 build_int_cst (TREE_TYPE (ll_arg), 0));
6622 /* See if the comparisons can be merged. Then get all the parameters for
6623 each side. */
6625 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6626 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6627 return 0;
6629 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6630 volatilep = 0;
6631 ll_inner = decode_field_reference (loc, &ll_arg,
6632 &ll_bitsize, &ll_bitpos, &ll_mode,
6633 &ll_unsignedp, &ll_reversep, &volatilep,
6634 &ll_mask, &ll_and_mask);
6635 lr_inner = decode_field_reference (loc, &lr_arg,
6636 &lr_bitsize, &lr_bitpos, &lr_mode,
6637 &lr_unsignedp, &lr_reversep, &volatilep,
6638 &lr_mask, &lr_and_mask);
6639 rl_inner = decode_field_reference (loc, &rl_arg,
6640 &rl_bitsize, &rl_bitpos, &rl_mode,
6641 &rl_unsignedp, &rl_reversep, &volatilep,
6642 &rl_mask, &rl_and_mask);
6643 rr_inner = decode_field_reference (loc, &rr_arg,
6644 &rr_bitsize, &rr_bitpos, &rr_mode,
6645 &rr_unsignedp, &rr_reversep, &volatilep,
6646 &rr_mask, &rr_and_mask);
6648 /* The inner operation on the lhs of each comparison must be the same
6649 if we are to be able to do anything.
6650 Then see if we have constants. If not, the same must be true for
6651 the rhs's. */
6652 if (volatilep
6653 || ll_reversep != rl_reversep
6654 || ll_inner == 0 || rl_inner == 0
6655 || ! operand_equal_p (ll_inner, rl_inner, 0))
6656 return 0;
6658 if (TREE_CODE (lr_arg) == INTEGER_CST
6659 && TREE_CODE (rr_arg) == INTEGER_CST)
6661 l_const = lr_arg, r_const = rr_arg;
6662 lr_reversep = ll_reversep;
6664 else if (lr_reversep != rr_reversep
6665 || lr_inner == 0 || rr_inner == 0
6666 || ! operand_equal_p (lr_inner, rr_inner, 0))
6667 return 0;
6668 else
6669 l_const = r_const = 0;
6671 /* If either comparison code is not correct for our logical operation,
6672 fail. However, we can convert a one-bit comparison against zero into
6673 the opposite comparison against that bit being set in the field. */
6675 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6676 if (lcode != wanted_code)
6678 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6680 /* Make the left operand unsigned, since we are only interested
6681 in the value of one bit. Otherwise we are doing the wrong
6682 thing below. */
6683 ll_unsignedp = 1;
6684 l_const = ll_mask;
6686 else
6687 return 0;
6690 /* This is analogous to the code for l_const above. */
6691 if (rcode != wanted_code)
6693 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6695 rl_unsignedp = 1;
6696 r_const = rl_mask;
6698 else
6699 return 0;
6702 /* See if we can find a mode that contains both fields being compared on
6703 the left. If we can't, fail. Otherwise, update all constants and masks
6704 to be relative to a field of that size. */
6705 first_bit = MIN (ll_bitpos, rl_bitpos);
6706 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6707 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6708 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6709 volatilep, &lnmode))
6710 return 0;
6712 lnbitsize = GET_MODE_BITSIZE (lnmode);
6713 lnbitpos = first_bit & ~ (lnbitsize - 1);
6714 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6715 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6717 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6719 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6720 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6723 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6724 size_int (xll_bitpos));
6725 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6726 size_int (xrl_bitpos));
6727 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6728 return 0;
6730 if (l_const)
6732 l_const = fold_convert_loc (loc, lntype, l_const);
6733 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6734 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6735 if (l_const == NULL_TREE)
6736 return 0;
6737 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6738 fold_build1_loc (loc, BIT_NOT_EXPR,
6739 lntype, ll_mask))))
6741 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6743 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6746 if (r_const)
6748 r_const = fold_convert_loc (loc, lntype, r_const);
6749 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6750 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6751 if (r_const == NULL_TREE)
6752 return 0;
6753 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6754 fold_build1_loc (loc, BIT_NOT_EXPR,
6755 lntype, rl_mask))))
6757 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6759 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6763 /* If the right sides are not constant, do the same for them. Also,
6764 disallow this optimization if a size, signedness or storage order
6765 mismatch occurs between the left and right sides. */
6766 if (l_const == 0)
6768 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6769 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6770 || ll_reversep != lr_reversep
6771 /* Make sure the two fields on the right
6772 correspond to the left without being swapped. */
6773 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6774 return 0;
6776 first_bit = MIN (lr_bitpos, rr_bitpos);
6777 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6778 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6779 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6780 volatilep, &rnmode))
6781 return 0;
6783 rnbitsize = GET_MODE_BITSIZE (rnmode);
6784 rnbitpos = first_bit & ~ (rnbitsize - 1);
6785 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6786 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6788 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6790 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6791 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6794 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6795 rntype, lr_mask),
6796 size_int (xlr_bitpos));
6797 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6798 rntype, rr_mask),
6799 size_int (xrr_bitpos));
6800 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6801 return 0;
6803 /* Make a mask that corresponds to both fields being compared.
6804 Do this for both items being compared. If the operands are the
6805 same size and the bits being compared are in the same position
6806 then we can do this by masking both and comparing the masked
6807 results. */
6808 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6809 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6810 if (lnbitsize == rnbitsize
6811 && xll_bitpos == xlr_bitpos
6812 && lnbitpos >= 0
6813 && rnbitpos >= 0)
6815 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6816 lntype, lnbitsize, lnbitpos,
6817 ll_unsignedp || rl_unsignedp, ll_reversep);
6818 if (! all_ones_mask_p (ll_mask, lnbitsize))
6819 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6821 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6822 rntype, rnbitsize, rnbitpos,
6823 lr_unsignedp || rr_unsignedp, lr_reversep);
6824 if (! all_ones_mask_p (lr_mask, rnbitsize))
6825 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6827 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6830 /* There is still another way we can do something: If both pairs of
6831 fields being compared are adjacent, we may be able to make a wider
6832 field containing them both.
6834 Note that we still must mask the lhs/rhs expressions. Furthermore,
6835 the mask must be shifted to account for the shift done by
6836 make_bit_field_ref. */
6837 if (((ll_bitsize + ll_bitpos == rl_bitpos
6838 && lr_bitsize + lr_bitpos == rr_bitpos)
6839 || (ll_bitpos == rl_bitpos + rl_bitsize
6840 && lr_bitpos == rr_bitpos + rr_bitsize))
6841 && ll_bitpos >= 0
6842 && rl_bitpos >= 0
6843 && lr_bitpos >= 0
6844 && rr_bitpos >= 0)
6846 tree type;
6848 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6849 ll_bitsize + rl_bitsize,
6850 MIN (ll_bitpos, rl_bitpos),
6851 ll_unsignedp, ll_reversep);
6852 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6853 lr_bitsize + rr_bitsize,
6854 MIN (lr_bitpos, rr_bitpos),
6855 lr_unsignedp, lr_reversep);
6857 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6858 size_int (MIN (xll_bitpos, xrl_bitpos)));
6859 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6860 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6861 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6862 return 0;
6864 /* Convert to the smaller type before masking out unwanted bits. */
6865 type = lntype;
6866 if (lntype != rntype)
6868 if (lnbitsize > rnbitsize)
6870 lhs = fold_convert_loc (loc, rntype, lhs);
6871 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6872 type = rntype;
6874 else if (lnbitsize < rnbitsize)
6876 rhs = fold_convert_loc (loc, lntype, rhs);
6877 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6878 type = lntype;
6882 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6883 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6885 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6886 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6888 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6891 return 0;
6894 /* Handle the case of comparisons with constants. If there is something in
6895 common between the masks, those bits of the constants must be the same.
6896 If not, the condition is always false. Test for this to avoid generating
6897 incorrect code below. */
6898 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6899 if (! integer_zerop (result)
6900 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6901 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6903 if (wanted_code == NE_EXPR)
6905 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6906 return constant_boolean_node (true, truth_type);
6908 else
6910 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6911 return constant_boolean_node (false, truth_type);
6915 if (lnbitpos < 0)
6916 return 0;
6918 /* Construct the expression we will return. First get the component
6919 reference we will make. Unless the mask is all ones the width of
6920 that field, perform the mask operation. Then compare with the
6921 merged constant. */
6922 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6923 lntype, lnbitsize, lnbitpos,
6924 ll_unsignedp || rl_unsignedp, ll_reversep);
6926 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6927 if (! all_ones_mask_p (ll_mask, lnbitsize))
6928 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6930 return build2_loc (loc, wanted_code, truth_type, result,
6931 const_binop (BIT_IOR_EXPR, l_const, r_const));
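/* Illustrative sketch, not part of GCC: the kind of merge
   fold_truth_andor_1 performs when two compared fields fit in one
   word.  The struct, the constants and the little-endian byte order
   are all assumptions of this example.  */
struct truth_andor_example_s { unsigned char a, b; };

static int
truth_andor_example (const struct truth_andor_example_s *p)
{
  /* p->a == 2 && p->b == 4 becomes a single 16-bit comparison of
     both bytes against the merged constant 0x0402.  */
  unsigned v = p->a | ((unsigned) p->b << 8);
  return v == 0x0402u;
}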
6934 /* T is an integer expression that is being multiplied or divided by, or
6935 reduced modulo, a constant C (CODE says which operation and what kind of
6936 division or modulus). See if we can eliminate that operation by folding it with
6937 other operations already in T. WIDE_TYPE, if non-null, is a type that
6938 should be used for the computation if wider than our type.
6940 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6941 (X * 2) + (Y * 4). We must, however, be assured that either the original
6942 expression would not overflow or that overflow is undefined for the type
6943 in the language in question.
6945 If we return a non-null expression, it is an equivalent form of the
6946 original computation, but need not be in the original type.
6948 We set *STRICT_OVERFLOW_P to true if the return value depends on
6949 signed overflow being undefined. Otherwise we do not change
6950 *STRICT_OVERFLOW_P. */
6952 static tree
6953 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6954 bool *strict_overflow_p)
6956 /* To avoid exponential search depth, refuse to allow recursion past
6957 three levels. Beyond that (1) it's highly unlikely that we'll find
6958 something interesting and (2) we've probably processed it before
6959 when we built the inner expression. */
6961 static int depth;
6962 tree ret;
6964 if (depth > 3)
6965 return NULL;
6967 depth++;
6968 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6969 depth--;
6971 return ret;
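/* Illustrative sketch, not part of GCC: the headline transformation
   from the comment above extract_muldiv, with a hypothetical
   function name.  The fold is valid only because signed overflow is
   undefined for int, which is why *STRICT_OVERFLOW_P exists.  */
static int
extract_muldiv_example (int x, int y)
{
  return (x * 8 + y * 16) / 4;	/* folded to x * 2 + y * 4 */
}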
6974 static tree
6975 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6976 bool *strict_overflow_p)
6978 tree type = TREE_TYPE (t);
6979 enum tree_code tcode = TREE_CODE (t);
6980 tree ctype = type;
6981 if (wide_type)
6983 if (TREE_CODE (type) == BITINT_TYPE
6984 || TREE_CODE (wide_type) == BITINT_TYPE)
6986 if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6987 ctype = wide_type;
6989 else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6990 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6991 ctype = wide_type;
6993 tree t1, t2;
6994 bool same_p = tcode == code;
6995 tree op0 = NULL_TREE, op1 = NULL_TREE;
6996 bool sub_strict_overflow_p;
6998 /* Don't deal with constants of zero here; they confuse the code below. */
6999 if (integer_zerop (c))
7000 return NULL_TREE;
7002 if (TREE_CODE_CLASS (tcode) == tcc_unary)
7003 op0 = TREE_OPERAND (t, 0);
7005 if (TREE_CODE_CLASS (tcode) == tcc_binary)
7006 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
7008 /* Note that we need not handle conditional operations here since fold
7009 already handles those cases. So just do arithmetic here. */
7010 switch (tcode)
7012 case INTEGER_CST:
7013 /* For a constant, we can always simplify if we are a multiply
7014 or (for divide and modulus) if it is a multiple of our constant. */
7015 if (code == MULT_EXPR
7016 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
7017 TYPE_SIGN (type)))
7019 tree tem = const_binop (code, fold_convert (ctype, t),
7020 fold_convert (ctype, c));
7021 /* If the multiplication overflowed, we lost information on it.
7022 See PR68142 and PR69845. */
7023 if (TREE_OVERFLOW (tem))
7024 return NULL_TREE;
7025 return tem;
7027 break;
7029 CASE_CONVERT: case NON_LVALUE_EXPR:
7030 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
7031 break;
7032 /* If op0 is an expression ... */
7033 if ((COMPARISON_CLASS_P (op0)
7034 || UNARY_CLASS_P (op0)
7035 || BINARY_CLASS_P (op0)
7036 || VL_EXP_CLASS_P (op0)
7037 || EXPRESSION_CLASS_P (op0))
7038 /* ... and has wrapping overflow, and its type is smaller
7039 than ctype, then we cannot pass through as widening. */
7040 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
7041 && (TYPE_PRECISION (ctype)
7042 > TYPE_PRECISION (TREE_TYPE (op0))))
7043 /* ... or this is a truncation (t is narrower than op0),
7044 then we cannot pass through this narrowing. */
7045 || (TYPE_PRECISION (type)
7046 < TYPE_PRECISION (TREE_TYPE (op0)))
7047 /* ... or signedness changes for division or modulus,
7048 then we cannot pass through this conversion. */
7049 || (code != MULT_EXPR
7050 && (TYPE_UNSIGNED (ctype)
7051 != TYPE_UNSIGNED (TREE_TYPE (op0))))
7052 /* ... or has undefined overflow while the converted to
7053 type has not, we cannot do the operation in the inner type
7054 as that would introduce undefined overflow. */
7055 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
7056 && !TYPE_OVERFLOW_UNDEFINED (type))))
7057 break;
7059 /* Pass the constant down and see if we can make a simplification. If
7060 we can, replace this expression with the inner simplification for
7061 possible later conversion to our or some other type. */
7062 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
7063 && TREE_CODE (t2) == INTEGER_CST
7064 && !TREE_OVERFLOW (t2)
7065 && (t1 = extract_muldiv (op0, t2, code,
7066 code == MULT_EXPR ? ctype : NULL_TREE,
7067 strict_overflow_p)) != 0)
7068 return t1;
7069 break;
7071 case ABS_EXPR:
7072 /* If widening the type changes it from signed to unsigned, then we
7073 must avoid building ABS_EXPR itself as unsigned. */
7074 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
7076 tree cstype = (*signed_type_for) (ctype);
7077 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
7078 != 0)
7080 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
7081 return fold_convert (ctype, t1);
7083 break;
7085 /* If the constant is negative, we cannot simplify this. */
7086 if (tree_int_cst_sgn (c) == -1)
7087 break;
7088 /* FALLTHROUGH */
7089 case NEGATE_EXPR:
7090 /* For division and modulus, type can't be unsigned, as e.g.
7091 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
7092 For signed types, even with wrapping overflow, this is fine. */
7093 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
7094 break;
7095 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
7096 != 0)
7097 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
7098 break;
7100 case MIN_EXPR: case MAX_EXPR:
7101 /* If widening the type changes the signedness, then we can't perform
7102 this optimization as that changes the result. */
7103 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
7104 break;
7106 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
7107 sub_strict_overflow_p = false;
7108 if ((t1 = extract_muldiv (op0, c, code, wide_type,
7109 &sub_strict_overflow_p)) != 0
7110 && (t2 = extract_muldiv (op1, c, code, wide_type,
7111 &sub_strict_overflow_p)) != 0)
7113 if (tree_int_cst_sgn (c) < 0)
7114 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
7115 if (sub_strict_overflow_p)
7116 *strict_overflow_p = true;
7117 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7118 fold_convert (ctype, t2));
7120 break;
7122 case LSHIFT_EXPR: case RSHIFT_EXPR:
7123 /* If the second operand is constant, this is a multiplication
7124 or floor division, by a power of two, so we can treat it that
7125 way unless the multiplier or divisor overflows. Signed
7126 left-shift overflow is implementation-defined rather than
7127 undefined in C90, so do not convert signed left shift into
7128 multiplication. */
7129 if (TREE_CODE (op1) == INTEGER_CST
7130 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7131 /* const_binop may not detect overflow correctly,
7132 so check for it explicitly here. */
7133 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7134 wi::to_wide (op1))
7135 && (t1 = fold_convert (ctype,
7136 const_binop (LSHIFT_EXPR, size_one_node,
7137 op1))) != 0
7138 && !TREE_OVERFLOW (t1))
7139 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7140 ? MULT_EXPR : FLOOR_DIV_EXPR,
7141 ctype,
7142 fold_convert (ctype, op0),
7143 t1),
7144 c, code, wide_type, strict_overflow_p);
7145 break;
7147 case PLUS_EXPR: case MINUS_EXPR:
7148 /* See if we can eliminate the operation on both sides. If we can, we
7149 can return a new PLUS or MINUS. If we can't, the only remaining
7150 cases where we can do anything are if the second operand is a
7151 constant. */
7152 sub_strict_overflow_p = false;
7153 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7154 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7155 if (t1 != 0 && t2 != 0
7156 && TYPE_OVERFLOW_WRAPS (ctype)
7157 && (code == MULT_EXPR
7158 /* If not multiplication, we can only do this if both operands
7159 are divisible by c. */
7160 || (multiple_of_p (ctype, op0, c)
7161 && multiple_of_p (ctype, op1, c))))
7163 if (sub_strict_overflow_p)
7164 *strict_overflow_p = true;
7165 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7166 fold_convert (ctype, t2));
7169 /* If this was a subtraction, negate OP1 and set it to be an addition.
7170 This simplifies the logic below. */
7171 if (tcode == MINUS_EXPR)
7173 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7174 /* If OP1 was not easily negatable, the constant may be OP0. */
7175 if (TREE_CODE (op0) == INTEGER_CST)
7177 std::swap (op0, op1);
7178 std::swap (t1, t2);
7182 if (TREE_CODE (op1) != INTEGER_CST)
7183 break;
7185 /* If either OP1 or C are negative, this optimization is not safe for
7186 some of the division and remainder types while for others we need
7187 to change the code. */
7188 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7190 if (code == CEIL_DIV_EXPR)
7191 code = FLOOR_DIV_EXPR;
7192 else if (code == FLOOR_DIV_EXPR)
7193 code = CEIL_DIV_EXPR;
7194 else if (code != MULT_EXPR
7195 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7196 break;
7199 /* If it's a multiply or a division/modulus operation of a multiple
7200 of our constant, do the operation and verify it doesn't overflow. */
7201 if (code == MULT_EXPR
7202 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7203 TYPE_SIGN (type)))
7205 op1 = const_binop (code, fold_convert (ctype, op1),
7206 fold_convert (ctype, c));
7207 /* We allow the constant to overflow with wrapping semantics. */
7208 if (op1 == 0
7209 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7210 break;
7212 else
7213 break;
7215 /* If we have an unsigned type, we cannot widen the operation since it
7216 will change the result if the original computation overflowed. */
7217 if (TYPE_UNSIGNED (ctype) && ctype != type)
7218 break;
7220 /* The last case is when this is a multiply. In that case, we can
7221 apply the distributive law to commute the multiply and addition
7222 if the multiplication of the constants doesn't overflow
7223 and overflow is defined. With undefined overflow
7224 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7225 But fold_plusminus_mult_expr would factor back any power-of-two
7226 value so do not distribute in the first place in this case. */
7227 if (code == MULT_EXPR
7228 && TYPE_OVERFLOW_WRAPS (ctype)
7229 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7230 return fold_build2 (tcode, ctype,
7231 fold_build2 (code, ctype,
7232 fold_convert (ctype, op0),
7233 fold_convert (ctype, c)),
7234 op1);
7236 break;
7238 case MULT_EXPR:
7239 /* We have a special case here if we are doing something like
7240 (C * 8) % 4 since we know that's zero. */
7241 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7242 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7243 /* If the multiplication can overflow we cannot optimize this. */
7244 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7245 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7246 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7247 TYPE_SIGN (type)))
7249 *strict_overflow_p = true;
7250 return omit_one_operand (type, integer_zero_node, op0);
7253 /* ... fall through ... */
7255 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7256 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7257 /* If we can extract our operation from the LHS, do so and return a
7258 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7259 do something only if the second operand is a constant. */
7260 if (same_p
7261 && TYPE_OVERFLOW_WRAPS (ctype)
7262 && (t1 = extract_muldiv (op0, c, code, wide_type,
7263 strict_overflow_p)) != 0)
7264 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7265 fold_convert (ctype, op1));
7266 else if (tcode == MULT_EXPR && code == MULT_EXPR
7267 && TYPE_OVERFLOW_WRAPS (ctype)
7268 && (t1 = extract_muldiv (op1, c, code, wide_type,
7269 strict_overflow_p)) != 0)
7270 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7271 fold_convert (ctype, t1));
7272 else if (TREE_CODE (op1) != INTEGER_CST)
7273 return 0;
7275 /* If these are the same operation types, we can associate them
7276 assuming no overflow. */
7277 if (tcode == code)
7279 bool overflow_p = false;
7280 wi::overflow_type overflow_mul;
7281 signop sign = TYPE_SIGN (ctype);
7282 unsigned prec = TYPE_PRECISION (ctype);
7283 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7284 wi::to_wide (c, prec),
7285 sign, &overflow_mul);
7286 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7287 if (overflow_mul
7288 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7289 overflow_p = true;
7290 if (!overflow_p)
7291 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7292 wide_int_to_tree (ctype, mul));
7295 /* If these operations "cancel" each other, we have the main
7296 optimizations of this pass, which occur when either constant is a
7297 multiple of the other, in which case we replace this with an
7298 operation of either CODE or TCODE.
7300 If we have an unsigned type, we cannot do this since it will change
7301 the result if the original computation overflowed. */
7302 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7303 && !TYPE_OVERFLOW_SANITIZED (ctype)
7304 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7305 || (tcode == MULT_EXPR
7306 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7307 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7308 && code != MULT_EXPR)))
7310 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7311 TYPE_SIGN (type)))
7313 *strict_overflow_p = true;
7314 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7315 fold_convert (ctype,
7316 const_binop (TRUNC_DIV_EXPR,
7317 op1, c)));
7319 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7320 TYPE_SIGN (type)))
7322 *strict_overflow_p = true;
7323 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7324 fold_convert (ctype,
7325 const_binop (TRUNC_DIV_EXPR,
7326 c, op1)));
7329 break;
7331 default:
7332 break;
7335 return 0;
7338 /* Return a node which has the indicated constant VALUE (either 0 or
7339 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7340 and is of the indicated TYPE. */
7342 tree
7343 constant_boolean_node (bool value, tree type)
7345 if (type == integer_type_node)
7346 return value ? integer_one_node : integer_zero_node;
7347 else if (type == boolean_type_node)
7348 return value ? boolean_true_node : boolean_false_node;
7349 else if (VECTOR_TYPE_P (type))
7350 return build_vector_from_val (type,
7351 build_int_cst (TREE_TYPE (type),
7352 value ? -1 : 0));
7353 else
7354 return fold_convert (type, value ? integer_one_node : integer_zero_node);
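
/* Editorial illustration (not part of the original source): scalar truth
   values are encoded as 0/1, but vector-lane truth values as all-zeros/
   all-ones, matching the masks produced by vector comparisons.  A minimal
   sketch with plain 8-bit lanes: */

#include <stdint.h>

static int8_t
scalar_true_value (int value)
{
  return value ? 1 : 0;
}

static int8_t
vector_lane_true_value (int value)
{
  /* -1 is the all-ones bit pattern, 0xff for an 8-bit lane.  */
  return value ? -1 : 0;
}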
7358 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7359 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7360 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7361 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7362 COND is the first argument to CODE; otherwise (as in the example
7363 given here), it is the second argument. TYPE is the type of the
7364 original expression. Return NULL_TREE if no simplification is
7365 possible. */
7367 static tree
7368 fold_binary_op_with_conditional_arg (location_t loc,
7369 enum tree_code code,
7370 tree type, tree op0, tree op1,
7371 tree cond, tree arg, int cond_first_p)
7373 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7374 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7375 tree test, true_value, false_value;
7376 tree lhs = NULL_TREE;
7377 tree rhs = NULL_TREE;
7378 enum tree_code cond_code = COND_EXPR;
7380 /* Do not move possibly trapping operations into the conditional as this
7381 pessimizes code and causes gimplification issues when applied late. */
7382 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7383 ANY_INTEGRAL_TYPE_P (type)
7384 && TYPE_OVERFLOW_TRAPS (type), op1))
7385 return NULL_TREE;
7387 if (TREE_CODE (cond) == COND_EXPR
7388 || TREE_CODE (cond) == VEC_COND_EXPR)
7390 test = TREE_OPERAND (cond, 0);
7391 true_value = TREE_OPERAND (cond, 1);
7392 false_value = TREE_OPERAND (cond, 2);
7393 /* If this operand is an expression that throws (and hence has
7394 VOID type), it does not make sense to try to perform a logical
7395 or arithmetic operation involving it. */
7396 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7397 lhs = true_value;
7398 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7399 rhs = false_value;
7401 else if (!(TREE_CODE (type) != VECTOR_TYPE
7402 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7404 tree testtype = TREE_TYPE (cond);
7405 test = cond;
7406 true_value = constant_boolean_node (true, testtype);
7407 false_value = constant_boolean_node (false, testtype);
7409 else
7410 /* Detect the case of mixing vector and scalar types - bail out. */
7411 return NULL_TREE;
7413 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7414 cond_code = VEC_COND_EXPR;
7416 /* This transformation is only worthwhile if we don't have to wrap ARG
7417 in a SAVE_EXPR and the operation can be simplified without recursing
7418 on at least one of the branches once it's pushed inside the COND_EXPR. */
7419 if (!TREE_CONSTANT (arg)
7420 && (TREE_SIDE_EFFECTS (arg)
7421 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7422 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7423 return NULL_TREE;
7425 arg = fold_convert_loc (loc, arg_type, arg);
7426 if (lhs == 0)
7428 true_value = fold_convert_loc (loc, cond_type, true_value);
7429 if (cond_first_p)
7430 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7431 else
7432 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7434 if (rhs == 0)
7436 false_value = fold_convert_loc (loc, cond_type, false_value);
7437 if (cond_first_p)
7438 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7439 else
7440 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7443 /* Check that we have simplified at least one of the branches. */
7444 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7445 return NULL_TREE;
7447 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
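
/* Editorial sketch of the transformation above on plain ints (not GCC
   API): the binary operation is pushed into both arms of the conditional,
   which pays off when at least one arm then folds to a constant, e.g.
   `a + (x < y)' yields the constant arms a + 1 and a + 0.  */

static int
plus_into_cond_demo (int a, int b, int x, int y)
{
  /* a + (b ? x : y) becomes...  */
  return b ? (a + x) : (a + y);
}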
7451 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7453 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7454 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7455 if ARG - ZERO_ARG is the same as ARG.
7457 If ARG is NULL, check for any value of type TYPE.
7459 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7460 and finite. The problematic cases are when X is zero, and its mode
7461 has signed zeros. In the case of rounding towards -infinity,
7462 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7463 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7465 bool
7466 fold_real_zero_addition_p (const_tree type, const_tree arg,
7467 const_tree zero_arg, int negate)
7469 if (!real_zerop (zero_arg))
7470 return false;
7472 /* Don't allow the fold with -fsignaling-nans. */
7473 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7474 return false;
7476 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7477 if (!HONOR_SIGNED_ZEROS (type))
7478 return true;
7480 /* There is no case that is safe for all rounding modes. */
7481 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7482 return false;
7484 /* In a vector or complex, we would need to check the sign of all zeros. */
7485 if (TREE_CODE (zero_arg) == VECTOR_CST)
7486 zero_arg = uniform_vector_p (zero_arg);
7487 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7488 return false;
7490 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7491 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7492 negate = !negate;
7494 /* The mode has signed zeros, and we have to honor their sign.
7495 In this situation, there are only two cases we can return true for.
7496 (i) X - 0 is the same as X with default rounding.
7497 (ii) X + 0 is X when X can't possibly be -0.0. */
7498 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
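
/* Editorial illustration of the signed-zero cases above (host doubles,
   default rounding; not part of the original source): x - 0.0 preserves
   the sign of a zero x, while x + 0.0 does not, because -0.0 + 0.0 is
   +0.0.  */

#include <assert.h>
#include <math.h>

static void
signed_zero_demo (void)
{
  volatile double x = -0.0;       /* volatile to defeat folding.  */
  assert (signbit (x - 0.0));     /* X - 0 is still -0.0.  */
  assert (!signbit (x + 0.0));    /* X + 0 becomes +0.0.  */
}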
7501 /* Subroutine of match.pd that optimizes comparisons of a division by
7502 a nonzero integer constant against an integer constant, i.e.
7503 X/C1 op C2.
7505 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7506 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7508 enum tree_code
7509 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7510 tree *hi, bool *neg_overflow)
7512 tree prod, tmp, type = TREE_TYPE (c1);
7513 signop sign = TYPE_SIGN (type);
7514 wi::overflow_type overflow;
7516 /* We have to do this the hard way to detect unsigned overflow.
7517 prod = int_const_binop (MULT_EXPR, c1, c2); */
7518 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7519 prod = force_fit_type (type, val, -1, overflow);
7520 *neg_overflow = false;
7522 if (sign == UNSIGNED)
7524 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7525 *lo = prod;
7527 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7528 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7529 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7531 else if (tree_int_cst_sgn (c1) >= 0)
7533 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7534 switch (tree_int_cst_sgn (c2))
7536 case -1:
7537 *neg_overflow = true;
7538 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7539 *hi = prod;
7540 break;
7542 case 0:
7543 *lo = fold_negate_const (tmp, type);
7544 *hi = tmp;
7545 break;
7547 case 1:
7548 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7549 *lo = prod;
7550 break;
7552 default:
7553 gcc_unreachable ();
7556 else
7558 /* A negative divisor reverses the relational operators. */
7559 code = swap_tree_comparison (code);
7561 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7562 switch (tree_int_cst_sgn (c2))
7564 case -1:
7565 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7566 *lo = prod;
7567 break;
7569 case 0:
7570 *hi = fold_negate_const (tmp, type);
7571 *lo = tmp;
7572 break;
7574 case 1:
7575 *neg_overflow = true;
7576 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7577 *hi = prod;
7578 break;
7580 default:
7581 gcc_unreachable ();
7585 if (code != EQ_EXPR && code != NE_EXPR)
7586 return code;
7588 if (TREE_OVERFLOW (*lo)
7589 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7590 *lo = NULL_TREE;
7591 if (TREE_OVERFLOW (*hi)
7592 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7593 *hi = NULL_TREE;
7595 return code;
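
/* Editorial sketch (plain unsigned arithmetic, overflow cases ignored):
   for positive C1 and C2, X / C1 == C2 holds exactly when X lies in
   [C1*C2, C1*C2 + C1 - 1], which is the [*LO, *HI] range computed above;
   the other comparison codes then follow from the range bounds.  */

static int
div_compare_demo (unsigned x, unsigned c1, unsigned c2)
{
  unsigned lo = c1 * c2;
  unsigned hi = lo + (c1 - 1);
  /* X / C1 == C2  <==>  LO <= X && X <= HI.  */
  return (x / c1 == c2) == (lo <= x && x <= hi);
}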
7598 /* Test whether it is preferable to swap two operands, ARG0 and
7599 ARG1, for example because ARG0 is an integer constant and ARG1
7600 isn't. */
7602 bool
7603 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7605 if (CONSTANT_CLASS_P (arg1))
7606 return false;
7607 if (CONSTANT_CLASS_P (arg0))
7608 return true;
7610 STRIP_NOPS (arg0);
7611 STRIP_NOPS (arg1);
7613 if (TREE_CONSTANT (arg1))
7614 return false;
7615 if (TREE_CONSTANT (arg0))
7616 return true;
7618 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7619 for commutative and comparison operators. Ensuring a canonical
7620 form allows the optimizers to find additional redundancies without
7621 having to explicitly check for both orderings. */
7622 if (TREE_CODE (arg0) == SSA_NAME
7623 && TREE_CODE (arg1) == SSA_NAME
7624 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7625 return true;
7627 /* Put SSA_NAMEs last. */
7628 if (TREE_CODE (arg1) == SSA_NAME)
7629 return false;
7630 if (TREE_CODE (arg0) == SSA_NAME)
7631 return true;
7633 /* Put variables last. */
7634 if (DECL_P (arg1))
7635 return false;
7636 if (DECL_P (arg0))
7637 return true;
7639 return false;
7643 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7644 means A >= Y && A != MAX, but in this case we know that
7645 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7647 static tree
7648 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7650 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7652 if (TREE_CODE (bound) == LT_EXPR)
7653 a = TREE_OPERAND (bound, 0);
7654 else if (TREE_CODE (bound) == GT_EXPR)
7655 a = TREE_OPERAND (bound, 1);
7656 else
7657 return NULL_TREE;
7659 typea = TREE_TYPE (a);
7660 if (!INTEGRAL_TYPE_P (typea)
7661 && !POINTER_TYPE_P (typea))
7662 return NULL_TREE;
7664 if (TREE_CODE (ineq) == LT_EXPR)
7666 a1 = TREE_OPERAND (ineq, 1);
7667 y = TREE_OPERAND (ineq, 0);
7669 else if (TREE_CODE (ineq) == GT_EXPR)
7671 a1 = TREE_OPERAND (ineq, 0);
7672 y = TREE_OPERAND (ineq, 1);
7674 else
7675 return NULL_TREE;
7677 if (TREE_TYPE (a1) != typea)
7678 return NULL_TREE;
7680 if (POINTER_TYPE_P (typea))
7682 /* Convert the pointer types into integers before taking the difference. */
7683 tree ta = fold_convert_loc (loc, ssizetype, a);
7684 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7685 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7687 else
7688 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7690 if (!diff || !integer_onep (diff))
7691 return NULL_TREE;
7693 return fold_build2_loc (loc, GE_EXPR, type, a, y);
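
/* Editorial illustration on plain ints: the premise A < X guarantees that
   A is not the maximum value, so A + 1 cannot wrap and A + 1 > Y really
   is equivalent to A >= Y.  */

static int
nonsharp_ineq_demo (int a, int x, int y)
{
  if (a < x)
    return (a + 1 > y) == (a >= y);  /* Always 1 given A < X.  */
  return 1;
}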
7696 /* Fold a sum or difference of at least one multiplication.
7697 Returns the folded tree or NULL if no simplification could be made. */
7699 static tree
7700 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7701 tree arg0, tree arg1)
7703 tree arg00, arg01, arg10, arg11;
7704 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7706 /* (A * C) +- (B * C) -> (A+-B) * C.
7707 (A * C) +- A -> A * (C+-1).
7708 We are most concerned about the case where C is a constant,
7709 but other combinations show up during loop reduction. Since
7710 it is not difficult, try all four possibilities. */
7712 if (TREE_CODE (arg0) == MULT_EXPR)
7714 arg00 = TREE_OPERAND (arg0, 0);
7715 arg01 = TREE_OPERAND (arg0, 1);
7717 else if (TREE_CODE (arg0) == INTEGER_CST)
7719 arg00 = build_one_cst (type);
7720 arg01 = arg0;
7722 else
7724 /* We cannot generate constant 1 for fract. */
7725 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7726 return NULL_TREE;
7727 arg00 = arg0;
7728 arg01 = build_one_cst (type);
7730 if (TREE_CODE (arg1) == MULT_EXPR)
7732 arg10 = TREE_OPERAND (arg1, 0);
7733 arg11 = TREE_OPERAND (arg1, 1);
7735 else if (TREE_CODE (arg1) == INTEGER_CST)
7737 arg10 = build_one_cst (type);
7738 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7739 the purpose of this canonicalization. */
7740 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7741 && negate_expr_p (arg1)
7742 && code == PLUS_EXPR)
7744 arg11 = negate_expr (arg1);
7745 code = MINUS_EXPR;
7747 else
7748 arg11 = arg1;
7750 else
7752 /* We cannot generate constant 1 for fract. */
7753 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7754 return NULL_TREE;
7755 arg10 = arg1;
7756 arg11 = build_one_cst (type);
7758 same = NULL_TREE;
7760 /* Prefer factoring a common non-constant. */
7761 if (operand_equal_p (arg00, arg10, 0))
7762 same = arg00, alt0 = arg01, alt1 = arg11;
7763 else if (operand_equal_p (arg01, arg11, 0))
7764 same = arg01, alt0 = arg00, alt1 = arg10;
7765 else if (operand_equal_p (arg00, arg11, 0))
7766 same = arg00, alt0 = arg01, alt1 = arg10;
7767 else if (operand_equal_p (arg01, arg10, 0))
7768 same = arg01, alt0 = arg00, alt1 = arg11;
7770 /* No identical multiplicands; see if we can find a common
7771 power-of-two factor in non-power-of-two multiplies. This
7772 can help in multi-dimensional array access. */
7773 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7775 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7776 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7777 HOST_WIDE_INT tmp;
7778 bool swap = false;
7779 tree maybe_same;
7781 /* Move min of absolute values to int11. */
7782 if (absu_hwi (int01) < absu_hwi (int11))
7784 tmp = int01, int01 = int11, int11 = tmp;
7785 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7786 maybe_same = arg01;
7787 swap = true;
7789 else
7790 maybe_same = arg11;
7792 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7793 if (factor > 1
7794 && pow2p_hwi (factor)
7795 && (int01 & (factor - 1)) == 0
7796 /* The remainder should not be a constant, otherwise we
7797 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7798 increase the number of multiplications necessary. */
7799 && TREE_CODE (arg10) != INTEGER_CST)
7801 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7802 build_int_cst (TREE_TYPE (arg00),
7803 int01 / int11));
7804 alt1 = arg10;
7805 same = maybe_same;
7806 if (swap)
7807 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7811 if (!same)
7812 return NULL_TREE;
7814 if (! ANY_INTEGRAL_TYPE_P (type)
7815 || TYPE_OVERFLOW_WRAPS (type)
7816 /* We are neither factoring zero nor minus one. */
7817 || TREE_CODE (same) == INTEGER_CST)
7818 return fold_build2_loc (loc, MULT_EXPR, type,
7819 fold_build2_loc (loc, code, type,
7820 fold_convert_loc (loc, type, alt0),
7821 fold_convert_loc (loc, type, alt1)),
7822 fold_convert_loc (loc, type, same));
7824 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7825 same may be minus one and thus the multiplication may overflow. Perform
7826 the sum operation in an unsigned type. */
7827 tree utype = unsigned_type_for (type);
7828 tree tem = fold_build2_loc (loc, code, utype,
7829 fold_convert_loc (loc, utype, alt0),
7830 fold_convert_loc (loc, utype, alt1));
7831 /* If the sum evaluated to a constant that is not -INF, the
7832 multiplication cannot overflow. */
7833 if (TREE_CODE (tem) == INTEGER_CST
7834 && (wi::to_wide (tem)
7835 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7836 return fold_build2_loc (loc, MULT_EXPR, type,
7837 fold_convert (type, tem), same);
7839 /* Do not resort to unsigned multiplication because
7840 we lose the no-overflow property of the expression. */
7841 return NULL_TREE;
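
/* Editorial sketch of the main factoring above, using unsigned (wrapping)
   arithmetic so the distribution holds unconditionally:
   (A * C) + (B * C) -> (A + B) * C.  */

static unsigned
factor_common_demo (unsigned a, unsigned b, unsigned c)
{
  unsigned distributed = a * c + b * c;
  unsigned factored = (a + b) * c;
  return distributed == factored;  /* Always 1 with wrapping semantics.  */
}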
7844 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7845 specified by EXPR into the buffer PTR of length LEN bytes.
7846 Return the number of bytes placed in the buffer, or zero
7847 upon failure. */
7849 static int
7850 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7852 tree type = TREE_TYPE (expr);
7853 int total_bytes;
7854 if (TREE_CODE (type) == BITINT_TYPE)
7856 struct bitint_info info;
7857 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7858 gcc_assert (ok);
7859 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7860 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7862 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7863 /* More work is needed when adding _BitInt support to PDP endian
7864 if the limb is smaller than a word, or if the _BitInt limb ordering
7865 doesn't match the target endianness here. */
7866 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7867 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7868 || (GET_MODE_SIZE (limb_mode)
7869 >= UNITS_PER_WORD)));
7871 else
7872 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7874 else
7875 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7876 int byte, offset, word, words;
7877 unsigned char value;
7879 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7880 return 0;
7881 if (off == -1)
7882 off = 0;
7884 if (ptr == NULL)
7885 /* Dry run. */
7886 return MIN (len, total_bytes - off);
7888 words = total_bytes / UNITS_PER_WORD;
7890 for (byte = 0; byte < total_bytes; byte++)
7892 int bitpos = byte * BITS_PER_UNIT;
7893 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7894 number of bytes. */
7895 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7897 if (total_bytes > UNITS_PER_WORD)
7899 word = byte / UNITS_PER_WORD;
7900 if (WORDS_BIG_ENDIAN)
7901 word = (words - 1) - word;
7902 offset = word * UNITS_PER_WORD;
7903 if (BYTES_BIG_ENDIAN)
7904 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7905 else
7906 offset += byte % UNITS_PER_WORD;
7908 else
7909 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7910 if (offset >= off && offset - off < len)
7911 ptr[offset - off] = value;
7913 return MIN (len, total_bytes - off);
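
/* Editorial illustration of the byte-placement logic above for the
   single-word case, on a host uint32_t (not the GCC code itself): byte 0
   of the value goes to offset 0 for little-endian and to the highest
   offset for big-endian.  */

#include <stdint.h>

static void
encode_uint32_demo (uint32_t v, unsigned char *ptr, int bytes_big_endian)
{
  for (int byte = 0; byte < 4; byte++)
    {
      unsigned char value = (v >> (byte * 8)) & 0xff;
      int offset = bytes_big_endian ? 3 - byte : byte;
      ptr[offset] = value;
    }
}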
7917 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7918 specified by EXPR into the buffer PTR of length LEN bytes.
7919 Return the number of bytes placed in the buffer, or zero
7920 upon failure. */
7922 static int
7923 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7925 tree type = TREE_TYPE (expr);
7926 scalar_mode mode = SCALAR_TYPE_MODE (type);
7927 int total_bytes = GET_MODE_SIZE (mode);
7928 FIXED_VALUE_TYPE value;
7929 tree i_value, i_type;
7931 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7932 return 0;
7934 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7936 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7937 return 0;
7939 value = TREE_FIXED_CST (expr);
7940 i_value = double_int_to_tree (i_type, value.data);
7942 return native_encode_int (i_value, ptr, len, off);
7946 /* Subroutine of native_encode_expr. Encode the REAL_CST
7947 specified by EXPR into the buffer PTR of length LEN bytes.
7948 Return the number of bytes placed in the buffer, or zero
7949 upon failure. */
7951 static int
7952 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7954 tree type = TREE_TYPE (expr);
7955 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7956 int byte, offset, word, words, bitpos;
7957 unsigned char value;
7959 /* There are always 32 bits in each long, no matter the size of
7960 the host's long. We handle floating point representations with
7961 up to 192 bits. */
7962 long tmp[6];
7964 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7965 return 0;
7966 if (off == -1)
7967 off = 0;
7969 if (ptr == NULL)
7970 /* Dry run. */
7971 return MIN (len, total_bytes - off);
7973 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7975 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7977 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7978 bitpos += BITS_PER_UNIT)
7980 byte = (bitpos / BITS_PER_UNIT) & 3;
7981 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7983 if (UNITS_PER_WORD < 4)
7985 word = byte / UNITS_PER_WORD;
7986 if (WORDS_BIG_ENDIAN)
7987 word = (words - 1) - word;
7988 offset = word * UNITS_PER_WORD;
7989 if (BYTES_BIG_ENDIAN)
7990 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7991 else
7992 offset += byte % UNITS_PER_WORD;
7994 else
7996 offset = byte;
7997 if (BYTES_BIG_ENDIAN)
7999 /* Reverse bytes within each long, or within the entire float
8000 if it's smaller than a long (for HFmode). */
8001 offset = MIN (3, total_bytes - 1) - offset;
8002 gcc_assert (offset >= 0);
8005 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
8006 if (offset >= off
8007 && offset - off < len)
8008 ptr[offset - off] = value;
8010 return MIN (len, total_bytes - off);
8013 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
8014 specified by EXPR into the buffer PTR of length LEN bytes.
8015 Return the number of bytes placed in the buffer, or zero
8016 upon failure. */
8018 static int
8019 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
8021 int rsize, isize;
8022 tree part;
8024 part = TREE_REALPART (expr);
8025 rsize = native_encode_expr (part, ptr, len, off);
8026 if (off == -1 && rsize == 0)
8027 return 0;
8028 part = TREE_IMAGPART (expr);
8029 if (off != -1)
8030 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
8031 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
8032 len - rsize, off);
8033 if (off == -1 && isize != rsize)
8034 return 0;
8035 return rsize + isize;
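
/* Editorial sketch (host layout, for illustration only): a complex
   constant is encoded as the real part immediately followed by the
   imaginary part, which is why the recursive calls above adjust OFF and
   LEN by the element size.  */

#include <string.h>

static void
encode_complex_demo (double re, double im, unsigned char *ptr)
{
  memcpy (ptr, &re, sizeof re);              /* Real part first.  */
  memcpy (ptr + sizeof re, &im, sizeof im);  /* Imaginary part after.  */
}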
8038 /* Like native_encode_vector, but only encode the first COUNT elements.
8039 The other arguments are as for native_encode_vector. */
8041 static int
8042 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
8043 int off, unsigned HOST_WIDE_INT count)
8045 tree itype = TREE_TYPE (TREE_TYPE (expr));
8046 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
8047 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
8049 /* This is the only case in which elements can be smaller than a byte.
8050 Element 0 is always in the lsb of the containing byte. */
8051 unsigned int elt_bits = TYPE_PRECISION (itype);
8052 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
8053 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8054 return 0;
8056 if (off == -1)
8057 off = 0;
8059 /* Zero the buffer and then set bits later where necessary. */
8060 int extract_bytes = MIN (len, total_bytes - off);
8061 if (ptr)
8062 memset (ptr, 0, extract_bytes);
8064 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
8065 unsigned int first_elt = off * elts_per_byte;
8066 unsigned int extract_elts = extract_bytes * elts_per_byte;
8067 for (unsigned int i = 0; i < extract_elts; ++i)
8069 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
8070 if (TREE_CODE (elt) != INTEGER_CST)
8071 return 0;
8073 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
8075 unsigned int bit = i * elt_bits;
8076 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
8079 return extract_bytes;
8082 int offset = 0;
8083 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
8084 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
8086 if (off >= size)
8088 off -= size;
8089 continue;
8091 tree elem = VECTOR_CST_ELT (expr, i);
8092 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
8093 len - offset, off);
8094 if ((off == -1 && res != size) || res == 0)
8095 return 0;
8096 offset += res;
8097 if (offset >= len)
8098 return (off == -1 && i < count - 1) ? 0 : offset;
8099 if (off != -1)
8100 off = 0;
8102 return offset;
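
/* Editorial sketch of the sub-byte boolean packing above, assuming 1-bit
   elements and 8-bit bytes: element 0 lands in the least significant bit
   of byte 0, element 1 in the next bit, and so on.  */

static void
pack_bool_vector_demo (const int *elts, unsigned count, unsigned char *ptr)
{
  for (unsigned i = 0; i < (count + 7) / 8; i++)
    ptr[i] = 0;                         /* Zero first, then set bits.  */
  for (unsigned i = 0; i < count; i++)
    if (elts[i])
      ptr[i / 8] |= 1 << (i % 8);
}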
8105 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
8106 specified by EXPR into the buffer PTR of length LEN bytes.
8107 Return the number of bytes placed in the buffer, or zero
8108 upon failure. */
8110 static int
8111 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8113 unsigned HOST_WIDE_INT count;
8114 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
8115 return 0;
8116 return native_encode_vector_part (expr, ptr, len, off, count);
8120 /* Subroutine of native_encode_expr. Encode the STRING_CST
8121 specified by EXPR into the buffer PTR of length LEN bytes.
8122 Return the number of bytes placed in the buffer, or zero
8123 upon failure. */
8125 static int
8126 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8128 tree type = TREE_TYPE (expr);
8130 /* Wide-char strings are encoded in target byte order, so natively
8131 encoding them is trivial. */
8132 if (BITS_PER_UNIT != CHAR_BIT
8133 || TREE_CODE (type) != ARRAY_TYPE
8134 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8135 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8136 return 0;
8138 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8139 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8140 return 0;
8141 if (off == -1)
8142 off = 0;
8143 len = MIN (total_bytes - off, len);
8144 if (ptr == NULL)
8145 /* Dry run. */;
8146 else
8148 int written = 0;
8149 if (off < TREE_STRING_LENGTH (expr))
8151 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8152 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8154 memset (ptr + written, 0, len - written);
8156 return len;
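
/* Editorial sketch in plain C (assuming 8-bit bytes): the STRING_CST
   payload may be shorter than its array type, and the remaining bytes
   are zero-filled, exactly as the memcpy/memset pair above does.  */

#include <string.h>

static void
encode_string_demo (const char *str, size_t str_len,
                    unsigned char *ptr, size_t total_bytes)
{
  size_t written = str_len < total_bytes ? str_len : total_bytes;
  memcpy (ptr, str, written);
  memset (ptr + written, 0, total_bytes - written);
}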
8160 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8161 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8162 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8163 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8164 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8165 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8166 Return the number of bytes placed in the buffer, or zero upon failure. */
8168 int
8169 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8171 /* We don't support starting at a negative offset, and -1 is special. */
8172 if (off < -1)
8173 return 0;
8175 switch (TREE_CODE (expr))
8177 case INTEGER_CST:
8178 return native_encode_int (expr, ptr, len, off);
8180 case REAL_CST:
8181 return native_encode_real (expr, ptr, len, off);
8183 case FIXED_CST:
8184 return native_encode_fixed (expr, ptr, len, off);
8186 case COMPLEX_CST:
8187 return native_encode_complex (expr, ptr, len, off);
8189 case VECTOR_CST:
8190 return native_encode_vector (expr, ptr, len, off);
8192 case STRING_CST:
8193 return native_encode_string (expr, ptr, len, off);
8195 default:
8196 return 0;
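
/* Usage sketch (editorial; this mirrors how fold_view_convert_expr later
   in this file pairs the encode and interpret routines, it is not new
   API): */

static tree
encode_interpret_roundtrip_demo (tree type, tree expr)
{
  unsigned char buf[128];
  /* OFF == -1 requests the whole value and fails if BUF is too small.  */
  int len = native_encode_expr (expr, buf, sizeof buf, -1);
  return len ? native_interpret_expr (type, buf, len) : NULL_TREE;
}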
8200 /* Try to find a type whose byte size is at least FIELDSIZE bytes and
8201 at most LEN bytes, with underlying mode precision/size a multiple
8202 of BITS_PER_UNIT. As native_{interpret,encode}_int work in terms of
8203 machine modes, we can't just use build_nonstandard_integer_type. */
8205 tree
8206 find_bitfield_repr_type (int fieldsize, int len)
8208 machine_mode mode;
8209 for (int pass = 0; pass < 2; pass++)
8211 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8212 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8213 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8214 && known_eq (GET_MODE_PRECISION (mode),
8215 GET_MODE_BITSIZE (mode))
8216 && known_le (GET_MODE_SIZE (mode), len))
8218 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8219 if (ret && TYPE_MODE (ret) == mode)
8220 return ret;
8224 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8225 if (int_n_enabled_p[i]
8226 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8227 && int_n_trees[i].unsigned_type)
8229 tree ret = int_n_trees[i].unsigned_type;
8230 mode = TYPE_MODE (ret);
8231 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8232 && known_eq (GET_MODE_PRECISION (mode),
8233 GET_MODE_BITSIZE (mode))
8234 && known_le (GET_MODE_SIZE (mode), len))
8235 return ret;
8238 return NULL_TREE;
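
/* Editorial approximation of the search above with plain byte counts
   (the real code walks machine modes, whose sizes need not be powers of
   two): pick the smallest power-of-two size covering FIELDSIZE without
   exceeding LEN.  */

static int
repr_size_demo (int fieldsize, int len)
{
  for (int size = 1; size <= len; size *= 2)
    if (size >= fieldsize)
      return size;
  return 0;  /* Nothing suitable.  */
}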
8241 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8242 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR has
8243 to be non-NULL and OFF zero), then in addition to filling the
8244 bytes pointed to by PTR with the value, also clear any bits pointed
8245 to by MASK that are known to be initialized; keep them as-is for
8246 e.g. uninitialized padding bits or uninitialized fields. */
8248 int
8249 native_encode_initializer (tree init, unsigned char *ptr, int len,
8250 int off, unsigned char *mask)
8252 int r;
8254 /* We don't support starting at a negative offset, and -1 is special. */
8255 if (off < -1 || init == NULL_TREE)
8256 return 0;
8258 gcc_assert (mask == NULL || (off == 0 && ptr));
8260 STRIP_NOPS (init);
8261 switch (TREE_CODE (init))
8263 case VIEW_CONVERT_EXPR:
8264 case NON_LVALUE_EXPR:
8265 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8266 mask);
8267 default:
8268 r = native_encode_expr (init, ptr, len, off);
8269 if (mask)
8270 memset (mask, 0, r);
8271 return r;
8272 case CONSTRUCTOR:
8273 tree type = TREE_TYPE (init);
8274 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8275 if (total_bytes < 0)
8276 return 0;
8277 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8278 return 0;
8279 int o = off == -1 ? 0 : off;
8280 if (TREE_CODE (type) == ARRAY_TYPE)
8282 tree min_index;
8283 unsigned HOST_WIDE_INT cnt;
8284 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8285 constructor_elt *ce;
8287 if (!TYPE_DOMAIN (type)
8288 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8289 return 0;
8291 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8292 if (fieldsize <= 0)
8293 return 0;
8295 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8296 if (ptr)
8297 memset (ptr, '\0', MIN (total_bytes - off, len));
8299 for (cnt = 0; ; cnt++)
8301 tree val = NULL_TREE, index = NULL_TREE;
8302 HOST_WIDE_INT pos = curpos, count = 0;
8303 bool full = false;
8304 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8306 val = ce->value;
8307 index = ce->index;
8309 else if (mask == NULL
8310 || CONSTRUCTOR_NO_CLEARING (init)
8311 || curpos >= total_bytes)
8312 break;
8313 else
8314 pos = total_bytes;
8316 if (index && TREE_CODE (index) == RANGE_EXPR)
8318 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8319 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8320 return 0;
8321 offset_int wpos
8322 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8323 - wi::to_offset (min_index),
8324 TYPE_PRECISION (sizetype));
8325 wpos *= fieldsize;
8326 if (!wi::fits_shwi_p (wpos))
8327 return 0;
8328 pos = wpos.to_shwi ();
8329 offset_int wcount
8330 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8331 - wi::to_offset (TREE_OPERAND (index, 0)),
8332 TYPE_PRECISION (sizetype));
8333 if (!wi::fits_shwi_p (wcount))
8334 return 0;
8335 count = wcount.to_shwi ();
8337 else if (index)
8339 if (TREE_CODE (index) != INTEGER_CST)
8340 return 0;
8341 offset_int wpos
8342 = wi::sext (wi::to_offset (index)
8343 - wi::to_offset (min_index),
8344 TYPE_PRECISION (sizetype));
8345 wpos *= fieldsize;
8346 if (!wi::fits_shwi_p (wpos))
8347 return 0;
8348 pos = wpos.to_shwi ();
8351 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8353 if (valueinit == -1)
8355 tree zero = build_zero_cst (TREE_TYPE (type));
8356 r = native_encode_initializer (zero, ptr + curpos,
8357 fieldsize, 0,
8358 mask + curpos);
8359 if (TREE_CODE (zero) == CONSTRUCTOR)
8360 ggc_free (zero);
8361 if (!r)
8362 return 0;
8363 valueinit = curpos;
8364 curpos += fieldsize;
8366 while (curpos != pos)
8368 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8369 memcpy (mask + curpos, mask + valueinit, fieldsize);
8370 curpos += fieldsize;
8374 curpos = pos;
8375 if (val)
8376 do
8378 if (off == -1
8379 || (curpos >= off
8380 && (curpos + fieldsize
8381 <= (HOST_WIDE_INT) off + len)))
8383 if (full)
8385 if (ptr)
8386 memcpy (ptr + (curpos - o), ptr + (pos - o),
8387 fieldsize);
8388 if (mask)
8389 memcpy (mask + curpos, mask + pos, fieldsize);
8391 else if (!native_encode_initializer (val,
8392 ptr
8393 ? ptr + curpos - o
8394 : NULL,
8395 fieldsize,
8396 off == -1 ? -1
8397 : 0,
8398 mask
8399 ? mask + curpos
8400 : NULL))
8401 return 0;
8402 else
8404 full = true;
8405 pos = curpos;
8408 else if (curpos + fieldsize > off
8409 && curpos < (HOST_WIDE_INT) off + len)
8411 /* Partial overlap. */
8412 unsigned char *p = NULL;
8413 int no = 0;
8414 int l;
8415 gcc_assert (mask == NULL);
8416 if (curpos >= off)
8418 if (ptr)
8419 p = ptr + curpos - off;
8420 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8421 fieldsize);
8423 else
8425 p = ptr;
8426 no = off - curpos;
8427 l = len;
8429 if (!native_encode_initializer (val, p, l, no, NULL))
8430 return 0;
8432 curpos += fieldsize;
8434 while (count-- != 0);
8436 return MIN (total_bytes - off, len);
8438 else if (TREE_CODE (type) == RECORD_TYPE
8439 || TREE_CODE (type) == UNION_TYPE)
8441 unsigned HOST_WIDE_INT cnt;
8442 constructor_elt *ce;
8443 tree fld_base = TYPE_FIELDS (type);
8444 tree to_free = NULL_TREE;
8446 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8447 if (ptr != NULL)
8448 memset (ptr, '\0', MIN (total_bytes - o, len));
8449 for (cnt = 0; ; cnt++)
8451 tree val = NULL_TREE, field = NULL_TREE;
8452 HOST_WIDE_INT pos = 0, fieldsize;
8453 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8455 if (to_free)
8457 ggc_free (to_free);
8458 to_free = NULL_TREE;
8461 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8463 val = ce->value;
8464 field = ce->index;
8465 if (field == NULL_TREE)
8466 return 0;
8468 pos = int_byte_position (field);
8469 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8470 continue;
8472 else if (mask == NULL
8473 || CONSTRUCTOR_NO_CLEARING (init))
8474 break;
8475 else
8476 pos = total_bytes;
8478 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8480 tree fld;
8481 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8483 if (TREE_CODE (fld) != FIELD_DECL)
8484 continue;
8485 if (fld == field)
8486 break;
8487 if (DECL_PADDING_P (fld))
8488 continue;
8489 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8490 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8491 return 0;
8492 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8493 continue;
8494 break;
8496 if (fld == NULL_TREE)
8498 if (ce == NULL)
8499 break;
8500 return 0;
8502 fld_base = DECL_CHAIN (fld);
8503 if (fld != field)
8505 cnt--;
8506 field = fld;
8507 pos = int_byte_position (field);
8508 val = build_zero_cst (TREE_TYPE (fld));
8509 if (TREE_CODE (val) == CONSTRUCTOR)
8510 to_free = val;
8514 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8515 && TYPE_DOMAIN (TREE_TYPE (field))
8516 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8518 if (mask || off != -1)
8519 return 0;
8520 if (val == NULL_TREE)
8521 continue;
8522 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8523 return 0;
8524 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8525 if (fieldsize < 0
8526 || (int) fieldsize != fieldsize
8527 || (pos + fieldsize) > INT_MAX)
8528 return 0;
8529 if (pos + fieldsize > total_bytes)
8531 if (ptr != NULL && total_bytes < len)
8532 memset (ptr + total_bytes, '\0',
8533 MIN (pos + fieldsize, len) - total_bytes);
8534 total_bytes = pos + fieldsize;
8537 else
8539 if (DECL_SIZE_UNIT (field) == NULL_TREE
8540 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8541 return 0;
8542 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8544 if (fieldsize == 0)
8545 continue;
8547 /* Prepare to deal with integral bit-fields and filter out other
8548 bit-fields that do not start and end on a byte boundary. */
8549 if (DECL_BIT_FIELD (field))
8551 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8552 return 0;
8553 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8554 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8556 bpos %= BITS_PER_UNIT;
8557 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8558 epos = fieldsize % BITS_PER_UNIT;
8559 fieldsize += BITS_PER_UNIT - 1;
8560 fieldsize /= BITS_PER_UNIT;
8562 else if (bpos % BITS_PER_UNIT
8563 || DECL_SIZE (field) == NULL_TREE
8564 || !tree_fits_shwi_p (DECL_SIZE (field))
8565 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8566 return 0;
8569 if (off != -1 && pos + fieldsize <= off)
8570 continue;
8572 if (val == NULL_TREE)
8573 continue;
8575 if (DECL_BIT_FIELD (field)
8576 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8578 /* FIXME: Handle PDP endian. */
8579 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8580 return 0;
8582 if (TREE_CODE (val) != INTEGER_CST)
8583 return 0;
8585 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8586 tree repr_type = NULL_TREE;
8587 HOST_WIDE_INT rpos = 0;
8588 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8590 rpos = int_byte_position (repr);
8591 repr_type = TREE_TYPE (repr);
8593 else
8595 repr_type = find_bitfield_repr_type (fieldsize, len);
8596 if (repr_type == NULL_TREE)
8597 return 0;
8598 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8599 gcc_assert (repr_size > 0 && repr_size <= len);
8600 if (pos + repr_size <= o + len)
8601 rpos = pos;
8602 else
8604 rpos = o + len - repr_size;
8605 gcc_assert (rpos <= pos);
8609 if (rpos > pos)
8610 return 0;
8611 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8612 int diff = (TYPE_PRECISION (repr_type)
8613 - TYPE_PRECISION (TREE_TYPE (field)));
8614 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8615 if (!BYTES_BIG_ENDIAN)
8616 w = wi::lshift (w, bitoff);
8617 else
8618 w = wi::lshift (w, diff - bitoff);
8619 val = wide_int_to_tree (repr_type, w);
8621 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8622 / BITS_PER_UNIT + 1];
8623 int l = native_encode_int (val, buf, sizeof buf, 0);
8624 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8625 return 0;
8627 if (ptr == NULL)
8628 continue;
8630 /* If the bitfield does not start at a byte boundary, handle
8631 the partial byte at the start. */
8632 if (bpos
8633 && (off == -1 || (pos >= off && len >= 1)))
8635 if (!BYTES_BIG_ENDIAN)
8637 int msk = (1 << bpos) - 1;
8638 buf[pos - rpos] &= ~msk;
8639 buf[pos - rpos] |= ptr[pos - o] & msk;
8640 if (mask)
8642 if (fieldsize > 1 || epos == 0)
8643 mask[pos] &= msk;
8644 else
8645 mask[pos] &= (msk | ~((1 << epos) - 1));
8648 else
8650 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8651 buf[pos - rpos] &= msk;
8652 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8653 if (mask)
8655 if (fieldsize > 1 || epos == 0)
8656 mask[pos] &= ~msk;
8657 else
8658 mask[pos] &= (~msk
8659 | ((1 << (BITS_PER_UNIT - epos))
8660 - 1));
8664 /* If the bitfield does not end at a byte boundary, handle
8665 the partial byte at the end. */
8666 if (epos
8667 && (off == -1
8668 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8670 if (!BYTES_BIG_ENDIAN)
8672 int msk = (1 << epos) - 1;
8673 buf[pos - rpos + fieldsize - 1] &= msk;
8674 buf[pos - rpos + fieldsize - 1]
8675 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8676 if (mask && (fieldsize > 1 || bpos == 0))
8677 mask[pos + fieldsize - 1] &= ~msk;
8679 else
8681 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8682 buf[pos - rpos + fieldsize - 1] &= ~msk;
8683 buf[pos - rpos + fieldsize - 1]
8684 |= ptr[pos + fieldsize - 1 - o] & msk;
8685 if (mask && (fieldsize > 1 || bpos == 0))
8686 mask[pos + fieldsize - 1] &= msk;
8689 if (off == -1
8690 || (pos >= off
8691 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8693 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8694 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8695 memset (mask + pos + (bpos != 0), 0,
8696 fieldsize - (bpos != 0) - (epos != 0));
8698 else
8700 /* Partial overlap. */
8701 HOST_WIDE_INT fsz = fieldsize;
8702 gcc_assert (mask == NULL);
8703 if (pos < off)
8705 fsz -= (off - pos);
8706 pos = off;
8708 if (pos + fsz > (HOST_WIDE_INT) off + len)
8709 fsz = (HOST_WIDE_INT) off + len - pos;
8710 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8712 continue;
8715 if (off == -1
8716 || (pos >= off
8717 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8719 int fldsize = fieldsize;
8720 if (off == -1)
8722 tree fld = DECL_CHAIN (field);
8723 while (fld)
8725 if (TREE_CODE (fld) == FIELD_DECL)
8726 break;
8727 fld = DECL_CHAIN (fld);
8729 if (fld == NULL_TREE)
8730 fldsize = len - pos;
8732 r = native_encode_initializer (val, ptr ? ptr + pos - o
8733 : NULL,
8734 fldsize,
8735 off == -1 ? -1 : 0,
8736 mask ? mask + pos : NULL);
8737 if (!r)
8738 return 0;
8739 if (off == -1
8740 && fldsize != fieldsize
8741 && r > fieldsize
8742 && pos + r > total_bytes)
8743 total_bytes = pos + r;
8745 else
8747 /* Partial overlap. */
8748 unsigned char *p = NULL;
8749 int no = 0;
8750 int l;
8751 gcc_assert (mask == NULL);
8752 if (pos >= off)
8754 if (ptr)
8755 p = ptr + pos - off;
8756 l = MIN ((HOST_WIDE_INT) off + len - pos,
8757 fieldsize);
8759 else
8761 p = ptr;
8762 no = off - pos;
8763 l = len;
8765 if (!native_encode_initializer (val, p, l, no, NULL))
8766 return 0;
8769 return MIN (total_bytes - off, len);
8771 return 0;
8776 /* Subroutine of native_interpret_expr. Interpret the contents of
8777 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8778 If the buffer cannot be interpreted, return NULL_TREE. */
8780 static tree
8781 native_interpret_int (tree type, const unsigned char *ptr, int len)
8783 int total_bytes;
8784 if (TREE_CODE (type) == BITINT_TYPE)
8786 struct bitint_info info;
8787 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8788 gcc_assert (ok);
8789 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8790 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8792 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8793 /* More work is needed when adding _BitInt support to PDP endian
8794 if the limb is smaller than a word, or if the _BitInt limb ordering
8795 doesn't match the target endianness here. */
8796 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8797 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8798 || (GET_MODE_SIZE (limb_mode)
8799 >= UNITS_PER_WORD)));
8801 else
8802 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8804 else
8805 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8807 if (total_bytes > len)
8808 return NULL_TREE;
8810 wide_int result = wi::from_buffer (ptr, total_bytes);
8812 return wide_int_to_tree (type, result);
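
/* Editorial illustration of what wi::from_buffer amounts to for a 4-byte
   little-endian value, in plain C: */

#include <stdint.h>

static uint32_t
interpret_uint32_le_demo (const unsigned char *ptr)
{
  return (uint32_t) ptr[0]
         | ((uint32_t) ptr[1] << 8)
         | ((uint32_t) ptr[2] << 16)
         | ((uint32_t) ptr[3] << 24);
}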
8816 /* Subroutine of native_interpret_expr. Interpret the contents of
8817 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8818 If the buffer cannot be interpreted, return NULL_TREE. */
8820 static tree
8821 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8823 scalar_mode mode = SCALAR_TYPE_MODE (type);
8824 int total_bytes = GET_MODE_SIZE (mode);
8825 double_int result;
8826 FIXED_VALUE_TYPE fixed_value;
8828 if (total_bytes > len
8829 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8830 return NULL_TREE;
8832 result = double_int::from_buffer (ptr, total_bytes);
8833 fixed_value = fixed_from_double_int (result, mode);
8835 return build_fixed (type, fixed_value);
8839 /* Subroutine of native_interpret_expr. Interpret the contents of
8840 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8841 If the buffer cannot be interpreted, return NULL_TREE. */
8843 tree
8844 native_interpret_real (tree type, const unsigned char *ptr, int len)
8846 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8847 int total_bytes = GET_MODE_SIZE (mode);
8848 unsigned char value;
8849 /* There are always 32 bits in each long, no matter the size of
8850 the host's long. We handle floating point representations with
8851 up to 192 bits. */
8852 REAL_VALUE_TYPE r;
8853 long tmp[6];
8855 if (total_bytes > len || total_bytes > 24)
8856 return NULL_TREE;
8857 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8859 memset (tmp, 0, sizeof (tmp));
8860 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8861 bitpos += BITS_PER_UNIT)
8863 /* Both OFFSET and BYTE index within a long;
8864 bitpos indexes the whole float. */
8865 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8866 if (UNITS_PER_WORD < 4)
8868 int word = byte / UNITS_PER_WORD;
8869 if (WORDS_BIG_ENDIAN)
8870 word = (words - 1) - word;
8871 offset = word * UNITS_PER_WORD;
8872 if (BYTES_BIG_ENDIAN)
8873 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8874 else
8875 offset += byte % UNITS_PER_WORD;
8877 else
8879 offset = byte;
8880 if (BYTES_BIG_ENDIAN)
8882 /* Reverse bytes within each long, or within the entire float
8883 if it's smaller than a long (for HFmode). */
8884 offset = MIN (3, total_bytes - 1) - offset;
8885 gcc_assert (offset >= 0);
8888 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8890 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8893 real_from_target (&r, tmp, mode);
8894 return build_real (type, r);
8898 /* Subroutine of native_interpret_expr. Interpret the contents of
8899 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8900 If the buffer cannot be interpreted, return NULL_TREE. */
8902 static tree
8903 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8905 tree etype, rpart, ipart;
8906 int size;
8908 etype = TREE_TYPE (type);
8909 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8910 if (size * 2 > len)
8911 return NULL_TREE;
8912 rpart = native_interpret_expr (etype, ptr, size);
8913 if (!rpart)
8914 return NULL_TREE;
8915 ipart = native_interpret_expr (etype, ptr+size, size);
8916 if (!ipart)
8917 return NULL_TREE;
8918 return build_complex (type, rpart, ipart);
8921 /* Read a vector of type TYPE from the target memory image given by BYTES,
8922 which contains LEN bytes. The vector is known to be encodable using
8923 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8925 Return the vector on success, otherwise return null. */
8927 static tree
8928 native_interpret_vector_part (tree type, const unsigned char *bytes,
8929 unsigned int len, unsigned int npatterns,
8930 unsigned int nelts_per_pattern)
8932 tree elt_type = TREE_TYPE (type);
8933 if (VECTOR_BOOLEAN_TYPE_P (type)
8934 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8936 /* This is the only case in which elements can be smaller than a byte.
8937 Element 0 is always in the lsb of the containing byte. */
8938 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8939 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8940 return NULL_TREE;
8942 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8943 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8945 unsigned int bit_index = i * elt_bits;
8946 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8947 unsigned int lsb = bit_index % BITS_PER_UNIT;
8948 builder.quick_push (bytes[byte_index] & (1 << lsb)
8949 ? build_all_ones_cst (elt_type)
8950 : build_zero_cst (elt_type));
8952 return builder.build ();
8955 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8956 if (elt_bytes * npatterns * nelts_per_pattern > len)
8957 return NULL_TREE;
8959 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8960 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8962 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8963 if (!elt)
8964 return NULL_TREE;
8965 builder.quick_push (elt);
8966 bytes += elt_bytes;
8968 return builder.build ();
8971 /* Subroutine of native_interpret_expr. Interpret the contents of
8972 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8973 If the buffer cannot be interpreted, return NULL_TREE. */
8975 static tree
8976 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8978 unsigned HOST_WIDE_INT size;
8980 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8981 || size > len)
8982 return NULL_TREE;
8984 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8985 return native_interpret_vector_part (type, ptr, len, count, 1);
8989 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8990 the buffer PTR of length LEN as a constant of type TYPE. For
8991 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8992 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8993 return NULL_TREE. */
8995 tree
8996 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8998 switch (TREE_CODE (type))
9000 case INTEGER_TYPE:
9001 case ENUMERAL_TYPE:
9002 case BOOLEAN_TYPE:
9003 case POINTER_TYPE:
9004 case REFERENCE_TYPE:
9005 case OFFSET_TYPE:
9006 case BITINT_TYPE:
9007 return native_interpret_int (type, ptr, len);
9009 case REAL_TYPE:
9010 if (tree ret = native_interpret_real (type, ptr, len))
9012 /* For floating point values in composite modes, punt if this
9013 folding doesn't preserve bit representation. As the mode doesn't
9014 have fixed precision while GCC pretends it does, there could be
9015 valid values that GCC can't really represent accurately.
9016 See PR95450. Even for other modes, e.g. x86 XFmode can have some
9017 bit combinations which GCC doesn't preserve. */
9018 unsigned char buf[24 * 2];
9019 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
9020 int total_bytes = GET_MODE_SIZE (mode);
9021 memcpy (buf + 24, ptr, total_bytes);
9022 clear_type_padding_in_mask (type, buf + 24);
9023 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
9024 || memcmp (buf + 24, buf, total_bytes) != 0)
9025 return NULL_TREE;
9026 return ret;
9028 return NULL_TREE;
9030 case FIXED_POINT_TYPE:
9031 return native_interpret_fixed (type, ptr, len);
9033 case COMPLEX_TYPE:
9034 return native_interpret_complex (type, ptr, len);
9036 case VECTOR_TYPE:
9037 return native_interpret_vector (type, ptr, len);
9039 default:
9040 return NULL_TREE;
9044 /* Returns true if we can interpret the contents of a native encoding
9045 as TYPE. */
9047 bool
9048 can_native_interpret_type_p (tree type)
9050 switch (TREE_CODE (type))
9052 case INTEGER_TYPE:
9053 case ENUMERAL_TYPE:
9054 case BOOLEAN_TYPE:
9055 case POINTER_TYPE:
9056 case REFERENCE_TYPE:
9057 case FIXED_POINT_TYPE:
9058 case REAL_TYPE:
9059 case COMPLEX_TYPE:
9060 case VECTOR_TYPE:
9061 case OFFSET_TYPE:
9062 return true;
9063 default:
9064 return false;
9068 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
9069 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
9071 tree
9072 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
9073 int len)
9075 vec<constructor_elt, va_gc> *elts = NULL;
9076 if (TREE_CODE (type) == ARRAY_TYPE)
9078 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
9079 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
9080 return NULL_TREE;
9082 HOST_WIDE_INT cnt = 0;
9083 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
9085 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
9086 return NULL_TREE;
9087 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
9089 if (eltsz == 0)
9090 cnt = 0;
9091 HOST_WIDE_INT pos = 0;
9092 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
9094 tree v = NULL_TREE;
9095 if (pos >= len || pos + eltsz > len)
9096 return NULL_TREE;
9097 if (can_native_interpret_type_p (TREE_TYPE (type)))
9099 v = native_interpret_expr (TREE_TYPE (type),
9100 ptr + off + pos, eltsz);
9101 if (v == NULL_TREE)
9102 return NULL_TREE;
9104 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9105 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9106 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
9107 eltsz);
9108 if (v == NULL_TREE)
9109 return NULL_TREE;
9110 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9112 return build_constructor (type, elts);
9114 if (TREE_CODE (type) != RECORD_TYPE)
9115 return NULL_TREE;
9116 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9118 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
9119 || is_empty_type (TREE_TYPE (field)))
9120 continue;
9121 tree fld = field;
9122 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9123 int diff = 0;
9124 tree v = NULL_TREE;
9125 if (DECL_BIT_FIELD (field))
9127 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9128 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9130 poly_int64 bitoffset;
9131 poly_uint64 field_offset, fld_offset;
9132 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9133 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9134 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9135 else
9136 bitoffset = 0;
9137 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9138 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9139 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9140 - TYPE_PRECISION (TREE_TYPE (field)));
9141 if (!bitoffset.is_constant (&bitoff)
9142 || bitoff < 0
9143 || bitoff > diff)
9144 return NULL_TREE;
9146 else
9148 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9149 return NULL_TREE;
9150 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9151 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9152 bpos %= BITS_PER_UNIT;
9153 fieldsize += bpos;
9154 fieldsize += BITS_PER_UNIT - 1;
9155 fieldsize /= BITS_PER_UNIT;
9156 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9157 if (repr_type == NULL_TREE)
9158 return NULL_TREE;
9159 sz = int_size_in_bytes (repr_type);
9160 if (sz < 0 || sz > len)
9161 return NULL_TREE;
9162 pos = int_byte_position (field);
9163 if (pos < 0 || pos > len || pos + fieldsize > len)
9164 return NULL_TREE;
9165 HOST_WIDE_INT rpos;
9166 if (pos + sz <= len)
9167 rpos = pos;
9168 else
9170 rpos = len - sz;
9171 gcc_assert (rpos <= pos);
9173 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9174 pos = rpos;
9175 diff = (TYPE_PRECISION (repr_type)
9176 - TYPE_PRECISION (TREE_TYPE (field)));
9177 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9178 if (v == NULL_TREE)
9179 return NULL_TREE;
9180 fld = NULL_TREE;
9184 if (fld)
9186 sz = int_size_in_bytes (TREE_TYPE (fld));
9187 if (sz < 0 || sz > len)
9188 return NULL_TREE;
9189 tree byte_pos = byte_position (fld);
9190 if (!tree_fits_shwi_p (byte_pos))
9191 return NULL_TREE;
9192 pos = tree_to_shwi (byte_pos);
9193 if (pos < 0 || pos > len || pos + sz > len)
9194 return NULL_TREE;
9196 if (fld == NULL_TREE)
9197 /* Already handled above. */;
9198 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9200 v = native_interpret_expr (TREE_TYPE (fld),
9201 ptr + off + pos, sz);
9202 if (v == NULL_TREE)
9203 return NULL_TREE;
9205 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9206 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9207 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9208 if (v == NULL_TREE)
9209 return NULL_TREE;
9210 if (fld != field)
9212 if (TREE_CODE (v) != INTEGER_CST)
9213 return NULL_TREE;
9215 /* FIXME: Figure out how to handle PDP endian bitfields. */
9216 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9217 return NULL_TREE;
9218 if (!BYTES_BIG_ENDIAN)
9219 v = wide_int_to_tree (TREE_TYPE (field),
9220 wi::lrshift (wi::to_wide (v), bitoff));
9221 else
9222 v = wide_int_to_tree (TREE_TYPE (field),
9223 wi::lrshift (wi::to_wide (v),
9224 diff - bitoff));
9226 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9228 return build_constructor (type, elts);
9231 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9232 or extracted constant positions and/or sizes aren't byte aligned. */
9234 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9235 bits between adjacent elements. AMNT should be within
9236 [0, BITS_PER_UNIT).
9237 Example, AMNT = 2:
9238 00011111|11100000 << 2 = 01111111|10000000
9239 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9241 void
9242 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9243 unsigned int amnt)
9245 if (amnt == 0)
9246 return;
9248 unsigned char carry_over = 0U;
9249 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9250 unsigned char clear_mask = (~0U) << amnt;
9252 for (unsigned int i = 0; i < sz; i++)
9254 unsigned prev_carry_over = carry_over;
9255 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9257 ptr[i] <<= amnt;
9258 if (i != 0)
9260 ptr[i] &= clear_mask;
9261 ptr[i] |= prev_carry_over;
9266 /* Like shift_bytes_in_array_left but for big-endian.
9267 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9268 bits between adjacent elements. AMNT should be within
9269 [0, BITS_PER_UNIT).
9270 Example, AMNT = 2:
9271 00011111|11100000 >> 2 = 00000111|11111000
9272 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9274 void
9275 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9276 unsigned int amnt)
9278 if (amnt == 0)
9279 return;
9281 unsigned char carry_over = 0U;
9282 unsigned char carry_mask = ~(~0U << amnt);
9284 for (unsigned int i = 0; i < sz; i++)
9286 unsigned prev_carry_over = carry_over;
9287 carry_over = ptr[i] & carry_mask;
9289 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9290 ptr[i] >>= amnt;
9291 ptr[i] |= prev_carry_over;
9295 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9296 directly on the VECTOR_CST encoding, in a way that works for variable-
9297 length vectors. Return the resulting VECTOR_CST on success or null
9298 on failure. */
9300 static tree
9301 fold_view_convert_vector_encoding (tree type, tree expr)
9303 tree expr_type = TREE_TYPE (expr);
9304 poly_uint64 type_bits, expr_bits;
9305 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9306 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9307 return NULL_TREE;
9309 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9310 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9311 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9312 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9314 /* We can only preserve the semantics of a stepped pattern if the new
9315 vector element is an integer of the same size. */
9316 if (VECTOR_CST_STEPPED_P (expr)
9317 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9318 return NULL_TREE;
9320 /* The number of bits needed to encode one element from every pattern
9321 of the original vector. */
9322 unsigned int expr_sequence_bits
9323 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9325 /* The number of bits needed to encode one element from every pattern
9326 of the result. */
9327 unsigned int type_sequence_bits
9328 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9330 /* Don't try to read more bytes than are available, which can happen
9331 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9332 The general VIEW_CONVERT handling can cope with that case, so there's
9333 no point complicating things here. */
9334 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9335 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9336 BITS_PER_UNIT);
9337 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9338 if (known_gt (buffer_bits, expr_bits))
9339 return NULL_TREE;
9341 /* Get enough bytes of EXPR to form the new encoding. */
9342 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9343 buffer.quick_grow (buffer_bytes);
9344 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9345 buffer_bits / expr_elt_bits)
9346 != (int) buffer_bytes)
9347 return NULL_TREE;
9349 /* Reencode the bytes as TYPE. */
9350 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9351 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9352 type_npatterns, nelts_per_pattern);
9355 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9356 TYPE at compile-time. If we're unable to perform the conversion
9357 return NULL_TREE. */
9359 static tree
9360 fold_view_convert_expr (tree type, tree expr)
9362 unsigned char buffer[128];
9363 unsigned char *buf;
9364 int len;
9365 HOST_WIDE_INT l;
9367 /* Check that the host and target are sane. */
9368 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9369 return NULL_TREE;
9371 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9372 if (tree res = fold_view_convert_vector_encoding (type, expr))
9373 return res;
9375 l = int_size_in_bytes (type);
9376 if (l > (int) sizeof (buffer)
9377 && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9379 buf = XALLOCAVEC (unsigned char, l);
9380 len = l;
9382 else
9384 buf = buffer;
9385 len = sizeof (buffer);
9387 len = native_encode_expr (expr, buf, len);
9388 if (len == 0)
9389 return NULL_TREE;
9391 return native_interpret_expr (type, buf, len);
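/* For instance (illustrative, assuming a little-endian target with
   32-bit float and int), VIEW_CONVERT_EXPR<int>(1.0f) folds to
   0x3f800000: native_encode_expr writes the bytes 00 00 80 3f into
   BUF and native_interpret_expr reads them back as an int.  The
   exact bit pattern depends on the target's float format and
   endianness.  */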
9394 /* Build an expression for the address of T. Folds away INDIRECT_REF
9395 to avoid confusing the gimplify process. */
9397 tree
9398 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9400 /* The size of the object is not relevant when talking about its address. */
9401 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9402 t = TREE_OPERAND (t, 0);
9404 if (INDIRECT_REF_P (t))
9406 t = TREE_OPERAND (t, 0);
9408 if (TREE_TYPE (t) != ptrtype)
9409 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9411 else if (TREE_CODE (t) == MEM_REF
9412 && integer_zerop (TREE_OPERAND (t, 1)))
9414 t = TREE_OPERAND (t, 0);
9416 if (TREE_TYPE (t) != ptrtype)
9417 t = fold_convert_loc (loc, ptrtype, t);
9419 else if (TREE_CODE (t) == MEM_REF
9420 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9421 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9422 TREE_OPERAND (t, 0),
9423 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9424 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9426 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9428 if (TREE_TYPE (t) != ptrtype)
9429 t = fold_convert_loc (loc, ptrtype, t);
9431 else
9432 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9434 return t;
9437 /* Build an expression for the address of T. */
9439 tree
9440 build_fold_addr_expr_loc (location_t loc, tree t)
9442 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9444 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9447 /* Fold a unary expression of code CODE and type TYPE with operand
9448 OP0. Return the folded expression if folding is successful.
9449 Otherwise, return NULL_TREE. */
9451 tree
9452 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9454 tree tem;
9455 tree arg0;
9456 enum tree_code_class kind = TREE_CODE_CLASS (code);
9458 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9459 && TREE_CODE_LENGTH (code) == 1);
9461 arg0 = op0;
9462 if (arg0)
9464 if (CONVERT_EXPR_CODE_P (code)
9465 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9467 /* Don't use STRIP_NOPS, because signedness of argument type
9468 matters. */
9469 STRIP_SIGN_NOPS (arg0);
9471 else
9473 /* Strip any conversions that don't change the mode. This
9474 is safe for every expression, except for a comparison
9475 expression because its signedness is derived from its
9476 operands.
9478 Note that this is done as an internal manipulation within
9479 the constant folder, in order to find the simplest
9480 representation of the arguments so that their form can be
9481 studied. In any case, the appropriate type conversions
9482 should be put back in the tree that will get out of the
9483 constant folder. */
9484 STRIP_NOPS (arg0);
9487 if (CONSTANT_CLASS_P (arg0))
9489 tree tem = const_unop (code, type, arg0);
9490 if (tem)
9492 if (TREE_TYPE (tem) != type)
9493 tem = fold_convert_loc (loc, type, tem);
9494 return tem;
9499 tem = generic_simplify (loc, code, type, op0);
9500 if (tem)
9501 return tem;
9503 if (TREE_CODE_CLASS (code) == tcc_unary)
9505 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9506 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9507 fold_build1_loc (loc, code, type,
9508 fold_convert_loc (loc, TREE_TYPE (op0),
9509 TREE_OPERAND (arg0, 1))));
9510 else if (TREE_CODE (arg0) == COND_EXPR)
9512 tree arg01 = TREE_OPERAND (arg0, 1);
9513 tree arg02 = TREE_OPERAND (arg0, 2);
9514 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9515 arg01 = fold_build1_loc (loc, code, type,
9516 fold_convert_loc (loc,
9517 TREE_TYPE (op0), arg01));
9518 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9519 arg02 = fold_build1_loc (loc, code, type,
9520 fold_convert_loc (loc,
9521 TREE_TYPE (op0), arg02));
9522 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9523 arg01, arg02);
9525 /* If this was a conversion, and all we did was move it
9526 inside the COND_EXPR, bring it back out. But leave it if
9527 it is a conversion from integer to integer and the
9528 result precision is no wider than a word since such a
9529 conversion is cheap and may be optimized away by combine,
9530 while it couldn't if it were outside the COND_EXPR. Then return
9531 so we don't get into an infinite recursion loop taking the
9532 conversion out and then back in. */
9534 if ((CONVERT_EXPR_CODE_P (code)
9535 || code == NON_LVALUE_EXPR)
9536 && TREE_CODE (tem) == COND_EXPR
9537 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9538 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9539 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9540 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9541 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9542 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9543 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9544 && (INTEGRAL_TYPE_P
9545 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9546 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9547 || flag_syntax_only))
9548 tem = build1_loc (loc, code, type,
9549 build3 (COND_EXPR,
9550 TREE_TYPE (TREE_OPERAND
9551 (TREE_OPERAND (tem, 1), 0)),
9552 TREE_OPERAND (tem, 0),
9553 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9554 TREE_OPERAND (TREE_OPERAND (tem, 2),
9555 0)));
9556 return tem;
9560 switch (code)
9562 case NON_LVALUE_EXPR:
9563 if (!maybe_lvalue_p (op0))
9564 return fold_convert_loc (loc, type, op0);
9565 return NULL_TREE;
9567 CASE_CONVERT:
9568 case FLOAT_EXPR:
9569 case FIX_TRUNC_EXPR:
9570 if (COMPARISON_CLASS_P (op0))
9572 /* If we have (type) (a CMP b) and type is an integral type, return
9573 new expression involving the new type. Canonicalize
9574 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9575 non-integral type.
9576 Do not fold the result as that would not simplify further;
9577 folding it again would also lead to infinite recursion. */
9578 if (TREE_CODE (type) == BOOLEAN_TYPE)
9579 return build2_loc (loc, TREE_CODE (op0), type,
9580 TREE_OPERAND (op0, 0),
9581 TREE_OPERAND (op0, 1));
9582 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9583 && TREE_CODE (type) != VECTOR_TYPE)
9584 return build3_loc (loc, COND_EXPR, type, op0,
9585 constant_boolean_node (true, type),
9586 constant_boolean_node (false, type));
9589 /* Handle (T *)&A.B.C for A being of type T and B and C
9590 living at offset zero. This occurs frequently in
9591 C++ upcasting and then accessing the base. */
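/* For instance, given struct A { struct B b; } a; the expression
   (struct A *) &a.b refers to offset zero of A, so it folds to
   plain &a (converted to the pointer type).  This is the shape
   produced when C++ code converts between an object and its
   first base subobject.  */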
9592 if (TREE_CODE (op0) == ADDR_EXPR
9593 && POINTER_TYPE_P (type)
9594 && handled_component_p (TREE_OPERAND (op0, 0)))
9596 poly_int64 bitsize, bitpos;
9597 tree offset;
9598 machine_mode mode;
9599 int unsignedp, reversep, volatilep;
9600 tree base
9601 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9602 &offset, &mode, &unsignedp, &reversep,
9603 &volatilep);
9604 /* If the reference was to a (constant) zero offset, we can use
9605 the address of the base if it has the same base type
9606 as the result type and the pointer type is unqualified. */
9607 if (!offset
9608 && known_eq (bitpos, 0)
9609 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9610 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9611 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9612 return fold_convert_loc (loc, type,
9613 build_fold_addr_expr_loc (loc, base));
9616 if (TREE_CODE (op0) == MODIFY_EXPR
9617 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9618 /* Detect assigning a bitfield. */
9619 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9620 && DECL_BIT_FIELD
9621 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9623 /* Don't leave an assignment inside a conversion
9624 unless assigning a bitfield. */
9625 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9626 /* First do the assignment, then return converted constant. */
9627 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9628 suppress_warning (tem /* What warning? */);
9629 TREE_USED (tem) = 1;
9630 return tem;
9633 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9634 constants (if x has signed type, the sign bit cannot be set
9635 in c). This folds extension into the BIT_AND_EXPR.
9636 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9637 very likely don't have maximal range for their precision and this
9638 transformation effectively doesn't preserve non-maximal ranges. */
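/* For instance, assuming 32-bit int and 64-bit long and signed
   int x, (long) (x & 0x7f) becomes (long) x & 0x7f: the mask's
   sign bit is clear, so extending before masking yields the same
   value and exposes (long) x to further folding.  */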
9639 if (TREE_CODE (type) == INTEGER_TYPE
9640 && TREE_CODE (op0) == BIT_AND_EXPR
9641 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9643 tree and_expr = op0;
9644 tree and0 = TREE_OPERAND (and_expr, 0);
9645 tree and1 = TREE_OPERAND (and_expr, 1);
9646 int change = 0;
9648 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9649 || (TYPE_PRECISION (type)
9650 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9651 change = 1;
9652 else if (TYPE_PRECISION (TREE_TYPE (and1))
9653 <= HOST_BITS_PER_WIDE_INT
9654 && tree_fits_uhwi_p (and1))
9656 unsigned HOST_WIDE_INT cst;
9658 cst = tree_to_uhwi (and1);
9659 cst &= HOST_WIDE_INT_M1U
9660 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9661 change = (cst == 0);
9662 if (change
9663 && !flag_syntax_only
9664 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9665 == ZERO_EXTEND))
9667 tree uns = unsigned_type_for (TREE_TYPE (and0));
9668 and0 = fold_convert_loc (loc, uns, and0);
9669 and1 = fold_convert_loc (loc, uns, and1);
9672 if (change)
9674 tree and1_type = TREE_TYPE (and1);
9675 unsigned prec = MAX (TYPE_PRECISION (and1_type),
9676 TYPE_PRECISION (type));
9677 tem = force_fit_type (type,
9678 wide_int::from (wi::to_wide (and1), prec,
9679 TYPE_SIGN (and1_type)),
9680 0, TREE_OVERFLOW (and1));
9681 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9682 fold_convert_loc (loc, type, and0), tem);
9686 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9687 cast (T1)X will fold away. We assume that this happens when X itself
9688 is a cast. */
9689 if (POINTER_TYPE_P (type)
9690 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9691 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9693 tree arg00 = TREE_OPERAND (arg0, 0);
9694 tree arg01 = TREE_OPERAND (arg0, 1);
9696 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9697 when the pointed type needs higher alignment than
9698 the p+ first operand's pointed type. */
9699 if (!in_gimple_form
9700 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9701 && (min_align_of_type (TREE_TYPE (type))
9702 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9703 return NULL_TREE;
9705 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9706 when type is a reference type and arg00's type is not,
9707 because arg00 could be validly nullptr and if arg01 doesn't return,
9708 we don't want false positive binding of reference to nullptr. */
9709 if (TREE_CODE (type) == REFERENCE_TYPE
9710 && !in_gimple_form
9711 && sanitize_flags_p (SANITIZE_NULL)
9712 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9713 return NULL_TREE;
9715 arg00 = fold_convert_loc (loc, type, arg00);
9716 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9719 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9720 of the same precision, and X is an integer type not narrower than
9721 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
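/* For instance, assuming 32-bit int and unsigned int,
   (int) ~(unsigned int) x with int x folds to plain ~x: both
   types have the same precision and the inner cast is not an
   extension, so the complement commutes with the conversions.  */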
9722 if (INTEGRAL_TYPE_P (type)
9723 && TREE_CODE (op0) == BIT_NOT_EXPR
9724 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9725 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9726 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9728 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9729 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9730 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9731 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9732 fold_convert_loc (loc, type, tem));
9735 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9736 type of X and Y (integer types only). */
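/* For instance, assuming 32-bit int and 16-bit short,
   (short) (x * y) with int operands can be computed as a 16-bit
   multiplication of the truncated operands; the code below uses an
   unsigned variant of the narrow type when signed overflow in it
   would be undefined, so no new overflow is introduced.  */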
9737 if (INTEGRAL_TYPE_P (type)
9738 && TREE_CODE (op0) == MULT_EXPR
9739 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9740 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9741 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9742 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9744 /* Be careful not to introduce new overflows. */
9745 tree mult_type;
9746 if (TYPE_OVERFLOW_WRAPS (type))
9747 mult_type = type;
9748 else
9749 mult_type = unsigned_type_for (type);
9751 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9753 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9754 fold_convert_loc (loc, mult_type,
9755 TREE_OPERAND (op0, 0)),
9756 fold_convert_loc (loc, mult_type,
9757 TREE_OPERAND (op0, 1)));
9758 return fold_convert_loc (loc, type, tem);
9762 return NULL_TREE;
9764 case VIEW_CONVERT_EXPR:
9765 if (TREE_CODE (op0) == MEM_REF)
9767 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9768 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9769 tem = fold_build2_loc (loc, MEM_REF, type,
9770 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9771 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9772 return tem;
9775 return NULL_TREE;
9777 case NEGATE_EXPR:
9778 tem = fold_negate_expr (loc, arg0);
9779 if (tem)
9780 return fold_convert_loc (loc, type, tem);
9781 return NULL_TREE;
9783 case ABS_EXPR:
9784 /* Convert fabs((double)float) into (double)fabsf(float). */
9785 if (TREE_CODE (arg0) == NOP_EXPR
9786 && TREE_CODE (type) == REAL_TYPE)
9788 tree targ0 = strip_float_extensions (arg0);
9789 if (targ0 != arg0)
9790 return fold_convert_loc (loc, type,
9791 fold_build1_loc (loc, ABS_EXPR,
9792 TREE_TYPE (targ0),
9793 targ0));
9795 return NULL_TREE;
9797 case BIT_NOT_EXPR:
9798 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
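/* For instance, assuming 32-bit int x, ~(x ^ 0xff) becomes
   x ^ ~0xff, i.e. x ^ -256: the BIT_NOT of the constant operand
   simplifies even though ~x itself does not.  */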
9799 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9800 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9801 fold_convert_loc (loc, type,
9802 TREE_OPERAND (arg0, 0)))))
9803 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9804 fold_convert_loc (loc, type,
9805 TREE_OPERAND (arg0, 1)));
9806 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9807 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9808 fold_convert_loc (loc, type,
9809 TREE_OPERAND (arg0, 1)))))
9810 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9811 fold_convert_loc (loc, type,
9812 TREE_OPERAND (arg0, 0)), tem);
9814 return NULL_TREE;
9816 case TRUTH_NOT_EXPR:
9817 /* Note that the operand of this must be an int
9818 and its values must be 0 or 1.
9819 ("true" is a fixed value perhaps depending on the language,
9820 but we don't handle values other than 1 correctly yet.) */
9821 tem = fold_truth_not_expr (loc, arg0);
9822 if (!tem)
9823 return NULL_TREE;
9824 return fold_convert_loc (loc, type, tem);
9826 case INDIRECT_REF:
9827 /* Fold *&X to X if X is an lvalue. */
9828 if (TREE_CODE (op0) == ADDR_EXPR)
9830 tree op00 = TREE_OPERAND (op0, 0);
9831 if ((VAR_P (op00)
9832 || TREE_CODE (op00) == PARM_DECL
9833 || TREE_CODE (op00) == RESULT_DECL)
9834 && !TREE_READONLY (op00))
9835 return op00;
9837 return NULL_TREE;
9839 default:
9840 return NULL_TREE;
9841 } /* switch (code) */
9845 /* If the operation was a conversion do _not_ mark a resulting constant
9846 with TREE_OVERFLOW if the original constant was not. These conversions
9847 have implementation defined behavior and retaining the TREE_OVERFLOW
9848 flag here would confuse later passes such as VRP. */
9849 tree
9850 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9851 tree type, tree op0)
9853 tree res = fold_unary_loc (loc, code, type, op0);
9854 if (res
9855 && TREE_CODE (res) == INTEGER_CST
9856 && TREE_CODE (op0) == INTEGER_CST
9857 && CONVERT_EXPR_CODE_P (code))
9858 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9860 return res;
9863 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9864 operands OP0 and OP1. LOC is the location of the resulting expression.
9865 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
9866 Return the folded expression if folding is successful. Otherwise,
9867 return NULL_TREE. */
9868 static tree
9869 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9870 tree arg0, tree arg1, tree op0, tree op1)
9872 tree tem;
9874 /* We only do these simplifications if we are optimizing. */
9875 if (!optimize)
9876 return NULL_TREE;
9878 /* Check for things like (A || B) && (A || C). We can convert this
9879 to A || (B && C). Note that either operator can be any of the four
9880 truth and/or operations and the transformation will still be
9881 valid. Also note that we only care about order for the
9882 ANDIF and ORIF operators. If B contains side effects, this
9883 might change the truth-value of A. */
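/* A further instance of the merge below: (A || B) && (C || B)
   becomes (A && C) || B, which is valid only when B and C have no
   side effects, because the rewrite changes the conditions under
   which they are evaluated.  */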
9884 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9885 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9886 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9887 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9888 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9889 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9891 tree a00 = TREE_OPERAND (arg0, 0);
9892 tree a01 = TREE_OPERAND (arg0, 1);
9893 tree a10 = TREE_OPERAND (arg1, 0);
9894 tree a11 = TREE_OPERAND (arg1, 1);
9895 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9896 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9897 && (code == TRUTH_AND_EXPR
9898 || code == TRUTH_OR_EXPR));
9900 if (operand_equal_p (a00, a10, 0))
9901 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9902 fold_build2_loc (loc, code, type, a01, a11));
9903 else if (commutative && operand_equal_p (a00, a11, 0))
9904 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9905 fold_build2_loc (loc, code, type, a01, a10));
9906 else if (commutative && operand_equal_p (a01, a10, 0))
9907 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9908 fold_build2_loc (loc, code, type, a00, a11));
9910 /* This case is tricky because we must either have commutative
9911 operators or else A10 must not have side-effects. */
9913 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9914 && operand_equal_p (a01, a11, 0))
9915 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9916 fold_build2_loc (loc, code, type, a00, a10),
9917 a01);
9920 /* See if we can build a range comparison. */
9921 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9922 return tem;
9924 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9925 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9927 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9928 if (tem)
9929 return fold_build2_loc (loc, code, type, tem, arg1);
9932 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9933 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9935 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9936 if (tem)
9937 return fold_build2_loc (loc, code, type, arg0, tem);
9940 /* Check for the possibility of merging component references. If our
9941 lhs is another similar operation, try to merge its rhs with our
9942 rhs. Then try to merge our lhs and rhs. */
9943 if (TREE_CODE (arg0) == code
9944 && (tem = fold_truth_andor_1 (loc, code, type,
9945 TREE_OPERAND (arg0, 1), arg1)) != 0)
9946 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9948 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9949 return tem;
9951 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9952 if (param_logical_op_non_short_circuit != -1)
9953 logical_op_non_short_circuit
9954 = param_logical_op_non_short_circuit;
9955 if (logical_op_non_short_circuit
9956 && !sanitize_coverage_p ()
9957 && (code == TRUTH_AND_EXPR
9958 || code == TRUTH_ANDIF_EXPR
9959 || code == TRUTH_OR_EXPR
9960 || code == TRUTH_ORIF_EXPR))
9962 enum tree_code ncode, icode;
9964 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9965 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9966 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9968 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9969 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9970 We don't want to pack more than two leaves into a non-IF AND/OR
9971 expression.
9972 If the tree code of the left-hand operand isn't an AND/OR-IF code
9973 and isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9974 If the inner right-hand side of the left-hand operand has
9975 side-effects, or isn't simple, then we can't add to it,
9976 as otherwise we might destroy the if-sequence. */
9977 if (TREE_CODE (arg0) == icode
9978 && simple_condition_p (arg1)
9979 /* Needed for sequence points to handle trapping operations
9980 and side-effects. */
9981 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9983 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9984 arg1);
9985 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9986 tem);
9988 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9989 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9990 else if (TREE_CODE (arg1) == icode
9991 && simple_condition_p (arg0)
9992 /* Needed for sequence points to handle trapping operations
9993 and side-effects. */
9994 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9996 tem = fold_build2_loc (loc, ncode, type,
9997 arg0, TREE_OPERAND (arg1, 0));
9998 return fold_build2_loc (loc, icode, type, tem,
9999 TREE_OPERAND (arg1, 1));
10001 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
10002 into (A OR B).
10003 For sequence point consistency, we need to check for trapping,
10004 and side-effects. */
10005 else if (code == icode && simple_condition_p (arg0)
10006 && simple_condition_p (arg1))
10007 return fold_build2_loc (loc, ncode, type, arg0, arg1);
10010 return NULL_TREE;
10013 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
10014 by changing CODE to reduce the magnitude of constants involved in
10015 ARG0 of the comparison.
10016 Returns a canonicalized comparison tree if a simplification was
10017 possible, otherwise returns NULL_TREE.
10018 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
10019 valid if signed overflow is undefined. */
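/* For instance, for signed int a, a - 1 < b is canonicalized to
   a - 0 <= b and hence a <= b, shrinking the constant's magnitude.
   This is valid only because signed overflow is undefined: a - 1
   could otherwise wrap when a == INT_MIN.  */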
10021 static tree
10022 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
10023 tree arg0, tree arg1,
10024 bool *strict_overflow_p)
10026 enum tree_code code0 = TREE_CODE (arg0);
10027 tree t, cst0 = NULL_TREE;
10028 int sgn0;
10030 /* Match A +- CST code arg1. We can change this only if overflow
10031 is undefined. */
10032 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10033 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
10034 /* In principle pointers also have undefined overflow behavior,
10035 but that causes problems elsewhere. */
10036 && !POINTER_TYPE_P (TREE_TYPE (arg0))
10037 && (code0 == MINUS_EXPR
10038 || code0 == PLUS_EXPR)
10039 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
10040 return NULL_TREE;
10042 /* Identify the constant in arg0 and its sign. */
10043 cst0 = TREE_OPERAND (arg0, 1);
10044 sgn0 = tree_int_cst_sgn (cst0);
10046 /* Overflowed constants and zero will cause problems. */
10047 if (integer_zerop (cst0)
10048 || TREE_OVERFLOW (cst0))
10049 return NULL_TREE;
10051 /* See if we can reduce the magnitude of the constant in
10052 arg0 by changing the comparison code. */
10053 /* A - CST < arg1 -> A - CST-1 <= arg1. */
10054 if (code == LT_EXPR
10055 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10056 code = LE_EXPR;
10057 /* A + CST > arg1 -> A + CST-1 >= arg1. */
10058 else if (code == GT_EXPR
10059 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10060 code = GE_EXPR;
10061 /* A + CST <= arg1 -> A + CST-1 < arg1. */
10062 else if (code == LE_EXPR
10063 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10064 code = LT_EXPR;
10065 /* A - CST >= arg1 -> A - CST-1 > arg1. */
10066 else if (code == GE_EXPR
10067 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10068 code = GT_EXPR;
10069 else
10070 return NULL_TREE;
10071 *strict_overflow_p = true;
10073 /* Now build the constant reduced in magnitude. But not if that
10074 would produce one outside of its type's range. */
10075 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
10076 && ((sgn0 == 1
10077 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
10078 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
10079 || (sgn0 == -1
10080 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
10081 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
10082 return NULL_TREE;
10084 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
10085 cst0, build_int_cst (TREE_TYPE (cst0), 1));
10086 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
10087 t = fold_convert (TREE_TYPE (arg1), t);
10089 return fold_build2_loc (loc, code, type, t, arg1);
10092 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
10093 overflow further. Try to decrease the magnitude of constants involved
10094 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
10095 and put sole constants at the second argument position.
10096 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
10098 static tree
10099 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
10100 tree arg0, tree arg1)
10102 tree t;
10103 bool strict_overflow_p;
10104 const char * const warnmsg = G_("assuming signed overflow does not occur "
10105 "when reducing constant in comparison");
10107 /* Try canonicalization by simplifying arg0. */
10108 strict_overflow_p = false;
10109 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
10110 &strict_overflow_p);
10111 if (t)
10113 if (strict_overflow_p)
10114 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10115 return t;
10118 /* Try canonicalization by simplifying arg1 using the swapped
10119 comparison. */
10120 code = swap_tree_comparison (code);
10121 strict_overflow_p = false;
10122 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
10123 &strict_overflow_p);
10124 if (t && strict_overflow_p)
10125 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10126 return t;
10129 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
10130 space. This is used to avoid issuing overflow warnings for
10131 expressions like &p->x which cannot wrap. */
10133 static bool
10134 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10136 if (!POINTER_TYPE_P (TREE_TYPE (base)))
10137 return true;
10139 if (maybe_lt (bitpos, 0))
10140 return true;
10142 poly_wide_int wi_offset;
10143 int precision = TYPE_PRECISION (TREE_TYPE (base));
10144 if (offset == NULL_TREE)
10145 wi_offset = wi::zero (precision);
10146 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10147 return true;
10148 else
10149 wi_offset = wi::to_poly_wide (offset);
10151 wi::overflow_type overflow;
10152 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10153 precision);
10154 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10155 if (overflow)
10156 return true;
10158 poly_uint64 total_hwi, size;
10159 if (!total.to_uhwi (&total_hwi)
10160 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10161 &size)
10162 || known_eq (size, 0U))
10163 return true;
10165 if (known_le (total_hwi, size))
10166 return false;
10168 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10169 array. */
10170 if (TREE_CODE (base) == ADDR_EXPR
10171 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10172 &size)
10173 && maybe_ne (size, 0U)
10174 && known_le (total_hwi, size))
10175 return false;
10177 return true;
10180 /* Return a positive integer when the symbol DECL is known to have
10181 a nonzero address, zero when it's known not to (e.g., it's a weak
10182 symbol), and a negative integer when the symbol is not yet in the
10183 symbol table and so whether or not its address is zero is unknown.
10184 For function-local objects, always return a positive integer. */
10185 static int
10186 maybe_nonzero_address (tree decl)
10188 /* Normally, don't do anything for variables and functions before symtab is
10189 built; it is quite possible that DECL will be declared weak later.
10190 But if folding_initializer, we need a constant answer now, so create
10191 the symtab entry and prevent later weak declaration. */
10192 if (DECL_P (decl) && decl_in_symtab_p (decl))
10193 if (struct symtab_node *symbol
10194 = (folding_initializer
10195 ? symtab_node::get_create (decl)
10196 : symtab_node::get (decl)))
10197 return symbol->nonzero_address ();
10199 /* Function local objects are never NULL. */
10200 if (DECL_P (decl)
10201 && (DECL_CONTEXT (decl)
10202 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10203 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10204 return 1;
10206 return -1;
10209 /* Subroutine of fold_binary. This routine performs all of the
10210 transformations that are common to the equality/inequality
10211 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10212 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10213 fold_binary should go through fold_binary rather than call this routine directly. Fold a comparison with
10214 tree code CODE and type TYPE with operands OP0 and OP1. Return
10215 the folded comparison or NULL_TREE. */
10217 static tree
10218 fold_comparison (location_t loc, enum tree_code code, tree type,
10219 tree op0, tree op1)
10221 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10222 tree arg0, arg1, tem;
10224 arg0 = op0;
10225 arg1 = op1;
10227 STRIP_SIGN_NOPS (arg0);
10228 STRIP_SIGN_NOPS (arg1);
10230 /* For comparisons of pointers we can decompose it to a compile time
10231 comparison of the base objects and the offsets into the object.
10232 This requires at least one operand being an ADDR_EXPR or a
10233 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10234 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10235 && (TREE_CODE (arg0) == ADDR_EXPR
10236 || TREE_CODE (arg1) == ADDR_EXPR
10237 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10238 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10240 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10241 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10242 machine_mode mode;
10243 int volatilep, reversep, unsignedp;
10244 bool indirect_base0 = false, indirect_base1 = false;
10246 /* Get base and offset for the access. Strip ADDR_EXPR for
10247 get_inner_reference, but put it back by stripping INDIRECT_REF
10248 off the base object if possible. indirect_baseN will be true
10249 if baseN is not an address but refers to the object itself. */
10250 base0 = arg0;
10251 if (TREE_CODE (arg0) == ADDR_EXPR)
10253 base0
10254 = get_inner_reference (TREE_OPERAND (arg0, 0),
10255 &bitsize, &bitpos0, &offset0, &mode,
10256 &unsignedp, &reversep, &volatilep);
10257 if (INDIRECT_REF_P (base0))
10258 base0 = TREE_OPERAND (base0, 0);
10259 else
10260 indirect_base0 = true;
10262 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10264 base0 = TREE_OPERAND (arg0, 0);
10265 STRIP_SIGN_NOPS (base0);
10266 if (TREE_CODE (base0) == ADDR_EXPR)
10268 base0
10269 = get_inner_reference (TREE_OPERAND (base0, 0),
10270 &bitsize, &bitpos0, &offset0, &mode,
10271 &unsignedp, &reversep, &volatilep);
10272 if (INDIRECT_REF_P (base0))
10273 base0 = TREE_OPERAND (base0, 0);
10274 else
10275 indirect_base0 = true;
10277 if (offset0 == NULL_TREE || integer_zerop (offset0))
10278 offset0 = TREE_OPERAND (arg0, 1);
10279 else
10280 offset0 = size_binop (PLUS_EXPR, offset0,
10281 TREE_OPERAND (arg0, 1));
10282 if (poly_int_tree_p (offset0))
10284 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10285 TYPE_PRECISION (sizetype));
10286 tem <<= LOG2_BITS_PER_UNIT;
10287 tem += bitpos0;
10288 if (tem.to_shwi (&bitpos0))
10289 offset0 = NULL_TREE;
10293 base1 = arg1;
10294 if (TREE_CODE (arg1) == ADDR_EXPR)
10296 base1
10297 = get_inner_reference (TREE_OPERAND (arg1, 0),
10298 &bitsize, &bitpos1, &offset1, &mode,
10299 &unsignedp, &reversep, &volatilep);
10300 if (INDIRECT_REF_P (base1))
10301 base1 = TREE_OPERAND (base1, 0);
10302 else
10303 indirect_base1 = true;
10305 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10307 base1 = TREE_OPERAND (arg1, 0);
10308 STRIP_SIGN_NOPS (base1);
10309 if (TREE_CODE (base1) == ADDR_EXPR)
10311 base1
10312 = get_inner_reference (TREE_OPERAND (base1, 0),
10313 &bitsize, &bitpos1, &offset1, &mode,
10314 &unsignedp, &reversep, &volatilep);
10315 if (INDIRECT_REF_P (base1))
10316 base1 = TREE_OPERAND (base1, 0);
10317 else
10318 indirect_base1 = true;
10320 if (offset1 == NULL_TREE || integer_zerop (offset1))
10321 offset1 = TREE_OPERAND (arg1, 1);
10322 else
10323 offset1 = size_binop (PLUS_EXPR, offset1,
10324 TREE_OPERAND (arg1, 1));
10325 if (poly_int_tree_p (offset1))
10327 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10328 TYPE_PRECISION (sizetype));
10329 tem <<= LOG2_BITS_PER_UNIT;
10330 tem += bitpos1;
10331 if (tem.to_shwi (&bitpos1))
10332 offset1 = NULL_TREE;
10336 /* If we have equivalent bases we might be able to simplify. */
10337 if (indirect_base0 == indirect_base1
10338 && operand_equal_p (base0, base1,
10339 indirect_base0 ? OEP_ADDRESS_OF : 0))
10341 /* We can fold this expression to a constant if the non-constant
10342 offset parts are equal. */
10343 if ((offset0 == offset1
10344 || (offset0 && offset1
10345 && operand_equal_p (offset0, offset1, 0)))
10346 && (equality_code
10347 || (indirect_base0
10348 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10349 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10351 if (!equality_code
10352 && maybe_ne (bitpos0, bitpos1)
10353 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10354 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10355 fold_overflow_warning (("assuming pointer wraparound does not "
10356 "occur when comparing P +- C1 with "
10357 "P +- C2"),
10358 WARN_STRICT_OVERFLOW_CONDITIONAL);
10360 switch (code)
10362 case EQ_EXPR:
10363 if (known_eq (bitpos0, bitpos1))
10364 return constant_boolean_node (true, type);
10365 if (known_ne (bitpos0, bitpos1))
10366 return constant_boolean_node (false, type);
10367 break;
10368 case NE_EXPR:
10369 if (known_ne (bitpos0, bitpos1))
10370 return constant_boolean_node (true, type);
10371 if (known_eq (bitpos0, bitpos1))
10372 return constant_boolean_node (false, type);
10373 break;
10374 case LT_EXPR:
10375 if (known_lt (bitpos0, bitpos1))
10376 return constant_boolean_node (true, type);
10377 if (known_ge (bitpos0, bitpos1))
10378 return constant_boolean_node (false, type);
10379 break;
10380 case LE_EXPR:
10381 if (known_le (bitpos0, bitpos1))
10382 return constant_boolean_node (true, type);
10383 if (known_gt (bitpos0, bitpos1))
10384 return constant_boolean_node (false, type);
10385 break;
10386 case GE_EXPR:
10387 if (known_ge (bitpos0, bitpos1))
10388 return constant_boolean_node (true, type);
10389 if (known_lt (bitpos0, bitpos1))
10390 return constant_boolean_node (false, type);
10391 break;
10392 case GT_EXPR:
10393 if (known_gt (bitpos0, bitpos1))
10394 return constant_boolean_node (true, type);
10395 if (known_le (bitpos0, bitpos1))
10396 return constant_boolean_node (false, type);
10397 break;
10398 default:;
10401 /* We can simplify the comparison to a comparison of the variable
10402 offset parts if the constant offset parts are equal.
10403 Be careful to use signed sizetype here because otherwise we
10404 mess with array offsets in the wrong way. This is possible
10405 because pointer arithmetic is restricted to remain within an
10406 object and overflow on pointer differences is undefined as of
10407 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10408 else if (known_eq (bitpos0, bitpos1)
10409 && (equality_code
10410 || (indirect_base0
10411 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10412 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10414 /* By converting to signed sizetype we cover middle-end pointer
10415 arithmetic which operates on unsigned pointer types of size
10416 type size and ARRAY_REF offsets which are properly sign or
10417 zero extended from their type in case it is narrower than
10418 sizetype. */
10419 if (offset0 == NULL_TREE)
10420 offset0 = build_int_cst (ssizetype, 0);
10421 else
10422 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10423 if (offset1 == NULL_TREE)
10424 offset1 = build_int_cst (ssizetype, 0);
10425 else
10426 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10428 if (!equality_code
10429 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10430 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10431 fold_overflow_warning (("assuming pointer wraparound does not "
10432 "occur when comparing P +- C1 with "
10433 "P +- C2"),
10434 WARN_STRICT_OVERFLOW_COMPARISON);
10436 return fold_build2_loc (loc, code, type, offset0, offset1);
10439 /* For equal offsets we can simplify to a comparison of the
10440 base addresses. */
10441 else if (known_eq (bitpos0, bitpos1)
10442 && (indirect_base0
10443 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10444 && (indirect_base1
10445 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10446 && ((offset0 == offset1)
10447 || (offset0 && offset1
10448 && operand_equal_p (offset0, offset1, 0))))
10450 if (indirect_base0)
10451 base0 = build_fold_addr_expr_loc (loc, base0);
10452 if (indirect_base1)
10453 base1 = build_fold_addr_expr_loc (loc, base1);
10454 return fold_build2_loc (loc, code, type, base0, base1);
10456 /* Comparison between an ordinary (non-weak) symbol and a null
10457 pointer can be eliminated since such symbols must have a non
10458 null address. In C, relational expressions between pointers
10459 to objects and null pointers are undefined. The results
10460 below follow the C++ rules with the additional property that
10461 every object pointer compares greater than a null pointer. */
10463 else if (((DECL_P (base0)
10464 && maybe_nonzero_address (base0) > 0
10465 /* Avoid folding references to struct members at offset 0 to
10466 prevent tests like '&ptr->firstmember == 0' from getting
10467 eliminated. When ptr is null, although the -> expression
10468 is strictly speaking invalid, GCC retains it as a matter
10469 of QoI. See PR c/44555. */
10470 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10471 || CONSTANT_CLASS_P (base0))
10472 && indirect_base0
10473 /* The caller guarantees that when one of the arguments is
10474 constant (i.e., null in this case) it is second. */
10475 && integer_zerop (arg1))
10477 switch (code)
10479 case EQ_EXPR:
10480 case LE_EXPR:
10481 case LT_EXPR:
10482 return constant_boolean_node (false, type);
10483 case GE_EXPR:
10484 case GT_EXPR:
10485 case NE_EXPR:
10486 return constant_boolean_node (true, type);
10487 default:
10488 gcc_unreachable ();
10493 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10494 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10495 the resulting offset is smaller in absolute value than the
10496 original one and has the same sign. */
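/* For instance, for signed x and y, x + 2 < y + 1 is rewritten
   as x + 1 < y: the combined constant 1 is smaller in magnitude
   than 2 and has the same sign, so no new overflow is possible.  */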
10497 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10498 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10499 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10500 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10501 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10502 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10503 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10504 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10506 tree const1 = TREE_OPERAND (arg0, 1);
10507 tree const2 = TREE_OPERAND (arg1, 1);
10508 tree variable1 = TREE_OPERAND (arg0, 0);
10509 tree variable2 = TREE_OPERAND (arg1, 0);
10510 tree cst;
10511 const char * const warnmsg = G_("assuming signed overflow does not "
10512 "occur when combining constants around "
10513 "a comparison");
10515 /* Put the constant on the side where it doesn't overflow and is
10516 of lower absolute value and the same sign as before. */
10517 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10518 ? MINUS_EXPR : PLUS_EXPR,
10519 const2, const1);
10520 if (!TREE_OVERFLOW (cst)
10521 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10522 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10524 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10525 return fold_build2_loc (loc, code, type,
10526 variable1,
10527 fold_build2_loc (loc, TREE_CODE (arg1),
10528 TREE_TYPE (arg1),
10529 variable2, cst));
10532 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10533 ? MINUS_EXPR : PLUS_EXPR,
10534 const1, const2);
10535 if (!TREE_OVERFLOW (cst)
10536 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10537 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10539 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10540 return fold_build2_loc (loc, code, type,
10541 fold_build2_loc (loc, TREE_CODE (arg0),
10542 TREE_TYPE (arg0),
10543 variable1, cst),
10544 variable2);
10548 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10549 if (tem)
10550 return tem;
10552 /* If we are comparing an expression that just has comparisons
10553 of two integer values, arithmetic expressions of those comparisons,
10554 and constants, we can simplify it. There are only three cases
10555 to check: the two values can either be equal, the first can be
10556 greater, or the second can be greater. Fold the expression for
10557 those three values. Since each value must be 0 or 1, we have
10558 eight possibilities, each of which corresponds to the constant 0
10559 or 1 or one of the six possible comparisons.
10561 This handles common cases like (a > b) == 0 but also handles
10562 expressions like ((x > y) - (y > x)) > 0, which supposedly
10563 occur in macroized code. */
10565 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10567 tree cval1 = 0, cval2 = 0;
10569 if (twoval_comparison_p (arg0, &cval1, &cval2)
10570 /* Don't handle degenerate cases here; they should already
10571 have been handled anyway. */
10572 && cval1 != 0 && cval2 != 0
10573 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10574 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10575 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10576 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10577 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10578 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10579 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10581 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10582 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10584 /* We can't just pass T to eval_subst in case cval1 or cval2
10585 was the same as ARG1. */
10587 tree high_result
10588 = fold_build2_loc (loc, code, type,
10589 eval_subst (loc, arg0, cval1, maxval,
10590 cval2, minval),
10591 arg1);
10592 tree equal_result
10593 = fold_build2_loc (loc, code, type,
10594 eval_subst (loc, arg0, cval1, maxval,
10595 cval2, maxval),
10596 arg1);
10597 tree low_result
10598 = fold_build2_loc (loc, code, type,
10599 eval_subst (loc, arg0, cval1, minval,
10600 cval2, maxval),
10601 arg1);
10603 /* All three of these results should be 0 or 1. Confirm they are.
10604 Then use those values to select the proper code to use. */
10606 if (TREE_CODE (high_result) == INTEGER_CST
10607 && TREE_CODE (equal_result) == INTEGER_CST
10608 && TREE_CODE (low_result) == INTEGER_CST)
10610 /* Make a 3-bit mask with the high-order bit being the
10611 value for `>', the next for '=', and the low for '<'. */
10612 switch ((integer_onep (high_result) * 4)
10613 + (integer_onep (equal_result) * 2)
10614 + integer_onep (low_result))
10616 case 0:
10617 /* Always false. */
10618 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10619 case 1:
10620 code = LT_EXPR;
10621 break;
10622 case 2:
10623 code = EQ_EXPR;
10624 break;
10625 case 3:
10626 code = LE_EXPR;
10627 break;
10628 case 4:
10629 code = GT_EXPR;
10630 break;
10631 case 5:
10632 code = NE_EXPR;
10633 break;
10634 case 6:
10635 code = GE_EXPR;
10636 break;
10637 case 7:
10638 /* Always true. */
10639 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10642 return fold_build2_loc (loc, code, type, cval1, cval2);
10647 return NULL_TREE;
10651 /* Subroutine of fold_binary. Optimize complex multiplications of the
10652 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10653 argument EXPR represents the expression "z" of type TYPE. */
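/* With z = a + b*i this computes (a + b*i) * (a - b*i)
   = a*a + b*b, so the result is built as
   COMPLEX_EXPR <a*a + b*b, 0>.  */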
10655 static tree
10656 fold_mult_zconjz (location_t loc, tree type, tree expr)
10658 tree itype = TREE_TYPE (type);
10659 tree rpart, ipart, tem;
10661 if (TREE_CODE (expr) == COMPLEX_EXPR)
10663 rpart = TREE_OPERAND (expr, 0);
10664 ipart = TREE_OPERAND (expr, 1);
10666 else if (TREE_CODE (expr) == COMPLEX_CST)
10668 rpart = TREE_REALPART (expr);
10669 ipart = TREE_IMAGPART (expr);
10671 else
10673 expr = save_expr (expr);
10674 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10675 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10678 rpart = save_expr (rpart);
10679 ipart = save_expr (ipart);
10680 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10681 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10682 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10683 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10684 build_zero_cst (itype));
10688 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10689 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10690 true if successful. */
10692 static bool
10693 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10695 unsigned HOST_WIDE_INT i, nunits;
10697 if (TREE_CODE (arg) == VECTOR_CST
10698 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10700 for (i = 0; i < nunits; ++i)
10701 elts[i] = VECTOR_CST_ELT (arg, i);
10703 else if (TREE_CODE (arg) == CONSTRUCTOR)
10705 constructor_elt *elt;
10707 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10708 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10709 return false;
10710 else
10711 elts[i] = elt->value;
10713 else
10714 return false;
10715 for (; i < nelts; i++)
10716 elts[i]
10717 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10718 return true;
10721 /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10722 mask for VLA vec_perm folding.
10723 REASON, if specified, will contain the reason why SEL is not suitable.
10724 Used only for debugging and unit-testing. */
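/* For instance (illustrative), with input length 4 + 4x, a selector
   encoded as { 0, 1, 2, ... } (npatterns == 1, nelts_per_pattern == 3,
   step == 1) keeps the whole stepped sequence inside arg0 and is
   accepted when arg0 itself is a single-pattern constant, whereas a
   selector { 0, 2 + 2x, 4 + 4x, ... } is rejected because its step
   2 + 2x is not a compile-time constant.  */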
10726 static bool
10727 valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10728 const vec_perm_indices &sel,
10729 const char **reason = NULL)
10731 unsigned sel_npatterns = sel.encoding ().npatterns ();
10732 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10734 if (!(pow2p_hwi (sel_npatterns)
10735 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10736 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10738 if (reason)
10739 *reason = "npatterns is not power of 2";
10740 return false;
10743 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10744 E.g. sel.length == 2 + 2x and sel_npatterns == 4. */
10745 poly_uint64 esel;
10746 if (!multiple_p (sel.length (), sel_npatterns, &esel))
10748 if (reason)
10749 *reason = "sel.length is not multiple of sel_npatterns";
10750 return false;
10753 if (sel_nelts_per_pattern < 3)
10754 return true;
10756 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10758 poly_uint64 a1 = sel[pattern + sel_npatterns];
10759 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10760 HOST_WIDE_INT step;
10761 if (!poly_int64 (a2 - a1).is_constant (&step))
10763 if (reason)
10764 *reason = "step is not constant";
10765 return false;
10767 // FIXME: Punt on step < 0 for now, revisit later.
10768 if (step < 0)
10769 return false;
10770 if (step == 0)
10771 continue;
10773 if (!pow2p_hwi (step))
10775 if (reason)
10776 *reason = "step is not power of 2";
10777 return false;
10780 /* Ensure that the stepped sequence of the pattern selects elements
10781 only from the same input vector. */
10782 uint64_t q1, qe;
10783 poly_uint64 r1, re;
10784 poly_uint64 ae = a1 + (esel - 2) * step;
10785 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10787 if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10788 && can_div_trunc_p (ae, arg_len, &qe, &re)
10789 && q1 == qe))
10791 if (reason)
10792 *reason = "crossed input vectors";
10793 return false;
10796 /* Ensure that the stepped sequence always selects from the same
10797 input pattern. */
10798 tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10799 unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10801 if (!multiple_p (step, arg_npatterns))
10803 if (reason)
10804 *reason = "step is not multiple of npatterns";
10805 return false;
10808 /* If a1 chooses base element from arg, ensure that it's a natural
10809 stepped sequence, i.e., (arg[2] - arg[1]) == (arg[1] - arg[0])
10810 to preserve arg's encoding. */
10812 if (maybe_lt (r1, arg_npatterns))
10814 unsigned HOST_WIDE_INT index;
10815 if (!r1.is_constant (&index))
10816 return false;
10818 tree arg_elem0 = vector_cst_elt (arg, index);
10819 tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10820 tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10822 tree step1, step2;
10823 if (!(step1 = const_binop (MINUS_EXPR, arg_elem1, arg_elem0))
10824 || !(step2 = const_binop (MINUS_EXPR, arg_elem2, arg_elem1))
10825 || !operand_equal_p (step1, step2, 0))
10827 if (reason)
10828 *reason = "not a natural stepped sequence";
10829 return false;
10834 return true;
10837 /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10838 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10839 REASON has the same purpose as described in
10840 valid_mask_for_fold_vec_perm_cst_p. */
10842 static tree
10843 fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10844 const char **reason = NULL)
10846 unsigned res_npatterns, res_nelts_per_pattern;
10847 unsigned HOST_WIDE_INT res_nelts;
10849 /* First try to implement the fold in a VLA-friendly way.
10851 (1) If the selector is simply a duplication of N elements, the
10852 result is likewise a duplication of N elements.
10854 (2) If the selector is N elements followed by a duplication
10855 of N elements, the result is too.
10857 (3) If the selector is N elements followed by an interleaving
10858 of N linear series, the situation is more complex.
10860 valid_mask_for_fold_vec_perm_cst_p detects whether we
10861 can handle this case. If we can, then each of the N linear
10862 series either (a) selects the same element each time or
10863 (b) selects a linear series from one of the input patterns.
10865 If (b) holds for one of the linear series, the result
10866 will contain a linear series, and so the result will have
10867 the same shape as the selector. If (a) holds for all of
10868 the linear series, the result will be the same as (2) above.
10870 (b) can only hold if one of the input patterns has a
10871 stepped encoding. */
10873 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10875 res_npatterns = sel.encoding ().npatterns ();
10876 res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10877 if (res_nelts_per_pattern == 3
10878 && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10879 && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10880 res_nelts_per_pattern = 2;
10881 res_nelts = res_npatterns * res_nelts_per_pattern;
10883 else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10885 res_npatterns = res_nelts;
10886 res_nelts_per_pattern = 1;
10888 else
10889 return NULL_TREE;
10891 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10892 for (unsigned i = 0; i < res_nelts; i++)
10894 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10895 uint64_t q;
10896 poly_uint64 r;
10897 unsigned HOST_WIDE_INT index;
10899 /* Punt if sel[i] / len (truncating division) cannot be determined,
10900 because the input vector to be chosen would depend on the
10901 runtime vector length.
10902 For example, if len == 4 + 4x and sel[i] == 4:
10903 if len at runtime equals 4, we choose arg1[0];
10904 for any other value of len > 4 at runtime, we choose arg0[4],
10905 which makes the element choice dependent on the runtime vector length. */
10906 if (!can_div_trunc_p (sel[i], len, &q, &r))
10908 if (reason)
10909 *reason = "cannot divide selector element by arg len";
10910 return NULL_TREE;
10913 /* sel[i] % len will give the index of element in the chosen input
10914 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10915 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10916 if (!r.is_constant (&index))
10918 if (reason)
10919 *reason = "remainder is not constant";
10920 return NULL_TREE;
10923 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10924 tree elem = vector_cst_elt (arg, index);
10925 out_elts.quick_push (elem);
10928 return out_elts.build ();
10931 /* Attempt to fold a vector permutation of the ARG0 and ARG1 vectors using
10932 the SEL selector. Return the folded VECTOR_CST or CONSTRUCTOR if
10933 successful, NULL_TREE otherwise. */
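/* For example, with V4SI vectors arg0 == {1, 2, 3, 4} and
   arg1 == {5, 6, 7, 8}, the selector sel == {0, 4, 1, 5} indexes into
   the concatenation arg0:arg1 and yields {1, 5, 2, 6}.  */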
10935 tree
10936 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10938 unsigned int i;
10939 unsigned HOST_WIDE_INT nelts;
10941 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10942 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10943 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10945 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10946 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10947 return NULL_TREE;
10949 if (TREE_CODE (arg0) == VECTOR_CST
10950 && TREE_CODE (arg1) == VECTOR_CST)
10951 return fold_vec_perm_cst (type, arg0, arg1, sel);
10953 /* For the fallback case, we want to ensure we have VLS vectors
10954 of equal length. */
10955 if (!sel.length ().is_constant (&nelts))
10956 return NULL_TREE;
10958 gcc_assert (known_eq (sel.length (),
10959 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10960 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10961 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10962 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10963 return NULL_TREE;
10965 vec<constructor_elt, va_gc> *v;
10966 vec_alloc (v, nelts);
10967 for (i = 0; i < nelts; i++)
10969 HOST_WIDE_INT index;
10970 if (!sel[i].is_constant (&index))
10971 return NULL_TREE;
10972 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10974 return build_constructor (type, v);
10977 /* Try to fold a pointer difference of type TYPE between two address
10978 expressions of array references AREF0 and AREF1 using location LOC.
10979 Return a simplified expression for the difference or NULL_TREE. */
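/* For example, the POINTER_DIFF_EXPR of &a[i] and &a[j] folds to
   (i - j) * sizeof (a[0]): the element size is folded in as a
   multiplication, since the difference is computed in bytes here.  */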
10981 static tree
10982 fold_addr_of_array_ref_difference (location_t loc, tree type,
10983 tree aref0, tree aref1,
10984 bool use_pointer_diff)
10986 tree base0 = TREE_OPERAND (aref0, 0);
10987 tree base1 = TREE_OPERAND (aref1, 0);
10988 tree base_offset = build_int_cst (type, 0);
10990 /* If the bases are array references as well, recurse. If the bases
10991 are pointer indirections, compute the difference of the pointers.
10992 If the bases are equal, we are set. */
10993 if ((TREE_CODE (base0) == ARRAY_REF
10994 && TREE_CODE (base1) == ARRAY_REF
10995 && (base_offset
10996 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10997 use_pointer_diff)))
10998 || (INDIRECT_REF_P (base0)
10999 && INDIRECT_REF_P (base1)
11000 && (base_offset
11001 = use_pointer_diff
11002 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
11003 TREE_OPERAND (base0, 0),
11004 TREE_OPERAND (base1, 0))
11005 : fold_binary_loc (loc, MINUS_EXPR, type,
11006 fold_convert (type,
11007 TREE_OPERAND (base0, 0)),
11008 fold_convert (type,
11009 TREE_OPERAND (base1, 0)))))
11010 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
11012 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
11013 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
11014 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
11015 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
11016 return fold_build2_loc (loc, PLUS_EXPR, type,
11017 base_offset,
11018 fold_build2_loc (loc, MULT_EXPR, type,
11019 diff, esz));
11021 return NULL_TREE;
11024 /* If the real or vector real constant CST of type TYPE has an exact
11025 inverse, return it, else return NULL_TREE. */
11027 tree
11028 exact_inverse (tree type, tree cst)
11030 REAL_VALUE_TYPE r;
11031 tree unit_type;
11032 machine_mode mode;
11034 switch (TREE_CODE (cst))
11036 case REAL_CST:
11037 r = TREE_REAL_CST (cst);
11039 if (exact_real_inverse (TYPE_MODE (type), &r))
11040 return build_real (type, r);
11042 return NULL_TREE;
11044 case VECTOR_CST:
11046 unit_type = TREE_TYPE (type);
11047 mode = TYPE_MODE (unit_type);
11049 tree_vector_builder elts;
11050 if (!elts.new_unary_operation (type, cst, false))
11051 return NULL_TREE;
11052 unsigned int count = elts.encoded_nelts ();
11053 for (unsigned int i = 0; i < count; ++i)
11055 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
11056 if (!exact_real_inverse (mode, &r))
11057 return NULL_TREE;
11058 elts.quick_push (build_real (unit_type, r));
11061 return elts.build ();
11064 default:
11065 return NULL_TREE;
11069 /* Mask out the tz least significant bits of X of type TYPE where
11070 tz is the number of trailing zeroes in Y. */
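/* For example, if X == 0b10110 and Y == 0b01100, Y has two trailing
   zeroes, so the two least significant bits of X are cleared and the
   result is 0b10100.  */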
11071 static wide_int
11072 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
11074 int tz = wi::ctz (y);
11075 if (tz > 0)
11076 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
11077 return x;
11080 /* Return true when T is an expression of integral or pointer type
11081 whose value is known to be nonzero; floating-point expressions
11082 are not handled here. Similar logic is present in nonzero_address in rtlanal.h.
11084 If the return value is based on the assumption that signed overflow
11085 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
11086 change *STRICT_OVERFLOW_P. */
11088 static bool
11089 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
11091 tree type = TREE_TYPE (t);
11092 enum tree_code code;
11094 /* Doing something useful for floating point would need more work. */
11095 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11096 return false;
11098 code = TREE_CODE (t);
11099 switch (TREE_CODE_CLASS (code))
11101 case tcc_unary:
11102 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11103 strict_overflow_p);
11104 case tcc_binary:
11105 case tcc_comparison:
11106 return tree_binary_nonzero_warnv_p (code, type,
11107 TREE_OPERAND (t, 0),
11108 TREE_OPERAND (t, 1),
11109 strict_overflow_p);
11110 case tcc_constant:
11111 case tcc_declaration:
11112 case tcc_reference:
11113 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11115 default:
11116 break;
11119 switch (code)
11121 case TRUTH_NOT_EXPR:
11122 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11123 strict_overflow_p);
11125 case TRUTH_AND_EXPR:
11126 case TRUTH_OR_EXPR:
11127 case TRUTH_XOR_EXPR:
11128 return tree_binary_nonzero_warnv_p (code, type,
11129 TREE_OPERAND (t, 0),
11130 TREE_OPERAND (t, 1),
11131 strict_overflow_p);
11133 case COND_EXPR:
11134 case CONSTRUCTOR:
11135 case OBJ_TYPE_REF:
11136 case ADDR_EXPR:
11137 case WITH_SIZE_EXPR:
11138 case SSA_NAME:
11139 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11141 case COMPOUND_EXPR:
11142 case MODIFY_EXPR:
11143 case BIND_EXPR:
11144 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
11145 strict_overflow_p);
11147 case SAVE_EXPR:
11148 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
11149 strict_overflow_p);
11151 case CALL_EXPR:
11153 tree fndecl = get_callee_fndecl (t);
11154 if (!fndecl) return false;
11155 if (flag_delete_null_pointer_checks && !flag_check_new
11156 && DECL_IS_OPERATOR_NEW_P (fndecl)
11157 && !TREE_NOTHROW (fndecl))
11158 return true;
11159 if (flag_delete_null_pointer_checks
11160 && lookup_attribute ("returns_nonnull",
11161 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
11162 return true;
11163 return alloca_call_p (t);
11166 default:
11167 break;
11169 return false;
11172 /* Return true when T is known to be nonzero.
11173 Handle warnings about undefined signed overflow. */
11175 bool
11176 tree_expr_nonzero_p (tree t)
11178 bool ret, strict_overflow_p;
11180 strict_overflow_p = false;
11181 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
11182 if (strict_overflow_p)
11183 fold_overflow_warning (("assuming signed overflow does not occur when "
11184 "determining that expression is always "
11185 "non-zero"),
11186 WARN_STRICT_OVERFLOW_MISC);
11187 return ret;
11190 /* Return true if T is known not to be equal to an integer W. */
11192 bool
11193 expr_not_equal_to (tree t, const wide_int &w)
11195 int_range_max vr;
11196 switch (TREE_CODE (t))
11198 case INTEGER_CST:
11199 return wi::to_wide (t) != w;
11201 case SSA_NAME:
11202 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11203 return false;
11205 get_range_query (cfun)->range_of_expr (vr, t);
11206 if (!vr.undefined_p () && !vr.contains_p (w))
11207 return true;
11208 /* If T has some known zero bits and W has any of those bits set,
11209 then T is known not to be equal to W. */
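/* For example, if get_nonzero_bits (T) == 0xff, every bit of T above
   bit 7 is known to be zero, so T cannot be equal to W == 0x100.  */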
11210 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
11211 TYPE_PRECISION (TREE_TYPE (t))), 0))
11212 return true;
11213 return false;
11215 default:
11216 return false;
11220 /* Fold a binary expression of code CODE and type TYPE with operands
11221 OP0 and OP1. LOC is the location of the resulting expression.
11222 Return the folded expression if folding is successful. Otherwise,
11223 return NULL_TREE. */
11225 tree
11226 fold_binary_loc (location_t loc, enum tree_code code, tree type,
11227 tree op0, tree op1)
11229 enum tree_code_class kind = TREE_CODE_CLASS (code);
11230 tree arg0, arg1, tem;
11231 tree t1 = NULL_TREE;
11232 bool strict_overflow_p;
11233 unsigned int prec;
11235 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11236 && TREE_CODE_LENGTH (code) == 2
11237 && op0 != NULL_TREE
11238 && op1 != NULL_TREE);
11240 arg0 = op0;
11241 arg1 = op1;
11243 /* Strip any conversions that don't change the mode. This is
11244 safe for every expression, except for a comparison expression
11245 because its signedness is derived from its operands. So, in
11246 the latter case, only strip conversions that don't change the
11247 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11248 preserved.
11250 Note that this is done as an internal manipulation within the
11251 constant folder, in order to find the simplest representation
11252 of the arguments so that their form can be studied. In any
11253 cases, the appropriate type conversions should be put back in
11254 the tree that will get out of the constant folder. */
11256 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11258 STRIP_SIGN_NOPS (arg0);
11259 STRIP_SIGN_NOPS (arg1);
11261 else
11263 STRIP_NOPS (arg0);
11264 STRIP_NOPS (arg1);
11267 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11268 constant but we can't do arithmetic on them. */
11269 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11271 tem = const_binop (code, type, arg0, arg1);
11272 if (tem != NULL_TREE)
11274 if (TREE_TYPE (tem) != type)
11275 tem = fold_convert_loc (loc, type, tem);
11276 return tem;
11280 /* If this is a commutative operation, and ARG0 is a constant, move it
11281 to ARG1 to reduce the number of tests below. */
11282 if (commutative_tree_code (code)
11283 && tree_swap_operands_p (arg0, arg1))
11284 return fold_build2_loc (loc, code, type, op1, op0);
11286 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11287 to ARG1 to reduce the number of tests below. */
11288 if (kind == tcc_comparison
11289 && tree_swap_operands_p (arg0, arg1))
11290 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11292 tem = generic_simplify (loc, code, type, op0, op1);
11293 if (tem)
11294 return tem;
11296 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11298 First check for cases where an arithmetic operation is applied to a
11299 compound, conditional, or comparison operation. Push the arithmetic
11300 operation inside the compound or conditional to see if any folding
11301 can then be done. Convert comparison to conditional for this purpose.
11302 This also optimizes non-constant cases that used to be done in
11303 expand_expr.
11305 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
11306 one of the operands is a truth value and the other is a truth value or
11307 a BIT_AND_EXPR with the constant 1. In that case, the
11308 code below would make the expression more complex. Change it to a
11309 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11310 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
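/* For example, (a < b) & (c < d) becomes (a < b) && (c < d), and
   (a < b) == (c < d) becomes !((a < b) ^ (c < d)).  */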
11312 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11313 || code == EQ_EXPR || code == NE_EXPR)
11314 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11315 && ((truth_value_p (TREE_CODE (arg0))
11316 && (truth_value_p (TREE_CODE (arg1))
11317 || (TREE_CODE (arg1) == BIT_AND_EXPR
11318 && integer_onep (TREE_OPERAND (arg1, 1)))))
11319 || (truth_value_p (TREE_CODE (arg1))
11320 && (truth_value_p (TREE_CODE (arg0))
11321 || (TREE_CODE (arg0) == BIT_AND_EXPR
11322 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11324 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11325 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11326 : TRUTH_XOR_EXPR,
11327 boolean_type_node,
11328 fold_convert_loc (loc, boolean_type_node, arg0),
11329 fold_convert_loc (loc, boolean_type_node, arg1));
11331 if (code == EQ_EXPR)
11332 tem = invert_truthvalue_loc (loc, tem);
11334 return fold_convert_loc (loc, type, tem);
11337 if (TREE_CODE_CLASS (code) == tcc_binary
11338 || TREE_CODE_CLASS (code) == tcc_comparison)
11340 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11342 tem = fold_build2_loc (loc, code, type,
11343 fold_convert_loc (loc, TREE_TYPE (op0),
11344 TREE_OPERAND (arg0, 1)), op1);
11345 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11346 tem);
11348 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11350 tem = fold_build2_loc (loc, code, type, op0,
11351 fold_convert_loc (loc, TREE_TYPE (op1),
11352 TREE_OPERAND (arg1, 1)));
11353 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11354 tem);
11357 if (TREE_CODE (arg0) == COND_EXPR
11358 || TREE_CODE (arg0) == VEC_COND_EXPR
11359 || COMPARISON_CLASS_P (arg0))
11361 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11362 arg0, arg1,
11363 /*cond_first_p=*/1);
11364 if (tem != NULL_TREE)
11365 return tem;
11368 if (TREE_CODE (arg1) == COND_EXPR
11369 || TREE_CODE (arg1) == VEC_COND_EXPR
11370 || COMPARISON_CLASS_P (arg1))
11372 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11373 arg1, arg0,
11374 /*cond_first_p=*/0);
11375 if (tem != NULL_TREE)
11376 return tem;
11380 switch (code)
11382 case MEM_REF:
11383 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11384 if (TREE_CODE (arg0) == ADDR_EXPR
11385 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11387 tree iref = TREE_OPERAND (arg0, 0);
11388 return fold_build2 (MEM_REF, type,
11389 TREE_OPERAND (iref, 0),
11390 int_const_binop (PLUS_EXPR, arg1,
11391 TREE_OPERAND (iref, 1)));
11394 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11395 if (TREE_CODE (arg0) == ADDR_EXPR
11396 && handled_component_p (TREE_OPERAND (arg0, 0)))
11398 tree base;
11399 poly_int64 coffset;
11400 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11401 &coffset);
11402 if (!base)
11403 return NULL_TREE;
11404 return fold_build2 (MEM_REF, type,
11405 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11406 int_const_binop (PLUS_EXPR, arg1,
11407 size_int (coffset)));
11410 return NULL_TREE;
11412 case POINTER_PLUS_EXPR:
11413 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11414 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11415 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11416 return fold_convert_loc (loc, type,
11417 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11418 fold_convert_loc (loc, sizetype,
11419 arg1),
11420 fold_convert_loc (loc, sizetype,
11421 arg0)));
11423 return NULL_TREE;
11425 case PLUS_EXPR:
11426 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11428 /* X + (X / CST) * -CST is X % CST. */
11429 if (TREE_CODE (arg1) == MULT_EXPR
11430 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11431 && operand_equal_p (arg0,
11432 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11434 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11435 tree cst1 = TREE_OPERAND (arg1, 1);
11436 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11437 cst1, cst0);
11438 if (sum && integer_zerop (sum))
11439 return fold_convert_loc (loc, type,
11440 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11441 TREE_TYPE (arg0), arg0,
11442 cst0));
11446 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11447 one. Make sure the type is not saturating and has the signedness of
11448 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11449 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11450 if ((TREE_CODE (arg0) == MULT_EXPR
11451 || TREE_CODE (arg1) == MULT_EXPR)
11452 && !TYPE_SATURATING (type)
11453 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11454 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11455 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11457 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11458 if (tem)
11459 return tem;
11462 if (! FLOAT_TYPE_P (type))
11464 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11465 (plus (plus (mult) (mult)) (foo)) so that we can
11466 take advantage of the factoring cases below. */
11467 if (ANY_INTEGRAL_TYPE_P (type)
11468 && TYPE_OVERFLOW_WRAPS (type)
11469 && (((TREE_CODE (arg0) == PLUS_EXPR
11470 || TREE_CODE (arg0) == MINUS_EXPR)
11471 && TREE_CODE (arg1) == MULT_EXPR)
11472 || ((TREE_CODE (arg1) == PLUS_EXPR
11473 || TREE_CODE (arg1) == MINUS_EXPR)
11474 && TREE_CODE (arg0) == MULT_EXPR)))
11476 tree parg0, parg1, parg, marg;
11477 enum tree_code pcode;
11479 if (TREE_CODE (arg1) == MULT_EXPR)
11480 parg = arg0, marg = arg1;
11481 else
11482 parg = arg1, marg = arg0;
11483 pcode = TREE_CODE (parg);
11484 parg0 = TREE_OPERAND (parg, 0);
11485 parg1 = TREE_OPERAND (parg, 1);
11486 STRIP_NOPS (parg0);
11487 STRIP_NOPS (parg1);
11489 if (TREE_CODE (parg0) == MULT_EXPR
11490 && TREE_CODE (parg1) != MULT_EXPR)
11491 return fold_build2_loc (loc, pcode, type,
11492 fold_build2_loc (loc, PLUS_EXPR, type,
11493 fold_convert_loc (loc, type,
11494 parg0),
11495 fold_convert_loc (loc, type,
11496 marg)),
11497 fold_convert_loc (loc, type, parg1));
11498 if (TREE_CODE (parg0) != MULT_EXPR
11499 && TREE_CODE (parg1) == MULT_EXPR)
11500 return
11501 fold_build2_loc (loc, PLUS_EXPR, type,
11502 fold_convert_loc (loc, type, parg0),
11503 fold_build2_loc (loc, pcode, type,
11504 fold_convert_loc (loc, type, marg),
11505 fold_convert_loc (loc, type,
11506 parg1)));
11509 else
11511 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11512 to __complex__ ( x, y ). This is not the same for SNaNs or
11513 if signed zeros are involved. */
11514 if (!HONOR_SNANS (arg0)
11515 && !HONOR_SIGNED_ZEROS (arg0)
11516 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11518 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11519 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11520 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11521 bool arg0rz = false, arg0iz = false;
11522 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11523 || (arg0i && (arg0iz = real_zerop (arg0i))))
11525 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11526 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11527 if (arg0rz && arg1i && real_zerop (arg1i))
11529 tree rp = arg1r ? arg1r
11530 : build1 (REALPART_EXPR, rtype, arg1);
11531 tree ip = arg0i ? arg0i
11532 : build1 (IMAGPART_EXPR, rtype, arg0);
11533 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11535 else if (arg0iz && arg1r && real_zerop (arg1r))
11537 tree rp = arg0r ? arg0r
11538 : build1 (REALPART_EXPR, rtype, arg0);
11539 tree ip = arg1i ? arg1i
11540 : build1 (IMAGPART_EXPR, rtype, arg1);
11541 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11546 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11547 We associate floats only if the user has specified
11548 -fassociative-math. */
11549 if (flag_associative_math
11550 && TREE_CODE (arg1) == PLUS_EXPR
11551 && TREE_CODE (arg0) != MULT_EXPR)
11553 tree tree10 = TREE_OPERAND (arg1, 0);
11554 tree tree11 = TREE_OPERAND (arg1, 1);
11555 if (TREE_CODE (tree11) == MULT_EXPR
11556 && TREE_CODE (tree10) == MULT_EXPR)
11558 tree tree0;
11559 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11560 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11563 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11564 We associate floats only if the user has specified
11565 -fassociative-math. */
11566 if (flag_associative_math
11567 && TREE_CODE (arg0) == PLUS_EXPR
11568 && TREE_CODE (arg1) != MULT_EXPR)
11570 tree tree00 = TREE_OPERAND (arg0, 0);
11571 tree tree01 = TREE_OPERAND (arg0, 1);
11572 if (TREE_CODE (tree01) == MULT_EXPR
11573 && TREE_CODE (tree00) == MULT_EXPR)
11575 tree tree0;
11576 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11577 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11582 bit_rotate:
11583 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11584 is a rotate of A by C1 bits. */
11585 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11586 is a rotate of A by B bits.
11587 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11588 though in this case CODE must be | and not + or ^, otherwise
11589 it doesn't return A when B is 0. */
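/* For a 32-bit unsigned A this matches the common C rotate idiom
   (A << B) | (A >> (-B & 31)), which folds to a single rotate of A
   left by B bits.  */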
11591 enum tree_code code0, code1;
11592 tree rtype;
11593 code0 = TREE_CODE (arg0);
11594 code1 = TREE_CODE (arg1);
11595 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11596 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11597 && operand_equal_p (TREE_OPERAND (arg0, 0),
11598 TREE_OPERAND (arg1, 0), 0)
11599 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11600 TYPE_UNSIGNED (rtype))
11601 /* Only create rotates in complete modes. Other cases are not
11602 expanded properly. */
11603 && (element_precision (rtype)
11604 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11606 tree tree01, tree11;
11607 tree orig_tree01, orig_tree11;
11608 enum tree_code code01, code11;
11610 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11611 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11612 STRIP_NOPS (tree01);
11613 STRIP_NOPS (tree11);
11614 code01 = TREE_CODE (tree01);
11615 code11 = TREE_CODE (tree11);
11616 if (code11 != MINUS_EXPR
11617 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11619 std::swap (code0, code1);
11620 std::swap (code01, code11);
11621 std::swap (tree01, tree11);
11622 std::swap (orig_tree01, orig_tree11);
11624 if (code01 == INTEGER_CST
11625 && code11 == INTEGER_CST
11626 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11627 == element_precision (rtype)))
11629 tem = build2_loc (loc, LROTATE_EXPR,
11630 rtype, TREE_OPERAND (arg0, 0),
11631 code0 == LSHIFT_EXPR
11632 ? orig_tree01 : orig_tree11);
11633 return fold_convert_loc (loc, type, tem);
11635 else if (code11 == MINUS_EXPR)
11637 tree tree110, tree111;
11638 tree110 = TREE_OPERAND (tree11, 0);
11639 tree111 = TREE_OPERAND (tree11, 1);
11640 STRIP_NOPS (tree110);
11641 STRIP_NOPS (tree111);
11642 if (TREE_CODE (tree110) == INTEGER_CST
11643 && compare_tree_int (tree110,
11644 element_precision (rtype)) == 0
11645 && operand_equal_p (tree01, tree111, 0))
11647 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11648 ? LROTATE_EXPR : RROTATE_EXPR),
11649 rtype, TREE_OPERAND (arg0, 0),
11650 orig_tree01);
11651 return fold_convert_loc (loc, type, tem);
11654 else if (code == BIT_IOR_EXPR
11655 && code11 == BIT_AND_EXPR
11656 && pow2p_hwi (element_precision (rtype)))
11658 tree tree110, tree111;
11659 tree110 = TREE_OPERAND (tree11, 0);
11660 tree111 = TREE_OPERAND (tree11, 1);
11661 STRIP_NOPS (tree110);
11662 STRIP_NOPS (tree111);
11663 if (TREE_CODE (tree110) == NEGATE_EXPR
11664 && TREE_CODE (tree111) == INTEGER_CST
11665 && compare_tree_int (tree111,
11666 element_precision (rtype) - 1) == 0
11667 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11669 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11670 ? LROTATE_EXPR : RROTATE_EXPR),
11671 rtype, TREE_OPERAND (arg0, 0),
11672 orig_tree01);
11673 return fold_convert_loc (loc, type, tem);
11679 associate:
11680 /* In most languages, can't associate operations on floats through
11681 parentheses. Rather than remember where the parentheses were, we
11682 don't associate floats at all, unless the user has specified
11683 -fassociative-math.
11684 And, we need to make sure type is not saturating. */
11686 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11687 && !TYPE_SATURATING (type)
11688 && !TYPE_OVERFLOW_SANITIZED (type))
11690 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11691 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11692 tree atype = type;
11693 bool ok = true;
11695 /* Split both trees into variables, constants, and literals. Then
11696 associate each group together, the constants with literals,
11697 then the result with variables. This increases the chances of
11698 literals being recombined later and of generating relocatable
11699 expressions for the sum of a constant and literal. */
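/* For example, with unsigned X and Y, (X + 1) + (Y + 2) splits into
   the variables X and Y and the literals 1 and 2, which reassociate
   to (X + Y) + 3.  */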
11700 var0 = split_tree (arg0, type, code,
11701 &minus_var0, &con0, &minus_con0,
11702 &lit0, &minus_lit0, 0);
11703 var1 = split_tree (arg1, type, code,
11704 &minus_var1, &con1, &minus_con1,
11705 &lit1, &minus_lit1, code == MINUS_EXPR);
11707 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11708 if (code == MINUS_EXPR)
11709 code = PLUS_EXPR;
11711 /* With undefined overflow prefer doing association in a type
11712 which wraps on overflow, if that is one of the operand types. */
11713 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11714 && !TYPE_OVERFLOW_WRAPS (type))
11716 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11717 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11718 atype = TREE_TYPE (arg0);
11719 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11720 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11721 atype = TREE_TYPE (arg1);
11722 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11725 /* With undefined overflow we can only associate constants with one
11726 variable, and constants whose association doesn't overflow. */
11727 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11728 && !TYPE_OVERFLOW_WRAPS (atype))
11730 if ((var0 && var1) || (minus_var0 && minus_var1))
11732 /* ??? If split_tree would handle NEGATE_EXPR we could
11733 simply reject these cases and the allowed cases would
11734 be the var0/minus_var1 ones. */
11735 tree tmp0 = var0 ? var0 : minus_var0;
11736 tree tmp1 = var1 ? var1 : minus_var1;
11737 bool one_neg = false;
11739 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11741 tmp0 = TREE_OPERAND (tmp0, 0);
11742 one_neg = !one_neg;
11744 if (CONVERT_EXPR_P (tmp0)
11745 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11746 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11747 <= TYPE_PRECISION (atype)))
11748 tmp0 = TREE_OPERAND (tmp0, 0);
11749 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11751 tmp1 = TREE_OPERAND (tmp1, 0);
11752 one_neg = !one_neg;
11754 if (CONVERT_EXPR_P (tmp1)
11755 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11756 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11757 <= TYPE_PRECISION (atype)))
11758 tmp1 = TREE_OPERAND (tmp1, 0);
11759 /* The only case we can still associate with two variables
11760 is if they cancel out. */
11761 if (!one_neg
11762 || !operand_equal_p (tmp0, tmp1, 0))
11763 ok = false;
11765 else if ((var0 && minus_var1
11766 && ! operand_equal_p (var0, minus_var1, 0))
11767 || (minus_var0 && var1
11768 && ! operand_equal_p (minus_var0, var1, 0)))
11769 ok = false;
11772 /* Only do something if we found more than two objects. Otherwise,
11773 nothing has changed and we risk infinite recursion. */
11774 if (ok
11775 && ((var0 != 0) + (var1 != 0)
11776 + (minus_var0 != 0) + (minus_var1 != 0)
11777 + (con0 != 0) + (con1 != 0)
11778 + (minus_con0 != 0) + (minus_con1 != 0)
11779 + (lit0 != 0) + (lit1 != 0)
11780 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11782 var0 = associate_trees (loc, var0, var1, code, atype);
11783 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11784 code, atype);
11785 con0 = associate_trees (loc, con0, con1, code, atype);
11786 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11787 code, atype);
11788 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11789 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11790 code, atype);
11792 if (minus_var0 && var0)
11794 var0 = associate_trees (loc, var0, minus_var0,
11795 MINUS_EXPR, atype);
11796 minus_var0 = 0;
11798 if (minus_con0 && con0)
11800 con0 = associate_trees (loc, con0, minus_con0,
11801 MINUS_EXPR, atype);
11802 minus_con0 = 0;
11805 /* Preserve the MINUS_EXPR if the negative part of the literal is
11806 greater than the positive part. Otherwise, the multiplicative
11807 folding code (i.e. extract_muldiv) may be fooled when
11808 unsigned constants are subtracted, as in the following
11809 example: ((X*2 + 4) - 8U)/2. */
11810 if (minus_lit0 && lit0)
11812 if (TREE_CODE (lit0) == INTEGER_CST
11813 && TREE_CODE (minus_lit0) == INTEGER_CST
11814 && tree_int_cst_lt (lit0, minus_lit0)
11815 /* But avoid ending up with only negated parts. */
11816 && (var0 || con0))
11818 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11819 MINUS_EXPR, atype);
11820 lit0 = 0;
11822 else
11824 lit0 = associate_trees (loc, lit0, minus_lit0,
11825 MINUS_EXPR, atype);
11826 minus_lit0 = 0;
11830 /* Don't introduce overflows through reassociation. */
11831 if ((lit0 && TREE_OVERFLOW_P (lit0))
11832 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11833 return NULL_TREE;
11835 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11836 con0 = associate_trees (loc, con0, lit0, code, atype);
11837 lit0 = 0;
11838 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11839 code, atype);
11840 minus_lit0 = 0;
11842 /* Eliminate minus_con0. */
11843 if (minus_con0)
11845 if (con0)
11846 con0 = associate_trees (loc, con0, minus_con0,
11847 MINUS_EXPR, atype);
11848 else if (var0)
11849 var0 = associate_trees (loc, var0, minus_con0,
11850 MINUS_EXPR, atype);
11851 else
11852 gcc_unreachable ();
11853 minus_con0 = 0;
11856 /* Eliminate minus_var0. */
11857 if (minus_var0)
11859 if (con0)
11860 con0 = associate_trees (loc, con0, minus_var0,
11861 MINUS_EXPR, atype);
11862 else
11863 gcc_unreachable ();
11864 minus_var0 = 0;
11867 return
11868 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11869 code, atype));
11873 return NULL_TREE;
11875 case POINTER_DIFF_EXPR:
11876 case MINUS_EXPR:
11877 /* Fold &a[i] - &a[j] to i-j. */
11878 if (TREE_CODE (arg0) == ADDR_EXPR
11879 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11880 && TREE_CODE (arg1) == ADDR_EXPR
11881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11883 tree tem = fold_addr_of_array_ref_difference (loc, type,
11884 TREE_OPERAND (arg0, 0),
11885 TREE_OPERAND (arg1, 0),
11886 code
11887 == POINTER_DIFF_EXPR);
11888 if (tem)
11889 return tem;
11892 /* The remaining transformations do not apply to pointers. */
11893 if (code == POINTER_DIFF_EXPR)
11894 return NULL_TREE;
11896 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11897 if (TREE_CODE (arg0) == NEGATE_EXPR
11898 && negate_expr_p (op1)
11899 /* If arg0 is e.g. unsigned int and type is int, then this could
11900 introduce UB, because if A is INT_MIN at runtime, the original
11901 expression can be well defined while the latter is not.
11902 See PR83269. */
11903 && !(ANY_INTEGRAL_TYPE_P (type)
11904 && TYPE_OVERFLOW_UNDEFINED (type)
11905 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11906 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11907 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11908 fold_convert_loc (loc, type,
11909 TREE_OPERAND (arg0, 0)));
11911 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11912 __complex__ ( x, -y ). This is not the same for SNaNs or if
11913 signed zeros are involved. */
11914 if (!HONOR_SNANS (arg0)
11915 && !HONOR_SIGNED_ZEROS (arg0)
11916 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11918 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11919 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11920 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11921 bool arg0rz = false, arg0iz = false;
11922 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11923 || (arg0i && (arg0iz = real_zerop (arg0i))))
11925 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11926 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11927 if (arg0rz && arg1i && real_zerop (arg1i))
11929 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11930 arg1r ? arg1r
11931 : build1 (REALPART_EXPR, rtype, arg1));
11932 tree ip = arg0i ? arg0i
11933 : build1 (IMAGPART_EXPR, rtype, arg0);
11934 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11936 else if (arg0iz && arg1r && real_zerop (arg1r))
11938 tree rp = arg0r ? arg0r
11939 : build1 (REALPART_EXPR, rtype, arg0);
11940 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11941 arg1i ? arg1i
11942 : build1 (IMAGPART_EXPR, rtype, arg1));
11943 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11948 /* A - B -> A + (-B) if B is easily negatable. */
11949 if (negate_expr_p (op1)
11950 && ! TYPE_OVERFLOW_SANITIZED (type)
11951 && ((FLOAT_TYPE_P (type)
11952 /* Avoid this transformation if B is a positive REAL_CST. */
11953 && (TREE_CODE (op1) != REAL_CST
11954 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11955 || INTEGRAL_TYPE_P (type)))
11956 return fold_build2_loc (loc, PLUS_EXPR, type,
11957 fold_convert_loc (loc, type, arg0),
11958 negate_expr (op1));
11960 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11961 one. Make sure the type is not saturating and has the signedness of
11962 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11963 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11964 if ((TREE_CODE (arg0) == MULT_EXPR
11965 || TREE_CODE (arg1) == MULT_EXPR)
11966 && !TYPE_SATURATING (type)
11967 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11968 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11969 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11971 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11972 if (tem)
11973 return tem;
11976 goto associate;
11978 case MULT_EXPR:
11979 if (! FLOAT_TYPE_P (type))
11981 /* Transform x * -C into -x * C if x is easily negatable. */
11982 if (TREE_CODE (op1) == INTEGER_CST
11983 && tree_int_cst_sgn (op1) == -1
11984 && negate_expr_p (op0)
11985 && negate_expr_p (op1)
11986 && (tem = negate_expr (op1)) != op1
11987 && ! TREE_OVERFLOW (tem))
11988 return fold_build2_loc (loc, MULT_EXPR, type,
11989 fold_convert_loc (loc, type,
11990 negate_expr (op0)), tem);
11992 strict_overflow_p = false;
11993 if (TREE_CODE (arg1) == INTEGER_CST
11994 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11995 &strict_overflow_p)) != 0)
11997 if (strict_overflow_p)
11998 fold_overflow_warning (("assuming signed overflow does not "
11999 "occur when simplifying "
12000 "multiplication"),
12001 WARN_STRICT_OVERFLOW_MISC);
12002 return fold_convert_loc (loc, type, tem);
12005 /* Optimize z * conj(z) for integer complex numbers. */
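/* z * conj(z), i.e. (a + bi) * (a - bi), equals a*a + b*b, a complex
   value with zero imaginary part.  */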
12006 if (TREE_CODE (arg0) == CONJ_EXPR
12007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12008 return fold_mult_zconjz (loc, type, arg1);
12009 if (TREE_CODE (arg1) == CONJ_EXPR
12010 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12011 return fold_mult_zconjz (loc, type, arg0);
12013 else
12015 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
12016 This is not the same for NaNs or if signed zeros are
12017 involved. */
12018 if (!HONOR_NANS (arg0)
12019 && !HONOR_SIGNED_ZEROS (arg0)
12020 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12021 && TREE_CODE (arg1) == COMPLEX_CST
12022 && real_zerop (TREE_REALPART (arg1)))
12024 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
12025 if (real_onep (TREE_IMAGPART (arg1)))
12026 return
12027 fold_build2_loc (loc, COMPLEX_EXPR, type,
12028 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
12029 rtype, arg0)),
12030 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
12031 else if (real_minus_onep (TREE_IMAGPART (arg1)))
12032 return
12033 fold_build2_loc (loc, COMPLEX_EXPR, type,
12034 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
12035 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
12036 rtype, arg0)));
12039 /* Optimize z * conj(z) for floating point complex numbers.
12040 Guarded by flag_unsafe_math_optimizations as non-finite
12041 imaginary components don't produce scalar results. */
12042 if (flag_unsafe_math_optimizations
12043 && TREE_CODE (arg0) == CONJ_EXPR
12044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12045 return fold_mult_zconjz (loc, type, arg1);
12046 if (flag_unsafe_math_optimizations
12047 && TREE_CODE (arg1) == CONJ_EXPR
12048 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12049 return fold_mult_zconjz (loc, type, arg0);
12051 goto associate;
12053 case BIT_IOR_EXPR:
12054 /* Canonicalize (X & C1) | C2. */
12055 if (TREE_CODE (arg0) == BIT_AND_EXPR
12056 && TREE_CODE (arg1) == INTEGER_CST
12057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12059 int width = TYPE_PRECISION (type), w;
12060 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
12061 wide_int c2 = wi::to_wide (arg1);
12063 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
12064 if ((c1 & c2) == c1)
12065 return omit_one_operand_loc (loc, type, arg1,
12066 TREE_OPERAND (arg0, 0));
12068 wide_int msk = wi::mask (width, false,
12069 TYPE_PRECISION (TREE_TYPE (arg1)));
12071 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
12072 if (wi::bit_and_not (msk, c1 | c2) == 0)
12074 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12075 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12078 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
12079 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
12080 mode which allows further optimizations. */
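/* For example, (X & 0x0f) | 0x06 becomes (X & 0x09) | 0x06, while
   (X & 0xf0) | 0x0f becomes (X & 0xff) | 0x0f to keep a byte mask.  */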
12081 c1 &= msk;
12082 c2 &= msk;
12083 wide_int c3 = wi::bit_and_not (c1, c2);
12084 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
12086 wide_int mask = wi::mask (w, false,
12087 TYPE_PRECISION (type));
12088 if (((c1 | c2) & mask) == mask
12089 && wi::bit_and_not (c1, mask) == 0)
12091 c3 = mask;
12092 break;
12096 if (c3 != c1)
12098 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12099 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
12100 wide_int_to_tree (type, c3));
12101 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12105 /* See if this can be simplified into a rotate first. If that
12106 is unsuccessful, continue in the association code. */
12107 goto bit_rotate;
12109 case BIT_XOR_EXPR:
12110 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
12111 if (TREE_CODE (arg0) == BIT_AND_EXPR
12112 && INTEGRAL_TYPE_P (type)
12113 && integer_onep (TREE_OPERAND (arg0, 1))
12114 && integer_onep (arg1))
12115 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
12116 build_zero_cst (TREE_TYPE (arg0)));
12118 /* See if this can be simplified into a rotate first. If that
12119 is unsuccessful, continue in the association code. */
12120 goto bit_rotate;
12122 case BIT_AND_EXPR:
12123 /* Fold !X & 1 as X == 0. */
12124 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12125 && integer_onep (arg1))
12127 tem = TREE_OPERAND (arg0, 0);
12128 return fold_build2_loc (loc, EQ_EXPR, type, tem,
12129 build_zero_cst (TREE_TYPE (tem)));
12132 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
12133 multiple of 1 << CST. */
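/* For example, (X * 8) & -4 folds to X * 8, since X * 8 is always a
   multiple of 4 and its two least significant bits are already zero.  */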
12134 if (TREE_CODE (arg1) == INTEGER_CST)
12136 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12137 wide_int ncst1 = -cst1;
12138 if ((cst1 & ncst1) == ncst1
12139 && multiple_of_p (type, arg0,
12140 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
12141 return fold_convert_loc (loc, type, arg0);
12144 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12145 bits from CST2. */
12146 if (TREE_CODE (arg1) == INTEGER_CST
12147 && TREE_CODE (arg0) == MULT_EXPR
12148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12150 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
12151 wide_int masked
12152 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
12154 if (masked == 0)
12155 return omit_two_operands_loc (loc, type, build_zero_cst (type),
12156 arg0, arg1);
12157 else if (masked != warg1)
12159 /* Avoid the transform if arg1 is a mask of some
12160 mode which allows further optimizations. */
12161 int pop = wi::popcount (warg1);
12162 if (!(pop >= BITS_PER_UNIT
12163 && pow2p_hwi (pop)
12164 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
12165 return fold_build2_loc (loc, code, type, op0,
12166 wide_int_to_tree (type, masked));
12170 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12171 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12172 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12174 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12176 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
12177 if (mask == -1)
12178 return
12179 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12182 goto associate;
12184 case RDIV_EXPR:
12185 /* Don't touch a floating-point divide by zero unless the mode
12186 of the constant can represent infinity. */
12187 if (TREE_CODE (arg1) == REAL_CST
12188 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12189 && real_zerop (arg1))
12190 return NULL_TREE;
12192 /* (-A) / (-B) -> A / B */
12193 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12194 return fold_build2_loc (loc, RDIV_EXPR, type,
12195 TREE_OPERAND (arg0, 0),
12196 negate_expr (arg1));
12197 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12198 return fold_build2_loc (loc, RDIV_EXPR, type,
12199 negate_expr (arg0),
12200 TREE_OPERAND (arg1, 0));
12201 return NULL_TREE;
12203 case TRUNC_DIV_EXPR:
12204 /* Fall through */
12206 case FLOOR_DIV_EXPR:
12207 /* Simplify A / (B << N) where A and B are positive and B is
12208 a power of 2, to A >> (N + log2(B)). */
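/* For example, for unsigned A, A / (4 << N) folds to A >> (N + 2).  */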
12209 strict_overflow_p = false;
12210 if (TREE_CODE (arg1) == LSHIFT_EXPR
12211 && (TYPE_UNSIGNED (type)
12212 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12214 tree sval = TREE_OPERAND (arg1, 0);
12215 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12217 tree sh_cnt = TREE_OPERAND (arg1, 1);
12218 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12219 wi::exact_log2 (wi::to_wide (sval)));
12221 if (strict_overflow_p)
12222 fold_overflow_warning (("assuming signed overflow does not "
12223 "occur when simplifying A / (B << N)"),
12224 WARN_STRICT_OVERFLOW_MISC);
12226 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12227 sh_cnt, pow2);
12228 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12229 fold_convert_loc (loc, type, arg0), sh_cnt);
12233 /* Fall through */
12235 case ROUND_DIV_EXPR:
12236 case CEIL_DIV_EXPR:
12237 case EXACT_DIV_EXPR:
12238 if (integer_zerop (arg1))
12239 return NULL_TREE;
12241 /* Convert -A / -B to A / B when the type is signed and overflow is
12242 undefined. */
12243 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12244 && TREE_CODE (op0) == NEGATE_EXPR
12245 && negate_expr_p (op1))
12247 if (ANY_INTEGRAL_TYPE_P (type))
12248 fold_overflow_warning (("assuming signed overflow does not occur "
12249 "when distributing negation across "
12250 "division"),
12251 WARN_STRICT_OVERFLOW_MISC);
12252 return fold_build2_loc (loc, code, type,
12253 fold_convert_loc (loc, type,
12254 TREE_OPERAND (arg0, 0)),
12255 negate_expr (op1));
12257 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12258 && TREE_CODE (arg1) == NEGATE_EXPR
12259 && negate_expr_p (op0))
12261 if (ANY_INTEGRAL_TYPE_P (type))
12262 fold_overflow_warning (("assuming signed overflow does not occur "
12263 "when distributing negation across "
12264 "division"),
12265 WARN_STRICT_OVERFLOW_MISC);
12266 return fold_build2_loc (loc, code, type,
12267 negate_expr (op0),
12268 fold_convert_loc (loc, type,
12269 TREE_OPERAND (arg1, 0)));
12272 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12273 operation, EXACT_DIV_EXPR.
12275 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12276 At one time others generated faster code, but it's not clear whether they
12277 still do after the last round of changes to the DIV code in expmed.cc. */
12278 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12279 && multiple_of_p (type, arg0, arg1))
12280 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12281 fold_convert (type, arg0),
12282 fold_convert (type, arg1));
12284 strict_overflow_p = false;
12285 if (TREE_CODE (arg1) == INTEGER_CST
12286 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12287 &strict_overflow_p)) != 0)
12289 if (strict_overflow_p)
12290 fold_overflow_warning (("assuming signed overflow does not occur "
12291 "when simplifying division"),
12292 WARN_STRICT_OVERFLOW_MISC);
12293 return fold_convert_loc (loc, type, tem);
12296 return NULL_TREE;
12298 case CEIL_MOD_EXPR:
12299 case FLOOR_MOD_EXPR:
12300 case ROUND_MOD_EXPR:
12301 case TRUNC_MOD_EXPR:
12302 strict_overflow_p = false;
12303 if (TREE_CODE (arg1) == INTEGER_CST
12304 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12305 &strict_overflow_p)) != 0)
12307 if (strict_overflow_p)
12308 fold_overflow_warning (("assuming signed overflow does not occur "
12309 "when simplifying modulus"),
12310 WARN_STRICT_OVERFLOW_MISC);
12311 return fold_convert_loc (loc, type, tem);
12314 return NULL_TREE;
12316 case LROTATE_EXPR:
12317 case RROTATE_EXPR:
12318 case RSHIFT_EXPR:
12319 case LSHIFT_EXPR:
12320 /* Since a negative shift count is not well-defined,
12321 don't try to compute it in the compiler. */
12322 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12323 return NULL_TREE;
12325 prec = element_precision (type);
12327 /* If we have a rotate of a bit operation with the rotate count and
12328 the second operand of the bit operation both constant,
12329 permute the two operations. */
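/* For example, for a 32-bit X, (X & 0xff) r>> 8 becomes
   (X r>> 8) & 0xff000000, with the constant rotated at compile time.  */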
12330 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12331 && (TREE_CODE (arg0) == BIT_AND_EXPR
12332 || TREE_CODE (arg0) == BIT_IOR_EXPR
12333 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12334 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12336 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12337 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12338 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12339 fold_build2_loc (loc, code, type,
12340 arg00, arg1),
12341 fold_build2_loc (loc, code, type,
12342 arg01, arg1));
12345 /* Two consecutive rotates adding up to some integer
12346 multiple of the precision of the type can be ignored. */
12347 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12348 && TREE_CODE (arg0) == RROTATE_EXPR
12349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12350 && wi::umod_trunc (wi::to_wide (arg1)
12351 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12352 prec) == 0)
12353 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12355 return NULL_TREE;
12357 case MIN_EXPR:
12358 case MAX_EXPR:
12359 goto associate;
12361 case TRUTH_ANDIF_EXPR:
12362 /* Note that the operands of this must be ints
12363 and their values must be 0 or 1.
12364 ("true" is a fixed value perhaps depending on the language.) */
12365 /* If first arg is constant zero, return it. */
12366 if (integer_zerop (arg0))
12367 return fold_convert_loc (loc, type, arg0);
12368 /* FALLTHRU */
12369 case TRUTH_AND_EXPR:
12370 /* If either arg is constant true, drop it. */
12371 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12372 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12373 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12374 /* Preserve sequence points. */
12375 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12377 /* If second arg is constant zero, result is zero, but first arg
12378 must be evaluated. */
12379 if (integer_zerop (arg1))
12380 return omit_one_operand_loc (loc, type, arg1, arg0);
12381 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12382 case will be handled here. */
12383 if (integer_zerop (arg0))
12384 return omit_one_operand_loc (loc, type, arg0, arg1);
12386 /* !X && X is always false. */
12387 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12388 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12389 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12390 /* X && !X is always false. */
12391 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12392 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12393 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12395 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12396 means A >= Y && A != MAX, but in this case we know that
12397 A < X <= MAX. */
12399 if (!TREE_SIDE_EFFECTS (arg0)
12400 && !TREE_SIDE_EFFECTS (arg1))
12402 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12403 if (tem && !operand_equal_p (tem, arg0, 0))
12404 return fold_convert (type,
12405 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12406 tem, arg1));
12408 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12409 if (tem && !operand_equal_p (tem, arg1, 0))
12410 return fold_convert (type,
12411 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12412 arg0, tem));
12415 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12416 != NULL_TREE)
12417 return tem;
12419 return NULL_TREE;
12421 case TRUTH_ORIF_EXPR:
12422 /* Note that the operands of this must be ints
12423 and their values must be 0 or 1.
12424 ("true" is a fixed value perhaps depending on the language.) */
12425 /* If first arg is constant true, return it. */
12426 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12427 return fold_convert_loc (loc, type, arg0);
12428 /* FALLTHRU */
12429 case TRUTH_OR_EXPR:
12430 /* If either arg is constant zero, drop it. */
12431 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12432 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12433 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12434 /* Preserve sequence points. */
12435 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12436 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12437 /* If second arg is constant true, result is true, but we must
12438 evaluate first arg. */
12439 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12440 return omit_one_operand_loc (loc, type, arg1, arg0);
12441 /* Likewise for first arg, but note this only occurs here for
12442 TRUTH_OR_EXPR. */
12443 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12444 return omit_one_operand_loc (loc, type, arg0, arg1);
12446 /* !X || X is always true. */
12447 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12449 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12450 /* X || !X is always true. */
12451 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12452 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12453 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12455 /* (X && !Y) || (!X && Y) is X ^ Y */
12456 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12457 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12459 tree a0, a1, l0, l1, n0, n1;
12461 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12462 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12464 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12465 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12467 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12468 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12470 if ((operand_equal_p (n0, a0, 0)
12471 && operand_equal_p (n1, a1, 0))
12472 || (operand_equal_p (n0, a1, 0)
12473 && operand_equal_p (n1, a0, 0)))
12474 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12477 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12478 != NULL_TREE)
12479 return tem;
12481 return NULL_TREE;
12483 case TRUTH_XOR_EXPR:
12484 /* If the second arg is constant zero, drop it. */
12485 if (integer_zerop (arg1))
12486 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12487 /* If the second arg is constant true, this is a logical inversion. */
12488 if (integer_onep (arg1))
12490 tem = invert_truthvalue_loc (loc, arg0);
12491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12493 /* Identical arguments cancel to zero. */
12494 if (operand_equal_p (arg0, arg1, 0))
12495 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12497 /* !X ^ X is always true. */
12498 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12499 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12500 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12502 /* X ^ !X is always true. */
12503 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12504 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12505 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12507 return NULL_TREE;
12509 case EQ_EXPR:
12510 case NE_EXPR:
12511 STRIP_NOPS (arg0);
12512 STRIP_NOPS (arg1);
12514 tem = fold_comparison (loc, code, type, op0, op1);
12515 if (tem != NULL_TREE)
12516 return tem;
12518 /* bool_var != 1 becomes !bool_var. */
12519 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12520 && code == NE_EXPR)
12521 return fold_convert_loc (loc, type,
12522 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12523 TREE_TYPE (arg0), arg0));
12525 /* bool_var == 0 becomes !bool_var. */
12526 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12527 && code == EQ_EXPR)
12528 return fold_convert_loc (loc, type,
12529 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12530 TREE_TYPE (arg0), arg0));
12532 /* !exp != 0 becomes !exp */
12533 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12534 && code == NE_EXPR)
12535 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12537 /* If this is an EQ or NE comparison with zero and ARG0 is
12538 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12539 two operations, but the latter can be done in one less insn
12540 on machines that have only two-operand insns or on which a
12541 constant cannot be the first operand. */
12542 if (TREE_CODE (arg0) == BIT_AND_EXPR
12543 && integer_zerop (arg1))
12545 tree arg00 = TREE_OPERAND (arg0, 0);
12546 tree arg01 = TREE_OPERAND (arg0, 1);
12547 if (TREE_CODE (arg00) == LSHIFT_EXPR
12548 && integer_onep (TREE_OPERAND (arg00, 0)))
12550 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12551 arg01, TREE_OPERAND (arg00, 1));
12552 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12553 build_one_cst (TREE_TYPE (arg0)));
12554 return fold_build2_loc (loc, code, type,
12555 fold_convert_loc (loc, TREE_TYPE (arg1),
12556 tem), arg1);
12558 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12559 && integer_onep (TREE_OPERAND (arg01, 0)))
12561 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12562 arg00, TREE_OPERAND (arg01, 1));
12563 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12564 build_one_cst (TREE_TYPE (arg0)));
12565 return fold_build2_loc (loc, code, type,
12566 fold_convert_loc (loc, TREE_TYPE (arg1),
12567 tem), arg1);
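/* Worked example of the transformation above: a single-bit test with
   a variable shift count such as
     ((1 << foo) & bar) != 0
   is rewritten as
     ((bar >> foo) & 1) != 0
   Both forms take one shift and one AND, but the second never needs a
   constant as the first operand of the shift, which is cheaper on
   two-operand targets. */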
12571 /* If this is a comparison of a field, we may be able to simplify it. */
12572 if ((TREE_CODE (arg0) == COMPONENT_REF
12573 || TREE_CODE (arg0) == BIT_FIELD_REF)
12574 /* Handle the constant case even without -O
12575 to make sure the warnings are given. */
12576 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12578 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12579 if (t1)
12580 return t1;
12583 /* Optimize comparisons of strlen vs zero to a compare of the
12584 first character of the string vs zero. To wit,
12585 strlen(ptr) == 0 => *ptr == 0
12586 strlen(ptr) != 0 => *ptr != 0
12587 Other cases should reduce to one of these two (or a constant)
12588 due to the return value of strlen being unsigned. */
12589 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12591 tree fndecl = get_callee_fndecl (arg0);
12593 if (fndecl
12594 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12595 && call_expr_nargs (arg0) == 1
12596 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12597 == POINTER_TYPE))
12599 tree ptrtype
12600 = build_pointer_type (build_qualified_type (char_type_node,
12601 TYPE_QUAL_CONST));
12602 tree ptr = fold_convert_loc (loc, ptrtype,
12603 CALL_EXPR_ARG (arg0, 0));
12604 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12605 return fold_build2_loc (loc, code, type, iref,
12606 build_int_cst (TREE_TYPE (iref), 0));
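/* Example of the strlen simplification above: a test like
     strlen (p) != 0
   folds to the equivalent of
     *(const char *) p != 0
   which drops the call entirely; since strlen returns an unsigned
   value, any comparison against zero reduces to one of the two shapes
   listed in the comment. */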
12610 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12611 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12612 if (TREE_CODE (arg0) == RSHIFT_EXPR
12613 && integer_zerop (arg1)
12614 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12616 tree arg00 = TREE_OPERAND (arg0, 0);
12617 tree arg01 = TREE_OPERAND (arg0, 1);
12618 tree itype = TREE_TYPE (arg00);
12619 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12621 if (TYPE_UNSIGNED (itype))
12623 itype = signed_type_for (itype);
12624 arg00 = fold_convert_loc (loc, itype, arg00);
12626 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12627 type, arg00, build_zero_cst (itype));
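/* Example for the fold above: with a 32-bit int X,
     (X >> 31) != 0 becomes X < 0
     (X >> 31) == 0 becomes X >= 0
   because shifting right by precision-1 leaves only the sign bit.
   An unsigned X is first converted to the corresponding signed type
   so that the sign test is meaningful. */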
12631 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12632 (X & C) == 0 when C is a single bit. */
12633 if (TREE_CODE (arg0) == BIT_AND_EXPR
12634 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12635 && integer_zerop (arg1)
12636 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12638 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12639 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12640 TREE_OPERAND (arg0, 1));
12641 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12642 type, tem,
12643 fold_convert_loc (loc, TREE_TYPE (arg0),
12644 arg1));
12647 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12648 constant C is a power of two, i.e. a single bit. */
12649 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12650 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12651 && integer_zerop (arg1)
12652 && integer_pow2p (TREE_OPERAND (arg0, 1))
12653 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12654 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12656 tree arg00 = TREE_OPERAND (arg0, 0);
12657 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12658 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12661 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12662 when C is a power of two, i.e. a single bit. */
12663 if (TREE_CODE (arg0) == BIT_AND_EXPR
12664 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12665 && integer_zerop (arg1)
12666 && integer_pow2p (TREE_OPERAND (arg0, 1))
12667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12668 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12670 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12671 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12672 arg000, TREE_OPERAND (arg0, 1));
12673 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12674 tem, build_int_cst (TREE_TYPE (tem), 0));
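/* Sketch of the single-bit folds above with C == 8:
     (~X & 8) == 0 becomes (X & 8) != 0
     ((X & 8) ^ 8) == 0 becomes (X & 8) != 0
     ((X ^ 8) & 8) == 0 becomes (X & 8) != 0
   Each left-hand side tests the same single bit of X, so only the
   polarity of the comparison needs to flip. */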
12677 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12678 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12680 tree arg00 = TREE_OPERAND (arg0, 0);
12681 tree arg01 = TREE_OPERAND (arg0, 1);
12682 tree arg10 = TREE_OPERAND (arg1, 0);
12683 tree arg11 = TREE_OPERAND (arg1, 1);
12684 tree itype = TREE_TYPE (arg0);
12686 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12687 operand_equal_p guarantees no side-effects so we don't need
12688 to use omit_one_operand on Z. */
12689 if (operand_equal_p (arg01, arg11, 0))
12690 return fold_build2_loc (loc, code, type, arg00,
12691 fold_convert_loc (loc, TREE_TYPE (arg00),
12692 arg10));
12693 if (operand_equal_p (arg01, arg10, 0))
12694 return fold_build2_loc (loc, code, type, arg00,
12695 fold_convert_loc (loc, TREE_TYPE (arg00),
12696 arg11));
12697 if (operand_equal_p (arg00, arg11, 0))
12698 return fold_build2_loc (loc, code, type, arg01,
12699 fold_convert_loc (loc, TREE_TYPE (arg01),
12700 arg10));
12701 if (operand_equal_p (arg00, arg10, 0))
12702 return fold_build2_loc (loc, code, type, arg01,
12703 fold_convert_loc (loc, TREE_TYPE (arg01),
12704 arg11));
12706 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12707 if (TREE_CODE (arg01) == INTEGER_CST
12708 && TREE_CODE (arg11) == INTEGER_CST)
12710 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12711 fold_convert_loc (loc, itype, arg11));
12712 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12713 return fold_build2_loc (loc, code, type, tem,
12714 fold_convert_loc (loc, itype, arg10));
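/* Example of the constant case above: since XOR is associative and
   commutative, a comparison such as
     (X ^ 1) == (Y ^ 2)
   folds to
     (X ^ 3) == Y
   with both constants accumulated on one side. */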
12718 /* Attempt to simplify equality/inequality comparisons of complex
12719 values. Only lower the comparison if the result is known or
12720 can be simplified to a single scalar comparison. */
12721 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12722 || TREE_CODE (arg0) == COMPLEX_CST)
12723 && (TREE_CODE (arg1) == COMPLEX_EXPR
12724 || TREE_CODE (arg1) == COMPLEX_CST))
12726 tree real0, imag0, real1, imag1;
12727 tree rcond, icond;
12729 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12731 real0 = TREE_OPERAND (arg0, 0);
12732 imag0 = TREE_OPERAND (arg0, 1);
12734 else
12736 real0 = TREE_REALPART (arg0);
12737 imag0 = TREE_IMAGPART (arg0);
12740 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12742 real1 = TREE_OPERAND (arg1, 0);
12743 imag1 = TREE_OPERAND (arg1, 1);
12745 else
12747 real1 = TREE_REALPART (arg1);
12748 imag1 = TREE_IMAGPART (arg1);
12751 rcond = fold_binary_loc (loc, code, type, real0, real1);
12752 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12754 if (integer_zerop (rcond))
12756 if (code == EQ_EXPR)
12757 return omit_two_operands_loc (loc, type, boolean_false_node,
12758 imag0, imag1);
12759 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12761 else
12763 if (code == NE_EXPR)
12764 return omit_two_operands_loc (loc, type, boolean_true_node,
12765 imag0, imag1);
12766 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12770 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12771 if (icond && TREE_CODE (icond) == INTEGER_CST)
12773 if (integer_zerop (icond))
12775 if (code == EQ_EXPR)
12776 return omit_two_operands_loc (loc, type, boolean_false_node,
12777 real0, real1);
12778 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12780 else
12782 if (code == NE_EXPR)
12783 return omit_two_operands_loc (loc, type, boolean_true_node,
12784 real0, real1);
12785 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12790 return NULL_TREE;
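/* Sketch of the complex lowering above: (a + bi) == (c + di) is
   a == c && b == d, and != is the corresponding disjunction. When one
   part-wise comparison folds to a constant, the whole test either
   collapses to a constant (keeping the remaining operands for their
   side effects via omit_two_operands_loc) or reduces to the single
   remaining scalar comparison. */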
12792 case LT_EXPR:
12793 case GT_EXPR:
12794 case LE_EXPR:
12795 case GE_EXPR:
12796 tem = fold_comparison (loc, code, type, op0, op1);
12797 if (tem != NULL_TREE)
12798 return tem;
12800 /* Transform comparisons of the form X +- C CMP X. */
12801 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12802 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12803 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12804 && !HONOR_SNANS (arg0))
12806 tree arg01 = TREE_OPERAND (arg0, 1);
12807 enum tree_code code0 = TREE_CODE (arg0);
12808 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12810 /* (X - c) > X becomes false. */
12811 if (code == GT_EXPR
12812 && ((code0 == MINUS_EXPR && is_positive >= 0)
12813 || (code0 == PLUS_EXPR && is_positive <= 0)))
12814 return constant_boolean_node (0, type);
12816 /* Likewise (X + c) < X becomes false. */
12817 if (code == LT_EXPR
12818 && ((code0 == PLUS_EXPR && is_positive >= 0)
12819 || (code0 == MINUS_EXPR && is_positive <= 0)))
12820 return constant_boolean_node (0, type);
12822 /* Convert (X - c) <= X to true. */
12823 if (!HONOR_NANS (arg1)
12824 && code == LE_EXPR
12825 && ((code0 == MINUS_EXPR && is_positive >= 0)
12826 || (code0 == PLUS_EXPR && is_positive <= 0)))
12827 return constant_boolean_node (1, type);
12829 /* Convert (X + c) >= X to true. */
12830 if (!HONOR_NANS (arg1)
12831 && code == GE_EXPR
12832 && ((code0 == PLUS_EXPR && is_positive >= 0)
12833 || (code0 == MINUS_EXPR && is_positive <= 0)))
12834 return constant_boolean_node (1, type);
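/* Summary example of the four rules above, for a positive real c:
     (X - c) > X -> false
     (X + c) < X -> false
     (X - c) <= X -> true (only when NaNs need not be honored)
     (X + c) >= X -> true (only when NaNs need not be honored)
   The first two hold even for NaN operands, since an unordered
   comparison is false either way; the latter two would wrongly yield
   true for a NaN X, hence the HONOR_NANS guards. */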
12837 /* If we are comparing an ABS_EXPR with a constant, we can
12838 convert all the cases into explicit comparisons, but they may
12839 well not be faster than doing the ABS and one comparison.
12840 But ABS (X) <= C is a range comparison, which becomes a subtraction
12841 and a comparison, and is probably faster. */
12842 if (code == LE_EXPR
12843 && TREE_CODE (arg1) == INTEGER_CST
12844 && TREE_CODE (arg0) == ABS_EXPR
12845 && ! TREE_SIDE_EFFECTS (arg0)
12846 && (tem = negate_expr (arg1)) != 0
12847 && TREE_CODE (tem) == INTEGER_CST
12848 && !TREE_OVERFLOW (tem))
12849 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12850 build2 (GE_EXPR, type,
12851 TREE_OPERAND (arg0, 0), tem),
12852 build2 (LE_EXPR, type,
12853 TREE_OPERAND (arg0, 0), arg1));
12855 /* Convert ABS_EXPR<x> >= 0 to true. */
12856 strict_overflow_p = false;
12857 if (code == GE_EXPR
12858 && (integer_zerop (arg1)
12859 || (! HONOR_NANS (arg0)
12860 && real_zerop (arg1)))
12861 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12863 if (strict_overflow_p)
12864 fold_overflow_warning (("assuming signed overflow does not occur "
12865 "when simplifying comparison of "
12866 "absolute value and zero"),
12867 WARN_STRICT_OVERFLOW_CONDITIONAL);
12868 return omit_one_operand_loc (loc, type,
12869 constant_boolean_node (true, type),
12870 arg0);
12873 /* Convert ABS_EXPR<x> < 0 to false. */
12874 strict_overflow_p = false;
12875 if (code == LT_EXPR
12876 && (integer_zerop (arg1) || real_zerop (arg1))
12877 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12879 if (strict_overflow_p)
12880 fold_overflow_warning (("assuming signed overflow does not occur "
12881 "when simplifying comparison of "
12882 "absolute value and zero"),
12883 WARN_STRICT_OVERFLOW_CONDITIONAL);
12884 return omit_one_operand_loc (loc, type,
12885 constant_boolean_node (false, type),
12886 arg0);
12889 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12890 and similarly for >= into !=. */
12891 if ((code == LT_EXPR || code == GE_EXPR)
12892 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12893 && TREE_CODE (arg1) == LSHIFT_EXPR
12894 && integer_onep (TREE_OPERAND (arg1, 0)))
12895 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12896 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12897 TREE_OPERAND (arg1, 1)),
12898 build_zero_cst (TREE_TYPE (arg0)));
12900 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12901 otherwise Y might be >= # of bits in X's type and thus e.g.
12902 (unsigned char) (1 << Y) for Y 15 might be 0.
12903 If the cast is widening, then 1 << Y should have unsigned type,
12904 otherwise if Y is the number of bits in the signed shift type minus 1,
12905 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12906 31 might be 0xffffffff80000000. */
12907 if ((code == LT_EXPR || code == GE_EXPR)
12908 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12909 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12910 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12911 && CONVERT_EXPR_P (arg1)
12912 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12913 && (element_precision (TREE_TYPE (arg1))
12914 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12915 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12916 || (element_precision (TREE_TYPE (arg1))
12917 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12918 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12920 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12921 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12922 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12923 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12924 build_zero_cst (TREE_TYPE (arg0)));
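/* Example of the unsigned range test above: for unsigned X,
     X < (1 << Y) holds exactly when (X >> Y) == 0
     X >= (1 << Y) holds exactly when (X >> Y) != 0
   because shifting X right by Y discards exactly the bits that may be
   set while X is still below 1 << Y. The cast variant additionally
   checks, as the comment explains, that the conversion can neither
   truncate the shifted one nor sign-extend it into spurious high
   bits. */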
12927 return NULL_TREE;
12929 case UNORDERED_EXPR:
12930 case ORDERED_EXPR:
12931 case UNLT_EXPR:
12932 case UNLE_EXPR:
12933 case UNGT_EXPR:
12934 case UNGE_EXPR:
12935 case UNEQ_EXPR:
12936 case LTGT_EXPR:
12937 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12939 tree targ0 = strip_float_extensions (arg0);
12940 tree targ1 = strip_float_extensions (arg1);
12941 tree newtype = TREE_TYPE (targ0);
12943 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12944 newtype = TREE_TYPE (targ1);
12946 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
12947 && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
12948 return fold_build2_loc (loc, code, type,
12949 fold_convert_loc (loc, newtype, targ0),
12950 fold_convert_loc (loc, newtype, targ1));
12953 return NULL_TREE;
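/* Example of the extension stripping above: for any of the unordered
   comparison codes handled here,
     (double) f1 CMP (double) f2 (f1, f2 of type float)
   folds to
     f1 CMP f2
   since widening both operands to a common wider format changes
   neither their values nor their ordering. */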
12955 case COMPOUND_EXPR:
12956 /* When pedantic, a compound expression can be neither an lvalue
12957 nor an integer constant expression. */
12958 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12959 return NULL_TREE;
12960 /* Don't let (0, 0) be a null pointer constant. */
12961 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12962 : fold_convert_loc (loc, type, arg1);
12963 return tem;
12965 default:
12966 return NULL_TREE;
12967 } /* switch (code) */
12970 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12971 ((A & N) + B) & M -> (A + B) & M
12972 Similarly if (N & M) == 0,
12973 ((A | N) + B) & M -> (A + B) & M
12974 and for - instead of + (or unary - instead of +)
12975 and/or ^ instead of |.
12976 If B is constant and (B & M) == 0, fold into A & M.
12978 This function is a helper for match.pd patterns. Return the non-NULL
12979 type in which the simplified operation should be performed, but only
12980 if some optimization is possible.
12982 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12983 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12984 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12985 +/-. */
12986 tree
12987 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12988 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12989 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12990 tree *pmop)
12992 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12993 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12994 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12995 if (~cst1 == 0
12996 || (cst1 & (cst1 + 1)) != 0
12997 || !INTEGRAL_TYPE_P (type)
12998 || (!TYPE_OVERFLOW_WRAPS (type)
12999 && TREE_CODE (type) != INTEGER_TYPE)
13000 || (wi::max_value (type) & cst1) != cst1)
13001 return NULL_TREE;
13003 enum tree_code codes[2] = { code00, code01 };
13004 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
13005 int which = 0;
13006 wide_int cst0;
13008 /* Now we know that arg0 is (C + D) or (C - D) or -C and
13009 arg1 (M) is == (1LL << cst) - 1.
13010 Store C into PMOP[0] and D into PMOP[1]. */
13011 pmop[0] = arg00;
13012 pmop[1] = arg01;
13013 which = code != NEGATE_EXPR;
13015 for (; which >= 0; which--)
13016 switch (codes[which])
13018 case BIT_AND_EXPR:
13019 case BIT_IOR_EXPR:
13020 case BIT_XOR_EXPR:
13021 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
13022 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
13023 if (codes[which] == BIT_AND_EXPR)
13025 if (cst0 != cst1)
13026 break;
13028 else if (cst0 != 0)
13029 break;
13030 /* If C or D is of the form (A & N) where
13031 (N & M) == M, or of the form (A | N) or
13032 (A ^ N) where (N & M) == 0, replace it with A. */
13033 pmop[which] = arg0xx[2 * which];
13034 break;
13035 case ERROR_MARK:
13036 if (TREE_CODE (pmop[which]) != INTEGER_CST)
13037 break;
13038 /* If C or D is a constant N where (N & M) == 0, it can be
13039 omitted (replaced with 0). */
13040 if ((code == PLUS_EXPR
13041 || (code == MINUS_EXPR && which == 0))
13042 && (cst1 & wi::to_wide (pmop[which])) == 0)
13043 pmop[which] = build_int_cst (type, 0);
13044 /* Similarly, with C - N where (-N & M) == 0. */
13045 if (code == MINUS_EXPR
13046 && which == 1
13047 && (cst1 & -wi::to_wide (pmop[which])) == 0)
13048 pmop[which] = build_int_cst (type, 0);
13049 break;
13050 default:
13051 gcc_unreachable ();
13054 /* Only build anything new if we optimized one or both arguments above. */
13055 if (pmop[0] == arg00 && pmop[1] == arg01)
13056 return NULL_TREE;
13058 if (TYPE_OVERFLOW_WRAPS (type))
13059 return type;
13060 else
13061 return unsigned_type_for (type);
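/* Worked example for fold_bit_and_mask, with M == 0x0f, i.e.
   M == (1 << 4) - 1:
     ((A & 0xff) + B) & 0x0f -> (A + B) & 0x0f since (0xff & M) == M
     ((A | 0xf0) + B) & 0x0f -> (A + B) & 0x0f since (0xf0 & M) == 0
     (A + 0x10) & 0x0f -> A & 0x0f since (0x10 & M) == 0
   Carries only propagate towards more significant bits, so with
   wrapping arithmetic the discarded high bits cannot influence the
   low bits that survive the final mask. */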
13064 /* Used by contains_label_p and contains_label_1. */
13066 struct contains_label_data
13068 hash_set<tree> *pset;
13069 bool inside_switch_p;
13072 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13073 a LABEL_EXPR or CASE_LABEL_EXPR not inside another SWITCH_EXPR; otherwise
13074 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13076 static tree
13077 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
13079 contains_label_data *d = (contains_label_data *) data;
13080 switch (TREE_CODE (*tp))
13082 case LABEL_EXPR:
13083 return *tp;
13085 case CASE_LABEL_EXPR:
13086 if (!d->inside_switch_p)
13087 return *tp;
13088 return NULL_TREE;
13090 case SWITCH_EXPR:
13091 if (!d->inside_switch_p)
13093 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
13094 return *tp;
13095 d->inside_switch_p = true;
13096 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
13097 return *tp;
13098 d->inside_switch_p = false;
13099 *walk_subtrees = 0;
13101 return NULL_TREE;
13103 case GOTO_EXPR:
13104 *walk_subtrees = 0;
13105 return NULL_TREE;
13107 default:
13108 return NULL_TREE;
13112 /* Return whether the sub-tree ST contains a label which is accessible from
13113 outside the sub-tree. */
13115 static bool
13116 contains_label_p (tree st)
13118 hash_set<tree> pset;
13119 contains_label_data data = { &pset, false };
13120 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
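/* Example of why this matters: when folding a constant condition, an
   arm such as
     c ? x : ({ lab: y; })
   must not be discarded if a goto outside that arm can still target
   'lab'. Case labels inside a nested SWITCH_EXPR are exempt, since
   they are only reachable through that switch's dispatch. */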
13123 /* Fold a ternary expression of code CODE and type TYPE with operands
13124 OP0, OP1, and OP2. Return the folded expression if folding is
13125 successful. Otherwise, return NULL_TREE. */
13127 tree
13128 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13129 tree op0, tree op1, tree op2)
13131 tree tem;
13132 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13133 enum tree_code_class kind = TREE_CODE_CLASS (code);
13135 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13136 && TREE_CODE_LENGTH (code) == 3);
13138 /* If this is a commutative operation, and OP0 is a constant, move it
13139 to OP1 to reduce the number of tests below. */
13140 if (commutative_ternary_tree_code (code)
13141 && tree_swap_operands_p (op0, op1))
13142 return fold_build3_loc (loc, code, type, op1, op0, op2);
13144 tem = generic_simplify (loc, code, type, op0, op1, op2);
13145 if (tem)
13146 return tem;
13148 /* Strip any conversions that don't change the mode. This is safe
13149 for every expression, except for a comparison expression because
13150 its signedness is derived from its operands. So, in the latter
13151 case, only strip conversions that don't change the signedness.
13153 Note that this is done as an internal manipulation within the
13154 constant folder, in order to find the simplest representation of
13155 the arguments so that their form can be studied. In any case,
13156 the appropriate type conversions should be put back in the tree
13157 that will get out of the constant folder. */
13158 if (op0)
13160 arg0 = op0;
13161 STRIP_NOPS (arg0);
13164 if (op1)
13166 arg1 = op1;
13167 STRIP_NOPS (arg1);
13170 if (op2)
13172 arg2 = op2;
13173 STRIP_NOPS (arg2);
13176 switch (code)
13178 case COMPONENT_REF:
13179 if (TREE_CODE (arg0) == CONSTRUCTOR
13180 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13182 unsigned HOST_WIDE_INT idx;
13183 tree field, value;
13184 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13185 if (field == arg1)
13186 return value;
13188 return NULL_TREE;
13190 case COND_EXPR:
13191 case VEC_COND_EXPR:
13192 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13193 so all simple results must be passed through pedantic_non_lvalue. */
13194 if (TREE_CODE (arg0) == INTEGER_CST)
13196 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13197 tem = integer_zerop (arg0) ? op2 : op1;
13198 /* Only optimize constant conditions when the selected branch
13199 has the same type as the COND_EXPR. This avoids optimizing
13200 away "c ? x : throw", where the throw has a void type.
13201 Avoid throwing away the operand that contains a label. */
13202 if ((!TREE_SIDE_EFFECTS (unused_op)
13203 || !contains_label_p (unused_op))
13204 && (! VOID_TYPE_P (TREE_TYPE (tem))
13205 || VOID_TYPE_P (type)))
13206 return protected_set_expr_location_unshare (tem, loc);
13207 return NULL_TREE;
13209 else if (TREE_CODE (arg0) == VECTOR_CST)
13211 unsigned HOST_WIDE_INT nelts;
13212 if ((TREE_CODE (arg1) == VECTOR_CST
13213 || TREE_CODE (arg1) == CONSTRUCTOR)
13214 && (TREE_CODE (arg2) == VECTOR_CST
13215 || TREE_CODE (arg2) == CONSTRUCTOR)
13216 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13218 vec_perm_builder sel (nelts, nelts, 1);
13219 for (unsigned int i = 0; i < nelts; i++)
13221 tree val = VECTOR_CST_ELT (arg0, i);
13222 if (integer_all_onesp (val))
13223 sel.quick_push (i);
13224 else if (integer_zerop (val))
13225 sel.quick_push (nelts + i);
13226 else /* Currently unreachable. */
13227 return NULL_TREE;
13229 vec_perm_indices indices (sel, 2, nelts);
13230 tree t = fold_vec_perm (type, arg1, arg2, indices);
13231 if (t != NULL_TREE)
13232 return t;
13236 /* If we have A op B ? A : C, we may be able to convert this to a
13237 simpler expression, depending on the operation and the values
13238 of B and C. Signed zeros prevent all of these transformations,
13239 for reasons given above each one.
13241 Also try swapping the arguments and inverting the conditional. */
13242 if (COMPARISON_CLASS_P (arg0)
13243 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13244 && !HONOR_SIGNED_ZEROS (op1))
13246 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13247 TREE_OPERAND (arg0, 0),
13248 TREE_OPERAND (arg0, 1),
13249 op1, op2);
13250 if (tem)
13251 return tem;
13254 if (COMPARISON_CLASS_P (arg0)
13255 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13256 && !HONOR_SIGNED_ZEROS (op2))
13258 enum tree_code comp_code = TREE_CODE (arg0);
13259 tree arg00 = TREE_OPERAND (arg0, 0);
13260 tree arg01 = TREE_OPERAND (arg0, 1);
13261 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13262 if (comp_code != ERROR_MARK)
13263 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13264 arg00,
13265 arg01,
13266 op2, op1);
13267 if (tem)
13268 return tem;
13271 /* If the second operand is simpler than the third, swap them
13272 since that produces better jump optimization results. */
13273 if (truth_value_p (TREE_CODE (arg0))
13274 && tree_swap_operands_p (op1, op2))
13276 location_t loc0 = expr_location_or (arg0, loc);
13277 /* See if this can be inverted. If it can't, possibly because
13278 it was a floating-point inequality comparison, don't do
13279 anything. */
13280 tem = fold_invert_truthvalue (loc0, arg0);
13281 if (tem)
13282 return fold_build3_loc (loc, code, type, tem, op2, op1);
13285 /* Convert A ? 1 : 0 to simply A. */
13286 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13287 : (integer_onep (op1)
13288 && !VECTOR_TYPE_P (type)))
13289 && integer_zerop (op2)
13290 /* If we try to convert OP0 to our type, the
13291 call to fold will try to move the conversion inside
13292 a COND, which will recurse. In that case, the COND_EXPR
13293 is probably the best choice, so leave it alone. */
13294 && type == TREE_TYPE (arg0))
13295 return protected_set_expr_location_unshare (arg0, loc);
13297 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13298 over COND_EXPR in cases such as floating point comparisons. */
13299 if (integer_zerop (op1)
13300 && code == COND_EXPR
13301 && integer_onep (op2)
13302 && !VECTOR_TYPE_P (type)
13303 && truth_value_p (TREE_CODE (arg0)))
13304 return fold_convert_loc (loc, type,
13305 invert_truthvalue_loc (loc, arg0));
13307 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13308 if (TREE_CODE (arg0) == LT_EXPR
13309 && integer_zerop (TREE_OPERAND (arg0, 1))
13310 && integer_zerop (op2)
13311 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13313 /* sign_bit_p looks through both zero and sign extensions,
13314 but for this optimization only sign extensions are
13315 usable. */
13316 tree tem2 = TREE_OPERAND (arg0, 0);
13317 while (tem != tem2)
13319 if (TREE_CODE (tem2) != NOP_EXPR
13320 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13322 tem = NULL_TREE;
13323 break;
13325 tem2 = TREE_OPERAND (tem2, 0);
13327 /* sign_bit_p only checks ARG1 bits within A's precision.
13328 If <sign bit of A> has wider type than A, bits outside
13329 of A's precision in <sign bit of A> need to be checked.
13330 If they are all 0, this optimization needs to be done
13331 in unsigned A's type; if they are all 1, in signed A's type;
13332 otherwise this can't be done. */
13333 if (tem
13334 && TYPE_PRECISION (TREE_TYPE (tem))
13335 < TYPE_PRECISION (TREE_TYPE (arg1))
13336 && TYPE_PRECISION (TREE_TYPE (tem))
13337 < TYPE_PRECISION (type))
13339 int inner_width, outer_width;
13340 tree tem_type;
13342 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13343 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13344 if (outer_width > TYPE_PRECISION (type))
13345 outer_width = TYPE_PRECISION (type);
13347 wide_int mask = wi::shifted_mask
13348 (inner_width, outer_width - inner_width, false,
13349 TYPE_PRECISION (TREE_TYPE (arg1)));
13351 wide_int common = mask & wi::to_wide (arg1);
13352 if (common == mask)
13354 tem_type = signed_type_for (TREE_TYPE (tem));
13355 tem = fold_convert_loc (loc, tem_type, tem);
13357 else if (common == 0)
13359 tem_type = unsigned_type_for (TREE_TYPE (tem));
13360 tem = fold_convert_loc (loc, tem_type, tem);
13362 else
13363 tem = NULL;
13366 if (tem)
13367 return
13368 fold_convert_loc (loc, type,
13369 fold_build2_loc (loc, BIT_AND_EXPR,
13370 TREE_TYPE (tem), tem,
13371 fold_convert_loc (loc,
13372 TREE_TYPE (tem),
13373 arg1)));
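/* Example of the sign-bit selection above: with a 32-bit int A,
     A < 0 ? 0x80000000 : 0
   is simply
     A & 0x80000000
   because the selected constant is exactly A's sign bit. The loop
   over NOP_EXPRs above ensures that only sign-extending conversions
   sit between A and the tested operand, since a zero extension would
   not propagate the sign bit and would break the equivalence. */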
13376 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13377 already handled above. */
13378 if (TREE_CODE (arg0) == BIT_AND_EXPR
13379 && integer_onep (TREE_OPERAND (arg0, 1))
13380 && integer_zerop (op2)
13381 && integer_pow2p (arg1))
13383 tree tem = TREE_OPERAND (arg0, 0);
13384 STRIP_NOPS (tem);
13385 if (TREE_CODE (tem) == RSHIFT_EXPR
13386 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13387 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13388 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13389 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13390 fold_convert_loc (loc, type,
13391 TREE_OPERAND (tem, 0)),
13392 op1);
13395 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13396 is probably obsolete because the first operand should be a
13397 truth value (that's why we have the two cases above), but let's
13398 leave it in until we can confirm this for all front-ends. */
13399 if (integer_zerop (op2)
13400 && TREE_CODE (arg0) == NE_EXPR
13401 && integer_zerop (TREE_OPERAND (arg0, 1))
13402 && integer_pow2p (arg1)
13403 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13404 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13405 arg1, OEP_ONLY_CONST)
13406 /* operand_equal_p compares just value, not precision, so e.g.
13407 arg1 could be 8-bit -128 and be a power of two, but the BIT_AND_EXPR
13408 second operand could be 32-bit -128, which is not a power of two
13409 (or vice versa). */
13410 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13411 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13413 /* Disable the transformations below for vectors, since
13414 fold_binary_op_with_conditional_arg may undo them immediately,
13415 yielding an infinite loop. */
13416 if (code == VEC_COND_EXPR)
13417 return NULL_TREE;
13419 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13420 if (integer_zerop (op2)
13421 && truth_value_p (TREE_CODE (arg0))
13422 && truth_value_p (TREE_CODE (arg1))
13423 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13424 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13425 : TRUTH_ANDIF_EXPR,
13426 type, fold_convert_loc (loc, type, arg0), op1);
13428 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13429 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13430 && truth_value_p (TREE_CODE (arg0))
13431 && truth_value_p (TREE_CODE (arg1))
13432 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13434 location_t loc0 = expr_location_or (arg0, loc);
13435 /* Only perform the transformation if ARG0 is easily inverted. */
13436 tem = fold_invert_truthvalue (loc0, arg0);
13437 if (tem)
13438 return fold_build2_loc (loc, code == VEC_COND_EXPR
13439 ? BIT_IOR_EXPR
13440 : TRUTH_ORIF_EXPR,
13441 type, fold_convert_loc (loc, type, tem),
13442 op1);
13445 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13446 if (integer_zerop (arg1)
13447 && truth_value_p (TREE_CODE (arg0))
13448 && truth_value_p (TREE_CODE (op2))
13449 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13451 location_t loc0 = expr_location_or (arg0, loc);
13452 /* Only perform the transformation if ARG0 is easily inverted. */
13453 tem = fold_invert_truthvalue (loc0, arg0);
13454 if (tem)
13455 return fold_build2_loc (loc, code == VEC_COND_EXPR
13456 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13457 type, fold_convert_loc (loc, type, tem),
13458 op2);
13461 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13462 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13463 && truth_value_p (TREE_CODE (arg0))
13464 && truth_value_p (TREE_CODE (op2))
13465 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13466 return fold_build2_loc (loc, code == VEC_COND_EXPR
13467 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13468 type, fold_convert_loc (loc, type, arg0), op2);
13470 return NULL_TREE;
13472 case CALL_EXPR:
13473 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13474 of fold_ternary on them. */
13475 gcc_unreachable ();
13477 case BIT_FIELD_REF:
13478 if (TREE_CODE (arg0) == VECTOR_CST
13479 && (type == TREE_TYPE (TREE_TYPE (arg0))
13480 || (VECTOR_TYPE_P (type)
13481 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13482 && tree_fits_uhwi_p (op1)
13483 && tree_fits_uhwi_p (op2))
13485 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13486 unsigned HOST_WIDE_INT width
13487 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13488 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13489 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13490 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13492 if (n != 0
13493 && (idx % width) == 0
13494 && (n % width) == 0
13495 && known_le ((idx + n) / width,
13496 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13498 idx = idx / width;
13499 n = n / width;
13501 if (TREE_CODE (arg0) == VECTOR_CST)
13503 if (n == 1)
13505 tem = VECTOR_CST_ELT (arg0, idx);
13506 if (VECTOR_TYPE_P (type))
13507 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13508 return tem;
13511 tree_vector_builder vals (type, n, 1);
13512 for (unsigned i = 0; i < n; ++i)
13513 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13514 return vals.build ();
13519 /* On constants we can use native encode/interpret to constant
13520 fold (nearly) all BIT_FIELD_REFs. */
13521 if (CONSTANT_CLASS_P (arg0)
13522 && can_native_interpret_type_p (type)
13523 && BITS_PER_UNIT == 8
13524 && tree_fits_uhwi_p (op1)
13525 && tree_fits_uhwi_p (op2))
13527 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13528 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13529 /* Limit us to a reasonable amount of work. To relax the
13530 other limitations we need bit-shifting of the buffer
13531 and rounding up the size. */
13532 if (bitpos % BITS_PER_UNIT == 0
13533 && bitsize % BITS_PER_UNIT == 0
13534 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13536 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13537 unsigned HOST_WIDE_INT len
13538 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13539 bitpos / BITS_PER_UNIT);
13540 if (len > 0
13541 && len * BITS_PER_UNIT >= bitsize)
13543 tree v = native_interpret_expr (type, b,
13544 bitsize / BITS_PER_UNIT);
13545 if (v)
13546 return v;
13551 return NULL_TREE;
13553 case VEC_PERM_EXPR:
13554 /* Perform constant folding of VEC_PERM_EXPR. */
13555 if (TREE_CODE (arg2) == VECTOR_CST
13556 && TREE_CODE (op0) == VECTOR_CST
13557 && TREE_CODE (op1) == VECTOR_CST)
13559 /* Build a vector of integers from the tree mask. */
13560 vec_perm_builder builder;
13561 if (!tree_to_vec_perm_builder (&builder, arg2))
13562 return NULL_TREE;
13564 /* Create a vec_perm_indices for the integer vector. */
13565 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13566 bool single_arg = (op0 == op1);
13567 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13568 return fold_vec_perm (type, op0, op1, sel);
13570 return NULL_TREE;
13572 case BIT_INSERT_EXPR:
13573 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13574 if (TREE_CODE (arg0) == INTEGER_CST
13575 && TREE_CODE (arg1) == INTEGER_CST)
13577 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13578 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13579 wide_int tem = (wi::to_wide (arg0)
13580 & wi::shifted_mask (bitpos, bitsize, true,
13581 TYPE_PRECISION (type)));
13582 wide_int tem2
13583 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13584 bitsize), bitpos);
13585 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13587 else if (TREE_CODE (arg0) == VECTOR_CST
13588 && CONSTANT_CLASS_P (arg1)
13589 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13590 TREE_TYPE (arg1)))
13592 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13593 unsigned HOST_WIDE_INT elsize
13594 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13595 if (bitpos % elsize == 0)
13597 unsigned k = bitpos / elsize;
13598 unsigned HOST_WIDE_INT nelts;
13599 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13600 return arg0;
13601 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13603 tree_vector_builder elts (type, nelts, 1);
13604 elts.quick_grow (nelts);
13605 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13606 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13607 return elts.build ();
13611 return NULL_TREE;
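/* Worked example of the integer BIT_INSERT_EXPR folding above:
   inserting the 8-bit constant 0xab into 0x12345678 at bit position 8
   first clears the field with an inverted shifted mask,
     0x12345678 & ~(0xff << 8) == 0x12340078
   and then ORs in the zero-extended, shifted replacement,
     0x12340078 | (0xab << 8) == 0x1234ab78 */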
13613 default:
13614 return NULL_TREE;
13615 } /* switch (code) */
13618 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13619 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
13620 constructor element index of the value returned. If the element is
13621 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13622 the index of the element after the ACCESS_INDEX position (which
13623 may be outside of the CTOR array). */
13625 tree
13626 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13627 unsigned *ctor_idx)
13629 tree index_type = NULL_TREE;
13630 signop index_sgn = UNSIGNED;
13631 offset_int low_bound = 0;
13633 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13635 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13636 if (domain_type && TYPE_MIN_VALUE (domain_type))
13638 /* Static constructors for variably sized objects make no sense. */
13639 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13640 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13641 /* ??? When it is obvious that the range is signed, treat it so. */
13642 if (TYPE_UNSIGNED (index_type)
13643 && TYPE_MAX_VALUE (domain_type)
13644 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13645 TYPE_MIN_VALUE (domain_type)))
13647 index_sgn = SIGNED;
13648 low_bound
13649 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13650 SIGNED);
13652 else
13654 index_sgn = TYPE_SIGN (index_type);
13655 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13660 if (index_type)
13661 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13662 index_sgn);
13664 offset_int index = low_bound;
13665 if (index_type)
13666 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13668 offset_int max_index = index;
13669 unsigned cnt;
13670 tree cfield, cval;
13671 bool first_p = true;
13673 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13675 /* Array constructor might explicitly set index, or specify a range,
13676 or leave the index NULL, meaning that it is the next index after the
13677 previous one. */
13678 if (cfield)
13680 if (TREE_CODE (cfield) == INTEGER_CST)
13681 max_index = index
13682 = offset_int::from (wi::to_wide (cfield), index_sgn);
13683 else
13685 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13686 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13687 index_sgn);
13688 max_index
13689 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13690 index_sgn);
13691 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13694 else if (!first_p)
13696 index = max_index + 1;
13697 if (index_type)
13698 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13699 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13700 max_index = index;
13702 else
13703 first_p = false;
13705 /* Do we have a match? */
13706 if (wi::cmp (access_index, index, index_sgn) >= 0)
13708 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13710 if (ctor_idx)
13711 *ctor_idx = cnt;
13712 return cval;
13715 else if (in_gimple_form)
13716 /* We're past the element we're searching for. Note that during
13717 parsing the elements might not be sorted.
13718 ??? We should use a binary search and a flag on the
13719 CONSTRUCTOR as to whether elements are sorted in declaration
13720 order. */
13721 break;
13723 if (ctor_idx)
13724 *ctor_idx = cnt;
13725 return NULL_TREE;
13728 /* Perform constant folding and related simplification of EXPR.
13729 The related simplifications include x*1 => x, x*0 => 0, etc.,
13730 and application of the associative law.
13731 NOP_EXPR conversions may be removed freely (as long as we
13732 are careful not to change the type of the overall expression).
13733 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13734 but we can constant-fold them if they have constant operands. */
13736 #ifdef ENABLE_FOLD_CHECKING
13737 # define fold(x) fold_1 (x)
13738 static tree fold_1 (tree);
13739 static
13740 #endif
13741 tree
13742 fold (tree expr)
13744 const tree t = expr;
13745 enum tree_code code = TREE_CODE (t);
13746 enum tree_code_class kind = TREE_CODE_CLASS (code);
13747 tree tem;
13748 location_t loc = EXPR_LOCATION (expr);
13750 /* Return right away if a constant. */
13751 if (kind == tcc_constant)
13752 return t;
13754 /* CALL_EXPR-like objects with variable numbers of operands are
13755 treated specially. */
13756 if (kind == tcc_vl_exp)
13758 if (code == CALL_EXPR)
13760 tem = fold_call_expr (loc, expr, false);
13761 return tem ? tem : expr;
13763 return expr;
13766 if (IS_EXPR_CODE_CLASS (kind))
13768 tree type = TREE_TYPE (t);
13769 tree op0, op1, op2;
13771 switch (TREE_CODE_LENGTH (code))
13773 case 1:
13774 op0 = TREE_OPERAND (t, 0);
13775 tem = fold_unary_loc (loc, code, type, op0);
13776 return tem ? tem : expr;
13777 case 2:
13778 op0 = TREE_OPERAND (t, 0);
13779 op1 = TREE_OPERAND (t, 1);
13780 tem = fold_binary_loc (loc, code, type, op0, op1);
13781 return tem ? tem : expr;
13782 case 3:
13783 op0 = TREE_OPERAND (t, 0);
13784 op1 = TREE_OPERAND (t, 1);
13785 op2 = TREE_OPERAND (t, 2);
13786 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13787 return tem ? tem : expr;
13788 default:
13789 break;
13793 switch (code)
13795 case ARRAY_REF:
13797 tree op0 = TREE_OPERAND (t, 0);
13798 tree op1 = TREE_OPERAND (t, 1);
13800 if (TREE_CODE (op1) == INTEGER_CST
13801 && TREE_CODE (op0) == CONSTRUCTOR
13802 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13804 tree val = get_array_ctor_element_at_index (op0,
13805 wi::to_offset (op1));
13806 if (val)
13807 return val;
13810 return t;
13813 /* Return a VECTOR_CST if possible. */
13814 case CONSTRUCTOR:
13816 tree type = TREE_TYPE (t);
13817 if (TREE_CODE (type) != VECTOR_TYPE)
13818 return t;
13820 unsigned i;
13821 tree val;
13822 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13823 if (! CONSTANT_CLASS_P (val))
13824 return t;
13826 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13829 case CONST_DECL:
13830 return fold (DECL_INITIAL (t));
13832 default:
13833 return t;
13834 } /* switch (code) */
13837 #ifdef ENABLE_FOLD_CHECKING
13838 #undef fold
13840 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13841 hash_table<nofree_ptr_hash<const tree_node> > *);
13842 static void fold_check_failed (const_tree, const_tree);
13843 void print_fold_checksum (const_tree);
13845 /* When --enable-checking=fold, compute a digest of EXPR before
13846 and after the actual fold call to verify that fold did not
13847 accidentally change the original expr. */
13849 tree
13850 fold (tree expr)
13852 tree ret;
13853 struct md5_ctx ctx;
13854 unsigned char checksum_before[16], checksum_after[16];
13855 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13857 md5_init_ctx (&ctx);
13858 fold_checksum_tree (expr, &ctx, &ht);
13859 md5_finish_ctx (&ctx, checksum_before);
13860 ht.empty ();
13862 ret = fold_1 (expr);
13864 md5_init_ctx (&ctx);
13865 fold_checksum_tree (expr, &ctx, &ht);
13866 md5_finish_ctx (&ctx, checksum_after);
13868 if (memcmp (checksum_before, checksum_after, 16))
13869 fold_check_failed (expr, ret);
13871 return ret;
13874 void
13875 print_fold_checksum (const_tree expr)
13877 struct md5_ctx ctx;
13878 unsigned char checksum[16], cnt;
13879 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13881 md5_init_ctx (&ctx);
13882 fold_checksum_tree (expr, &ctx, &ht);
13883 md5_finish_ctx (&ctx, checksum);
13884 for (cnt = 0; cnt < 16; ++cnt)
13885 fprintf (stderr, "%02x", checksum[cnt]);
13886 putc ('\n', stderr);
13889 static void
13890 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13892 internal_error ("fold check: original tree changed by fold");
13895 static void
13896 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13897 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13899 const tree_node **slot;
13900 enum tree_code code;
13901 union tree_node *buf;
13902 int i, len;
13904 recursive_label:
13905 if (expr == NULL)
13906 return;
13907 slot = ht->find_slot (expr, INSERT);
13908 if (*slot != NULL)
13909 return;
13910 *slot = expr;
13911 code = TREE_CODE (expr);
13912 if (TREE_CODE_CLASS (code) == tcc_declaration
13913 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13915 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13916 size_t sz = tree_size (expr);
13917 buf = XALLOCAVAR (union tree_node, sz);
13918 memcpy ((char *) buf, expr, sz);
13919 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13920 buf->decl_with_vis.symtab_node = NULL;
13921 buf->base.nowarning_flag = 0;
13922 expr = (tree) buf;
13924 else if (TREE_CODE_CLASS (code) == tcc_type
13925 && (TYPE_POINTER_TO (expr)
13926 || TYPE_REFERENCE_TO (expr)
13927 || TYPE_CACHED_VALUES_P (expr)
13928 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13929 || TYPE_NEXT_VARIANT (expr)
13930 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13932 /* Allow these fields to be modified. */
13933 tree tmp;
13934 size_t sz = tree_size (expr);
13935 buf = XALLOCAVAR (union tree_node, sz);
13936 memcpy ((char *) buf, expr, sz);
13937 expr = tmp = (tree) buf;
13938 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13939 TYPE_POINTER_TO (tmp) = NULL;
13940 TYPE_REFERENCE_TO (tmp) = NULL;
13941 TYPE_NEXT_VARIANT (tmp) = NULL;
13942 TYPE_ALIAS_SET (tmp) = -1;
13943 if (TYPE_CACHED_VALUES_P (tmp))
13945 TYPE_CACHED_VALUES_P (tmp) = 0;
13946 TYPE_CACHED_VALUES (tmp) = NULL;
13949 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13951 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13952 that and change builtins.cc etc. instead - see PR89543. */
13953 size_t sz = tree_size (expr);
13954 buf = XALLOCAVAR (union tree_node, sz);
13955 memcpy ((char *) buf, expr, sz);
13956 buf->base.nowarning_flag = 0;
13957 expr = (tree) buf;
13959 md5_process_bytes (expr, tree_size (expr), ctx);
13960 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13961 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13962 if (TREE_CODE_CLASS (code) != tcc_type
13963 && TREE_CODE_CLASS (code) != tcc_declaration
13964 && code != TREE_LIST
13965 && code != SSA_NAME
13966 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13967 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13968 switch (TREE_CODE_CLASS (code))
13970 case tcc_constant:
13971 switch (code)
13973 case STRING_CST:
13974 md5_process_bytes (TREE_STRING_POINTER (expr),
13975 TREE_STRING_LENGTH (expr), ctx);
13976 break;
13977 case COMPLEX_CST:
13978 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13979 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13980 break;
13981 case VECTOR_CST:
13982 len = vector_cst_encoded_nelts (expr);
13983 for (i = 0; i < len; ++i)
13984 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13985 break;
13986 default:
13987 break;
13989 break;
13990 case tcc_exceptional:
13991 switch (code)
13993 case TREE_LIST:
13994 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13995 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13996 expr = TREE_CHAIN (expr);
13997 goto recursive_label;
13998 break;
13999 case TREE_VEC:
14000 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14001 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14002 break;
14003 default:
14004 break;
14006 break;
14007 case tcc_expression:
14008 case tcc_reference:
14009 case tcc_comparison:
14010 case tcc_unary:
14011 case tcc_binary:
14012 case tcc_statement:
14013 case tcc_vl_exp:
14014 len = TREE_OPERAND_LENGTH (expr);
14015 for (i = 0; i < len; ++i)
14016 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14017 break;
14018 case tcc_declaration:
14019 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14020 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14021 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14023 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14024 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14025 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14026 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14027 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14030 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14032 if (TREE_CODE (expr) == FUNCTION_DECL)
14034 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14035 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14037 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14039 break;
14040 case tcc_type:
14041 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14042 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14043 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14044 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14045 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14046 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14047 if (INTEGRAL_TYPE_P (expr)
14048 || SCALAR_FLOAT_TYPE_P (expr))
14050 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14051 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14053 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14054 if (RECORD_OR_UNION_TYPE_P (expr))
14055 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14056 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14057 break;
14058 default:
14059 break;
14063 /* Helper function for outputting the checksum of a tree T. When
14064 debugging with gdb, you can "define mynext" to be "next" followed
14065 by "call debug_fold_checksum (op0)", then just trace down till the
14066 outputs differ. */
14068 DEBUG_FUNCTION void
14069 debug_fold_checksum (const_tree t)
14071 int i;
14072 unsigned char checksum[16];
14073 struct md5_ctx ctx;
14074 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14076 md5_init_ctx (&ctx);
14077 fold_checksum_tree (t, &ctx, &ht);
14078 md5_finish_ctx (&ctx, checksum);
14079 ht.empty ();
14081 for (i = 0; i < 16; i++)
14082 fprintf (stderr, "%d ", checksum[i]);
14084 fprintf (stderr, "\n");
14087 #endif
14089 /* Fold a unary tree expression with code CODE of type TYPE with an
14090 operand OP0. LOC is the location of the resulting expression.
14091 Return a folded expression if successful. Otherwise, return a tree
14092 expression with code CODE of type TYPE with an operand OP0. */
14094 tree
14095 fold_build1_loc (location_t loc,
14096 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14098 tree tem;
14099 #ifdef ENABLE_FOLD_CHECKING
14100 unsigned char checksum_before[16], checksum_after[16];
14101 struct md5_ctx ctx;
14102 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14104 md5_init_ctx (&ctx);
14105 fold_checksum_tree (op0, &ctx, &ht);
14106 md5_finish_ctx (&ctx, checksum_before);
14107 ht.empty ();
14108 #endif
14110 tem = fold_unary_loc (loc, code, type, op0);
14111 if (!tem)
14112 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
14114 #ifdef ENABLE_FOLD_CHECKING
14115 md5_init_ctx (&ctx);
14116 fold_checksum_tree (op0, &ctx, &ht);
14117 md5_finish_ctx (&ctx, checksum_after);
14119 if (memcmp (checksum_before, checksum_after, 16))
14120 fold_check_failed (op0, tem);
14121 #endif
14122 return tem;
14125 /* Fold a binary tree expression with code CODE of type TYPE with
14126 operands OP0 and OP1. LOC is the location of the resulting
14127 expression. Return a folded expression if successful. Otherwise,
14128 return a tree expression with code CODE of type TYPE with operands
14129 OP0 and OP1. */
14131 tree
14132 fold_build2_loc (location_t loc,
14133 enum tree_code code, tree type, tree op0, tree op1
14134 MEM_STAT_DECL)
14136 tree tem;
14137 #ifdef ENABLE_FOLD_CHECKING
14138 unsigned char checksum_before_op0[16],
14139 checksum_before_op1[16],
14140 checksum_after_op0[16],
14141 checksum_after_op1[16];
14142 struct md5_ctx ctx;
14143 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14145 md5_init_ctx (&ctx);
14146 fold_checksum_tree (op0, &ctx, &ht);
14147 md5_finish_ctx (&ctx, checksum_before_op0);
14148 ht.empty ();
14150 md5_init_ctx (&ctx);
14151 fold_checksum_tree (op1, &ctx, &ht);
14152 md5_finish_ctx (&ctx, checksum_before_op1);
14153 ht.empty ();
14154 #endif
14156 tem = fold_binary_loc (loc, code, type, op0, op1);
14157 if (!tem)
14158 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14160 #ifdef ENABLE_FOLD_CHECKING
14161 md5_init_ctx (&ctx);
14162 fold_checksum_tree (op0, &ctx, &ht);
14163 md5_finish_ctx (&ctx, checksum_after_op0);
14164 ht.empty ();
14166 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14167 fold_check_failed (op0, tem);
14169 md5_init_ctx (&ctx);
14170 fold_checksum_tree (op1, &ctx, &ht);
14171 md5_finish_ctx (&ctx, checksum_after_op1);
14173 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14174 fold_check_failed (op1, tem);
14175 #endif
14176 return tem;
14179 /* Fold a ternary tree expression with code CODE of type TYPE with
14180 operands OP0, OP1, and OP2. Return a folded expression if
14181 successful. Otherwise, return a tree expression with code CODE of
14182 type TYPE with operands OP0, OP1, and OP2. */
14184 tree
14185 fold_build3_loc (location_t loc, enum tree_code code, tree type,
14186 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14188 tree tem;
14189 #ifdef ENABLE_FOLD_CHECKING
14190 unsigned char checksum_before_op0[16],
14191 checksum_before_op1[16],
14192 checksum_before_op2[16],
14193 checksum_after_op0[16],
14194 checksum_after_op1[16],
14195 checksum_after_op2[16];
14196 struct md5_ctx ctx;
14197 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14199 md5_init_ctx (&ctx);
14200 fold_checksum_tree (op0, &ctx, &ht);
14201 md5_finish_ctx (&ctx, checksum_before_op0);
14202 ht.empty ();
14204 md5_init_ctx (&ctx);
14205 fold_checksum_tree (op1, &ctx, &ht);
14206 md5_finish_ctx (&ctx, checksum_before_op1);
14207 ht.empty ();
14209 md5_init_ctx (&ctx);
14210 fold_checksum_tree (op2, &ctx, &ht);
14211 md5_finish_ctx (&ctx, checksum_before_op2);
14212 ht.empty ();
14213 #endif
14215 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14216 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14217 if (!tem)
14218 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14220 #ifdef ENABLE_FOLD_CHECKING
14221 md5_init_ctx (&ctx);
14222 fold_checksum_tree (op0, &ctx, &ht);
14223 md5_finish_ctx (&ctx, checksum_after_op0);
14224 ht.empty ();
14226 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14227 fold_check_failed (op0, tem);
14229 md5_init_ctx (&ctx);
14230 fold_checksum_tree (op1, &ctx, &ht);
14231 md5_finish_ctx (&ctx, checksum_after_op1);
14232 ht.empty ();
14234 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14235 fold_check_failed (op1, tem);
14237 md5_init_ctx (&ctx);
14238 fold_checksum_tree (op2, &ctx, &ht);
14239 md5_finish_ctx (&ctx, checksum_after_op2);
14241 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14242 fold_check_failed (op2, tem);
14243 #endif
14244 return tem;
14247 /* Fold a CALL_EXPR expression of type TYPE with callee FN, the NARGS
14248 arguments in ARGARRAY, and a null static chain.
14249 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14250 of type TYPE from the given operands as constructed by build_call_array. */
14252 tree
14253 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14254 int nargs, tree *argarray)
14256 tree tem;
14257 #ifdef ENABLE_FOLD_CHECKING
14258 unsigned char checksum_before_fn[16],
14259 checksum_before_arglist[16],
14260 checksum_after_fn[16],
14261 checksum_after_arglist[16];
14262 struct md5_ctx ctx;
14263 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14264 int i;
14266 md5_init_ctx (&ctx);
14267 fold_checksum_tree (fn, &ctx, &ht);
14268 md5_finish_ctx (&ctx, checksum_before_fn);
14269 ht.empty ();
14271 md5_init_ctx (&ctx);
14272 for (i = 0; i < nargs; i++)
14273 fold_checksum_tree (argarray[i], &ctx, &ht);
14274 md5_finish_ctx (&ctx, checksum_before_arglist);
14275 ht.empty ();
14276 #endif
14278 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14279 if (!tem)
14280 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14282 #ifdef ENABLE_FOLD_CHECKING
14283 md5_init_ctx (&ctx);
14284 fold_checksum_tree (fn, &ctx, &ht);
14285 md5_finish_ctx (&ctx, checksum_after_fn);
14286 ht.empty ();
14288 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14289 fold_check_failed (fn, tem);
14291 md5_init_ctx (&ctx);
14292 for (i = 0; i < nargs; i++)
14293 fold_checksum_tree (argarray[i], &ctx, &ht);
14294 md5_finish_ctx (&ctx, checksum_after_arglist);
14296 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14297 fold_check_failed (NULL_TREE, tem);
14298 #endif
14299 return tem;
14302 /* Perform constant folding and related simplification of initializer
14303 expression EXPR. These behave identically to "fold_buildN" but ignore
14304 potential run-time traps and exceptions that fold must preserve. */
14306 #define START_FOLD_INIT \
14307 int saved_signaling_nans = flag_signaling_nans;\
14308 int saved_trapping_math = flag_trapping_math;\
14309 int saved_rounding_math = flag_rounding_math;\
14310 int saved_trapv = flag_trapv;\
14311 int saved_folding_initializer = folding_initializer;\
14312 flag_signaling_nans = 0;\
14313 flag_trapping_math = 0;\
14314 flag_rounding_math = 0;\
14315 flag_trapv = 0;\
14316 folding_initializer = 1;
14318 #define END_FOLD_INIT \
14319 flag_signaling_nans = saved_signaling_nans;\
14320 flag_trapping_math = saved_trapping_math;\
14321 flag_rounding_math = saved_rounding_math;\
14322 flag_trapv = saved_trapv;\
14323 folding_initializer = saved_folding_initializer;
14325 tree
14326 fold_init (tree expr)
14328 tree result;
14329 START_FOLD_INIT;
14331 result = fold (expr);
14333 END_FOLD_INIT;
14334 return result;
14337 tree
14338 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14339 tree type, tree op)
14341 tree result;
14342 START_FOLD_INIT;
14344 result = fold_build1_loc (loc, code, type, op);
14346 END_FOLD_INIT;
14347 return result;
14350 tree
14351 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14352 tree type, tree op0, tree op1)
14354 tree result;
14355 START_FOLD_INIT;
14357 result = fold_build2_loc (loc, code, type, op0, op1);
14359 END_FOLD_INIT;
14360 return result;
14363 tree
14364 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14365 int nargs, tree *argarray)
14367 tree result;
14368 START_FOLD_INIT;
14370 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14372 END_FOLD_INIT;
14373 return result;
14376 tree
14377 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14378 tree lhs, tree rhs)
14380 tree result;
14381 START_FOLD_INIT;
14383 result = fold_binary_loc (loc, code, type, lhs, rhs);
14385 END_FOLD_INIT;
14386 return result;
14389 #undef START_FOLD_INIT
14390 #undef END_FOLD_INIT
14392 /* Determine if first argument is a multiple of second argument. Return
14393 false if it is not, or we cannot easily determine it to be.
14395 An example of the sort of thing we care about (at this point; this routine
14396 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14397 fold cases do now) is discovering that
14399 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14401 is a multiple of
14403 SAVE_EXPR (J * 8)
14405 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14407 This code also handles discovering that
14409 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14411 is a multiple of 8 so we don't have to worry about dealing with a
14412 possible remainder.
14414 Note that we *look* inside a SAVE_EXPR only to determine how it was
14415 calculated; it is not safe for fold to do much of anything else with the
14416 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14417 at run time. For example, the latter example above *cannot* be implemented
14418 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14419 evaluation time of the original SAVE_EXPR is not necessarily the same at
14420 the time the new expression is evaluated. The only optimization of this
14421 sort that would be valid is changing
14423 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14425 divided by 8 to
14427 SAVE_EXPR (I) * SAVE_EXPR (J)
14429 (where the same SAVE_EXPR (J) is used in the original and the
14430 transformed version).
14432 NOWRAP specifies whether all outer operations in TYPE should
14433 be considered not wrapping. Any type conversion within TOP acts
14434 as a barrier and we will fall back to NOWRAP being false.
14435 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14436 as not wrapping even though they are generally using unsigned arithmetic. */
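/* Illustrative sketch (not part of this file): a typical query asks
   whether a size expression is a multiple of an alignment:

     tree size = TYPE_SIZE_UNIT (some_type);
     tree align = build_int_cst (sizetype, 8);
     bool ok = multiple_of_p (sizetype, size, align, true);

   OK is true only when SIZE is provably a multiple of 8 bytes; NOWRAP
   is true here because TYPE_SIZE-style expressions are treated as
   non-wrapping.  A false return merely means the property could not be
   proven.  */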
14438 bool
14439 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14441 gimple *stmt;
14442 tree op1, op2;
14444 if (operand_equal_p (top, bottom, 0))
14445 return true;
14447 if (TREE_CODE (type) != INTEGER_TYPE)
14448 return false;
14450 switch (TREE_CODE (top))
14452 case BIT_AND_EXPR:
14453 /* Bitwise and provides a power of two multiple. If the mask is
14454 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14455 if (!integer_pow2p (bottom))
14456 return false;
14457 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14458 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14460 case MULT_EXPR:
14461 /* If the multiplication can wrap we cannot recurse further unless
14462 BOTTOM is a power of two, in which case wrapping does not
14463 matter. */
14464 if (!nowrap
14465 && !TYPE_OVERFLOW_UNDEFINED (type)
14466 && !integer_pow2p (bottom))
14467 return false;
14468 if (TREE_CODE (bottom) == INTEGER_CST)
14470 op1 = TREE_OPERAND (top, 0);
14471 op2 = TREE_OPERAND (top, 1);
14472 if (TREE_CODE (op1) == INTEGER_CST)
14473 std::swap (op1, op2);
14474 if (TREE_CODE (op2) == INTEGER_CST)
14476 if (multiple_of_p (type, op2, bottom, nowrap))
14477 return true;
14478 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14479 if (multiple_of_p (type, bottom, op2, nowrap))
14481 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14482 wi::to_widest (op2));
14483 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14485 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14486 return multiple_of_p (type, op1, op2, nowrap);
14489 return multiple_of_p (type, op1, bottom, nowrap);
14492 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14493 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14495 case LSHIFT_EXPR:
14496 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14497 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14499 op1 = TREE_OPERAND (top, 1);
14500 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14502 wide_int mul_op
14503 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14504 return multiple_of_p (type,
14505 wide_int_to_tree (type, mul_op), bottom,
14506 nowrap);
14509 return false;
14511 case MINUS_EXPR:
14512 case PLUS_EXPR:
14513 /* If the addition or subtraction can wrap we cannot recurse further
14514 unless BOTTOM is a power of two, in which case wrapping does not
14515 matter. */
14516 if (!nowrap
14517 && !TYPE_OVERFLOW_UNDEFINED (type)
14518 && !integer_pow2p (bottom))
14519 return false;
14521 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14522 unsigned type. For example, (X / 3) * 3 + 0xfffffffd is a multiple of 3,
14523 but the raw constant 0xfffffffd is not. */
14524 op1 = TREE_OPERAND (top, 1);
14525 if (TREE_CODE (top) == PLUS_EXPR
14526 && nowrap
14527 && TYPE_UNSIGNED (type)
14528 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14529 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14531 /* We cannot prove precisely whether op0 +- op1 is a multiple of
14532 BOTTOM, so be conservative and require both op0 and op1 to be
14533 multiples of BOTTOM. Note we check the second operand first
14534 since it's usually simpler. */
14535 return (multiple_of_p (type, op1, bottom, nowrap)
14536 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14538 CASE_CONVERT:
14539 /* Can't handle conversions from non-integral or wider integral type. */
14540 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14541 || (TYPE_PRECISION (type)
14542 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14543 return false;
14544 /* NOWRAP only extends to operations in the outermost type so
14545 make sure to strip it off here. */
14546 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14547 TREE_OPERAND (top, 0), bottom, false);
14549 case SAVE_EXPR:
14550 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14552 case COND_EXPR:
14553 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14554 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14556 case INTEGER_CST:
14557 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14558 return false;
14559 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14560 SIGNED);
14562 case SSA_NAME:
14563 if (TREE_CODE (bottom) == INTEGER_CST
14564 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14565 && gimple_code (stmt) == GIMPLE_ASSIGN)
14567 enum tree_code code = gimple_assign_rhs_code (stmt);
14569 /* Check for special cases to see if top is defined as multiple
14570 of bottom:
14572 top = (X & ~(bottom - 1)) ; bottom is power of 2
14574 or
14576 Y = X % bottom
14577 top = X - Y. */
14578 if (code == BIT_AND_EXPR
14579 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14580 && TREE_CODE (op2) == INTEGER_CST
14581 && integer_pow2p (bottom)
14582 && wi::multiple_of_p (wi::to_widest (op2),
14583 wi::to_widest (bottom), SIGNED))
14584 return true;
14586 op1 = gimple_assign_rhs1 (stmt);
14587 if (code == MINUS_EXPR
14588 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14589 && TREE_CODE (op2) == SSA_NAME
14590 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14591 && gimple_code (stmt) == GIMPLE_ASSIGN
14592 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14593 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14594 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14595 return true;
14598 /* fall through */
14600 default:
14601 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14602 return multiple_p (wi::to_poly_widest (top),
14603 wi::to_poly_widest (bottom));
14605 return false;
14609 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14610 This function returns true for integer expressions, and returns
14611 false if uncertain. */
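/* For example (illustrative): fabs (x) is finite exactly when X is,
   MIN/MAX are finite when both operands are, and FLOAT_EXPR (an
   integer converted to a float) is always finite.  Under
   -ffinite-math-only neither NaNs nor infinities are honored, so the
   early exit below answers true for every expression.  */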
14613 bool
14614 tree_expr_finite_p (const_tree x)
14616 machine_mode mode = element_mode (x);
14617 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14618 return true;
14619 switch (TREE_CODE (x))
14621 case REAL_CST:
14622 return real_isfinite (TREE_REAL_CST_PTR (x));
14623 case COMPLEX_CST:
14624 return tree_expr_finite_p (TREE_REALPART (x))
14625 && tree_expr_finite_p (TREE_IMAGPART (x));
14626 case FLOAT_EXPR:
14627 return true;
14628 case ABS_EXPR:
14629 case CONVERT_EXPR:
14630 case NON_LVALUE_EXPR:
14631 case NEGATE_EXPR:
14632 case SAVE_EXPR:
14633 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14634 case MIN_EXPR:
14635 case MAX_EXPR:
14636 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14637 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14638 case COND_EXPR:
14639 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14640 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14641 case CALL_EXPR:
14642 switch (get_call_combined_fn (x))
14644 CASE_CFN_FABS:
14645 CASE_CFN_FABS_FN:
14646 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14647 CASE_CFN_FMAX:
14648 CASE_CFN_FMAX_FN:
14649 CASE_CFN_FMIN:
14650 CASE_CFN_FMIN_FN:
14651 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14652 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14653 default:
14654 return false;
14657 default:
14658 return false;
14662 /* Return true if expression X evaluates to an infinity.
14663 This function returns false for integer expressions. */
14665 bool
14666 tree_expr_infinite_p (const_tree x)
14668 if (!HONOR_INFINITIES (x))
14669 return false;
14670 switch (TREE_CODE (x))
14672 case REAL_CST:
14673 return real_isinf (TREE_REAL_CST_PTR (x));
14674 case ABS_EXPR:
14675 case NEGATE_EXPR:
14676 case NON_LVALUE_EXPR:
14677 case SAVE_EXPR:
14678 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14679 case COND_EXPR:
14680 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14681 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14682 default:
14683 return false;
14687 /* Return true if expression X could evaluate to an infinity.
14688 This function returns false for integer expressions, and returns
14689 true if uncertain. */
14691 bool
14692 tree_expr_maybe_infinite_p (const_tree x)
14694 if (!HONOR_INFINITIES (x))
14695 return false;
14696 switch (TREE_CODE (x))
14698 case REAL_CST:
14699 return real_isinf (TREE_REAL_CST_PTR (x));
14700 case FLOAT_EXPR:
14701 return false;
14702 case ABS_EXPR:
14703 case NEGATE_EXPR:
14704 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14705 case COND_EXPR:
14706 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14707 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14708 default:
14709 return true;
14713 /* Return true if expression X evaluates to a signaling NaN.
14714 This function returns false for integer expressions. */
14716 bool
14717 tree_expr_signaling_nan_p (const_tree x)
14719 if (!HONOR_SNANS (x))
14720 return false;
14721 switch (TREE_CODE (x))
14723 case REAL_CST:
14724 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14725 case NON_LVALUE_EXPR:
14726 case SAVE_EXPR:
14727 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14728 case COND_EXPR:
14729 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14730 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14731 default:
14732 return false;
14736 /* Return true if expression X could evaluate to a signaling NaN.
14737 This function returns false for integer expressions, and returns
14738 true if uncertain. */
14740 bool
14741 tree_expr_maybe_signaling_nan_p (const_tree x)
14743 if (!HONOR_SNANS (x))
14744 return false;
14745 switch (TREE_CODE (x))
14747 case REAL_CST:
14748 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14749 case FLOAT_EXPR:
14750 return false;
14751 case ABS_EXPR:
14752 case CONVERT_EXPR:
14753 case NEGATE_EXPR:
14754 case NON_LVALUE_EXPR:
14755 case SAVE_EXPR:
14756 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14757 case MIN_EXPR:
14758 case MAX_EXPR:
14759 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14760 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14761 case COND_EXPR:
14762 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14763 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14764 case CALL_EXPR:
14765 switch (get_call_combined_fn (x))
14767 CASE_CFN_FABS:
14768 CASE_CFN_FABS_FN:
14769 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14770 CASE_CFN_FMAX:
14771 CASE_CFN_FMAX_FN:
14772 CASE_CFN_FMIN:
14773 CASE_CFN_FMIN_FN:
14774 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14775 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14776 default:
14777 return true;
14779 default:
14780 return true;
14784 /* Return true if expression X evaluates to a NaN.
14785 This function returns false for integer expressions. */
14787 bool
14788 tree_expr_nan_p (const_tree x)
14790 if (!HONOR_NANS (x))
14791 return false;
14792 switch (TREE_CODE (x))
14794 case REAL_CST:
14795 return real_isnan (TREE_REAL_CST_PTR (x));
14796 case NON_LVALUE_EXPR:
14797 case SAVE_EXPR:
14798 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14799 case COND_EXPR:
14800 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14801 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14802 default:
14803 return false;
14807 /* Return true if expression X could evaluate to a NaN.
14808 This function returns false for integer expressions, and returns
14809 true if uncertain. */
14811 bool
14812 tree_expr_maybe_nan_p (const_tree x)
14814 if (!HONOR_NANS (x))
14815 return false;
14816 switch (TREE_CODE (x))
14818 case REAL_CST:
14819 return real_isnan (TREE_REAL_CST_PTR (x));
14820 case FLOAT_EXPR:
14821 return false;
14822 case PLUS_EXPR:
14823 case MINUS_EXPR:
14824 case MULT_EXPR:
14825 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14826 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14827 case ABS_EXPR:
14828 case CONVERT_EXPR:
14829 case NEGATE_EXPR:
14830 case NON_LVALUE_EXPR:
14831 case SAVE_EXPR:
14832 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14833 case MIN_EXPR:
14834 case MAX_EXPR:
14835 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14836 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14837 case COND_EXPR:
14838 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14839 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14840 case CALL_EXPR:
14841 switch (get_call_combined_fn (x))
14843 CASE_CFN_FABS:
14844 CASE_CFN_FABS_FN:
14845 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14846 CASE_CFN_FMAX:
14847 CASE_CFN_FMAX_FN:
14848 CASE_CFN_FMIN:
14849 CASE_CFN_FMIN_FN:
14850 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14851 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14852 default:
14853 return true;
14855 default:
14856 return true;
14860 /* Return true if expression X could evaluate to -0.0.
14861 This function returns true if uncertain. */
14863 bool
14864 tree_expr_maybe_real_minus_zero_p (const_tree x)
14866 if (!HONOR_SIGNED_ZEROS (x))
14867 return false;
14868 switch (TREE_CODE (x))
14870 case REAL_CST:
14871 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14872 case INTEGER_CST:
14873 case FLOAT_EXPR:
14874 case ABS_EXPR:
14875 return false;
14876 case NON_LVALUE_EXPR:
14877 case SAVE_EXPR:
14878 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14879 case COND_EXPR:
14880 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14881 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14882 case CALL_EXPR:
14883 switch (get_call_combined_fn (x))
14885 CASE_CFN_FABS:
14886 CASE_CFN_FABS_FN:
14887 return false;
14888 default:
14889 break;
14891 default:
14892 break;
14894 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14895 but currently those predicates require tree and not const_tree. */
14896 return true;
14899 #define tree_expr_nonnegative_warnv_p(X, Y) \
14900 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14902 #define RECURSE(X) \
14903 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
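/* The #define above turns any direct call to
   tree_expr_nonnegative_warnv_p within this file into a compile-time
   error, so recursive queries must go through RECURSE, which is what
   keeps DEPTH accurate for the param_max_ssa_name_query_depth cutoff
   applied at SSA_NAME nodes.  Parenthesizing the name in RECURSE,
   (tree_expr_nonnegative_warnv_p) (...), suppresses expansion of the
   function-like macro.  */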
14905 /* Return true if CODE or TYPE is known to be non-negative. */
14907 static bool
14908 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14910 if (!VECTOR_TYPE_P (type)
14911 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14912 && truth_value_p (code))
14913 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14914 have a signed:1 type (where the values are -1 and 0). */
14915 return true;
14916 return false;
14919 /* Return true if (CODE OP0) is known to be non-negative. If the return
14920 value is based on the assumption that signed overflow is undefined,
14921 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14922 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14924 bool
14925 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14926 bool *strict_overflow_p, int depth)
14928 if (TYPE_UNSIGNED (type))
14929 return true;
14931 switch (code)
14933 case ABS_EXPR:
14934 /* We can't return 1 if flag_wrapv is set because
14935 ABS_EXPR<INT_MIN> = INT_MIN. */
14936 if (!ANY_INTEGRAL_TYPE_P (type))
14937 return true;
14938 if (TYPE_OVERFLOW_UNDEFINED (type))
14940 *strict_overflow_p = true;
14941 return true;
14943 break;
14945 case NON_LVALUE_EXPR:
14946 case FLOAT_EXPR:
14947 case FIX_TRUNC_EXPR:
14948 return RECURSE (op0);
14950 CASE_CONVERT:
14952 tree inner_type = TREE_TYPE (op0);
14953 tree outer_type = type;
14955 if (SCALAR_FLOAT_TYPE_P (outer_type))
14957 if (SCALAR_FLOAT_TYPE_P (inner_type))
14958 return RECURSE (op0);
14959 if (INTEGRAL_TYPE_P (inner_type))
14961 if (TYPE_UNSIGNED (inner_type))
14962 return true;
14963 return RECURSE (op0);
14966 else if (INTEGRAL_TYPE_P (outer_type))
14968 if (SCALAR_FLOAT_TYPE_P (inner_type))
14969 return RECURSE (op0);
14970 if (INTEGRAL_TYPE_P (inner_type))
14971 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14972 && TYPE_UNSIGNED (inner_type);
14975 break;
14977 default:
14978 return tree_simple_nonnegative_warnv_p (code, type);
14981 /* We don't know sign of `t', so be conservative and return false. */
14982 return false;
14985 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14986 value is based on the assumption that signed overflow is undefined,
14987 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14988 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14990 bool
14991 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14992 tree op1, bool *strict_overflow_p,
14993 int depth)
14995 if (TYPE_UNSIGNED (type))
14996 return true;
14998 switch (code)
15000 case POINTER_PLUS_EXPR:
15001 case PLUS_EXPR:
15002 if (FLOAT_TYPE_P (type))
15003 return RECURSE (op0) && RECURSE (op1);
15005 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15006 both unsigned and at least 2 bits shorter than the result. */
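/* E.g. with 16-bit unsigned short A and B and 32-bit int,
   (int) a + (int) b needs at most 17 value bits, so the sum cannot
   reach the sign bit. */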
15007 if (TREE_CODE (type) == INTEGER_TYPE
15008 && TREE_CODE (op0) == NOP_EXPR
15009 && TREE_CODE (op1) == NOP_EXPR)
15011 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15012 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15013 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15014 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15016 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15017 TYPE_PRECISION (inner2)) + 1;
15018 return prec < TYPE_PRECISION (type);
15021 break;
15023 case MULT_EXPR:
15024 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15026 /* x * x is always non-negative for floating point x
15027 or without overflow. */
15028 if (operand_equal_p (op0, op1, 0)
15029 || (RECURSE (op0) && RECURSE (op1)))
15031 if (ANY_INTEGRAL_TYPE_P (type)
15032 && TYPE_OVERFLOW_UNDEFINED (type))
15033 *strict_overflow_p = true;
15034 return true;
15038 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15039 both unsigned and the total of their value bits is less than the result's. */
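/* E.g. (int) c * 100 with 8-bit unsigned char C needs at most
   8 + 7 = 15 value bits, which is less than 32, so the product is
   non-negative. */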
15040 if (TREE_CODE (type) == INTEGER_TYPE
15041 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15042 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15044 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15045 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15046 : TREE_TYPE (op0);
15047 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15048 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15049 : TREE_TYPE (op1);
15051 bool unsigned0 = TYPE_UNSIGNED (inner0);
15052 bool unsigned1 = TYPE_UNSIGNED (inner1);
15054 if (TREE_CODE (op0) == INTEGER_CST)
15055 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15057 if (TREE_CODE (op1) == INTEGER_CST)
15058 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15060 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15061 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15063 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15064 ? tree_int_cst_min_precision (op0, UNSIGNED)
15065 : TYPE_PRECISION (inner0);
15067 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15068 ? tree_int_cst_min_precision (op1, UNSIGNED)
15069 : TYPE_PRECISION (inner1);
15071 return precision0 + precision1 < TYPE_PRECISION (type);
15074 return false;
15076 case BIT_AND_EXPR:
15077 return RECURSE (op0) || RECURSE (op1);
15079 case MAX_EXPR:
15080 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
15081 things. */
15082 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
15083 return RECURSE (op0) && RECURSE (op1);
15084 return RECURSE (op0) || RECURSE (op1);
15086 case BIT_IOR_EXPR:
15087 case BIT_XOR_EXPR:
15088 case MIN_EXPR:
15089 case RDIV_EXPR:
15090 case TRUNC_DIV_EXPR:
15091 case CEIL_DIV_EXPR:
15092 case FLOOR_DIV_EXPR:
15093 case ROUND_DIV_EXPR:
15094 return RECURSE (op0) && RECURSE (op1);
15096 case TRUNC_MOD_EXPR:
15097 return RECURSE (op0);
15099 case FLOOR_MOD_EXPR:
15100 return RECURSE (op1);
15102 case CEIL_MOD_EXPR:
15103 case ROUND_MOD_EXPR:
15104 default:
15105 return tree_simple_nonnegative_warnv_p (code, type);
15108 /* We don't know sign of `t', so be conservative and return false. */
15109 return false;
15112 /* Return true if T is known to be non-negative. If the return
15113 value is based on the assumption that signed overflow is undefined,
15114 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15115 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15117 bool
15118 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15120 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15121 return true;
15123 switch (TREE_CODE (t))
15125 case INTEGER_CST:
15126 return tree_int_cst_sgn (t) >= 0;
15128 case REAL_CST:
15129 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15131 case FIXED_CST:
15132 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15134 case COND_EXPR:
15135 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15137 case SSA_NAME:
15138 /* Limit the depth of recursion to avoid quadratic behavior.
15139 This is expected to catch almost all occurrences in practice.
15140 If this code misses important cases that unbounded recursion
15141 would not, passes that need this information could be revised
15142 to provide it through dataflow propagation. */
15143 return (!name_registered_for_update_p (t)
15144 && depth < param_max_ssa_name_query_depth
15145 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15146 strict_overflow_p, depth));
15148 default:
15149 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15153 /* Return true if T is known to be non-negative. If the return
15154 value is based on the assumption that signed overflow is undefined,
15155 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15156 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15158 bool
15159 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15160 bool *strict_overflow_p, int depth)
15162 switch (fn)
15164 CASE_CFN_ACOS:
15165 CASE_CFN_ACOS_FN:
15166 CASE_CFN_ACOSH:
15167 CASE_CFN_ACOSH_FN:
15168 CASE_CFN_CABS:
15169 CASE_CFN_CABS_FN:
15170 CASE_CFN_COSH:
15171 CASE_CFN_COSH_FN:
15172 CASE_CFN_ERFC:
15173 CASE_CFN_ERFC_FN:
15174 CASE_CFN_EXP:
15175 CASE_CFN_EXP_FN:
15176 CASE_CFN_EXP10:
15177 CASE_CFN_EXP2:
15178 CASE_CFN_EXP2_FN:
15179 CASE_CFN_FABS:
15180 CASE_CFN_FABS_FN:
15181 CASE_CFN_FDIM:
15182 CASE_CFN_FDIM_FN:
15183 CASE_CFN_HYPOT:
15184 CASE_CFN_HYPOT_FN:
15185 CASE_CFN_POW10:
15186 CASE_CFN_FFS:
15187 CASE_CFN_PARITY:
15188 CASE_CFN_POPCOUNT:
15189 CASE_CFN_CLZ:
15190 CASE_CFN_CLRSB:
15191 case CFN_BUILT_IN_BSWAP16:
15192 case CFN_BUILT_IN_BSWAP32:
15193 case CFN_BUILT_IN_BSWAP64:
15194 case CFN_BUILT_IN_BSWAP128:
15195 /* Always true. */
15196 return true;
15198 CASE_CFN_SQRT:
15199 CASE_CFN_SQRT_FN:
15200 /* sqrt(-0.0) is -0.0. */
15201 if (!HONOR_SIGNED_ZEROS (type))
15202 return true;
15203 return RECURSE (arg0);
15205 CASE_CFN_ASINH:
15206 CASE_CFN_ASINH_FN:
15207 CASE_CFN_ATAN:
15208 CASE_CFN_ATAN_FN:
15209 CASE_CFN_ATANH:
15210 CASE_CFN_ATANH_FN:
15211 CASE_CFN_CBRT:
15212 CASE_CFN_CBRT_FN:
15213 CASE_CFN_CEIL:
15214 CASE_CFN_CEIL_FN:
15215 CASE_CFN_ERF:
15216 CASE_CFN_ERF_FN:
15217 CASE_CFN_EXPM1:
15218 CASE_CFN_EXPM1_FN:
15219 CASE_CFN_FLOOR:
15220 CASE_CFN_FLOOR_FN:
15221 CASE_CFN_FMOD:
15222 CASE_CFN_FMOD_FN:
15223 CASE_CFN_FREXP:
15224 CASE_CFN_FREXP_FN:
15225 CASE_CFN_ICEIL:
15226 CASE_CFN_IFLOOR:
15227 CASE_CFN_IRINT:
15228 CASE_CFN_IROUND:
15229 CASE_CFN_LCEIL:
15230 CASE_CFN_LDEXP:
15231 CASE_CFN_LFLOOR:
15232 CASE_CFN_LLCEIL:
15233 CASE_CFN_LLFLOOR:
15234 CASE_CFN_LLRINT:
15235 CASE_CFN_LLRINT_FN:
15236 CASE_CFN_LLROUND:
15237 CASE_CFN_LLROUND_FN:
15238 CASE_CFN_LRINT:
15239 CASE_CFN_LRINT_FN:
15240 CASE_CFN_LROUND:
15241 CASE_CFN_LROUND_FN:
15242 CASE_CFN_MODF:
15243 CASE_CFN_MODF_FN:
15244 CASE_CFN_NEARBYINT:
15245 CASE_CFN_NEARBYINT_FN:
15246 CASE_CFN_RINT:
15247 CASE_CFN_RINT_FN:
15248 CASE_CFN_ROUND:
15249 CASE_CFN_ROUND_FN:
15250 CASE_CFN_ROUNDEVEN:
15251 CASE_CFN_ROUNDEVEN_FN:
15252 CASE_CFN_SCALB:
15253 CASE_CFN_SCALBLN:
15254 CASE_CFN_SCALBLN_FN:
15255 CASE_CFN_SCALBN:
15256 CASE_CFN_SCALBN_FN:
15257 CASE_CFN_SIGNBIT:
15258 CASE_CFN_SIGNIFICAND:
15259 CASE_CFN_SINH:
15260 CASE_CFN_SINH_FN:
15261 CASE_CFN_TANH:
15262 CASE_CFN_TANH_FN:
15263 CASE_CFN_TRUNC:
15264 CASE_CFN_TRUNC_FN:
15265 /* True if the 1st argument is nonnegative. */
15266 return RECURSE (arg0);
15268 CASE_CFN_FMAX:
15269 CASE_CFN_FMAX_FN:
15270 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15271 things. In the presence of sNaNs, we're only guaranteed to be
15272 non-negative if both operands are non-negative. In the presence
15273 of qNaNs, we're non-negative if either operand is non-negative
15274 and can't be a qNaN, or if both operands are non-negative. */
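/* E.g. fmax (x, 0.0) with arbitrary X: if X is a qNaN the result is
   0.0, and otherwise it is at least 0.0, so the second branch below
   answers true via RECURSE (arg1) && !tree_expr_maybe_nan_p (arg1). */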
15275 if (tree_expr_maybe_signaling_nan_p (arg0)
15276 || tree_expr_maybe_signaling_nan_p (arg1))
15277 return RECURSE (arg0) && RECURSE (arg1);
15278 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15279 || RECURSE (arg1))
15280 : (RECURSE (arg1)
15281 && !tree_expr_maybe_nan_p (arg1));
15283 CASE_CFN_FMIN:
15284 CASE_CFN_FMIN_FN:
15285 /* True if the 1st AND 2nd arguments are nonnegative. */
15286 return RECURSE (arg0) && RECURSE (arg1);
15288 CASE_CFN_COPYSIGN:
15289 CASE_CFN_COPYSIGN_FN:
15290 /* True if the 2nd argument is nonnegative. */
15291 return RECURSE (arg1);
15293 CASE_CFN_POWI:
15294 /* True if the 1st argument is nonnegative or the second
15295 argument is an even integer. */
15296 if (TREE_CODE (arg1) == INTEGER_CST
15297 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15298 return true;
15299 return RECURSE (arg0);
15301 CASE_CFN_POW:
15302 CASE_CFN_POW_FN:
15303 /* True if the 1st argument is nonnegative or the second
15304 argument is an even integer valued real. */
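/* E.g. pow (x, 2.0) is x*x and hence non-negative for any X,
   while pow (x, 2.5) is known non-negative only when X is. */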
15305 if (TREE_CODE (arg1) == REAL_CST)
15307 REAL_VALUE_TYPE c;
15308 HOST_WIDE_INT n;
15310 c = TREE_REAL_CST (arg1);
15311 n = real_to_integer (&c);
15312 if ((n & 1) == 0)
15314 REAL_VALUE_TYPE cint;
15315 real_from_integer (&cint, VOIDmode, n, SIGNED);
15316 if (real_identical (&c, &cint))
15317 return true;
15320 return RECURSE (arg0);
15322 default:
15323 break;
15325 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
15328 /* Return true if T is known to be non-negative. If the return
15329 value is based on the assumption that signed overflow is undefined,
15330 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15331 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15333 static bool
15334 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15336 enum tree_code code = TREE_CODE (t);
15337 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15338 return true;
15340 switch (code)
15342 case TARGET_EXPR:
15344 tree temp = TARGET_EXPR_SLOT (t);
15345 t = TARGET_EXPR_INITIAL (t);
15347 /* If the initializer is non-void, then it's a normal expression
15348 that will be assigned to the slot. */
15349 if (!VOID_TYPE_P (TREE_TYPE (t)))
15350 return RECURSE (t);
15352 /* Otherwise, the initializer sets the slot in some way. One common
15353 way is an assignment statement at the end of the initializer. */
15354 while (1)
15356 if (TREE_CODE (t) == BIND_EXPR)
15357 t = expr_last (BIND_EXPR_BODY (t));
15358 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15359 || TREE_CODE (t) == TRY_CATCH_EXPR)
15360 t = expr_last (TREE_OPERAND (t, 0));
15361 else if (TREE_CODE (t) == STATEMENT_LIST)
15362 t = expr_last (t);
15363 else
15364 break;
15366 if (TREE_CODE (t) == MODIFY_EXPR
15367 && TREE_OPERAND (t, 0) == temp)
15368 return RECURSE (TREE_OPERAND (t, 1));
15370 return false;
15373 case CALL_EXPR:
15375 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15376 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15378 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15379 get_call_combined_fn (t),
15380 arg0,
15381 arg1,
15382 strict_overflow_p, depth);
15384 case COMPOUND_EXPR:
15385 case MODIFY_EXPR:
15386 return RECURSE (TREE_OPERAND (t, 1));
15388 case BIND_EXPR:
15389 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15391 case SAVE_EXPR:
15392 return RECURSE (TREE_OPERAND (t, 0));
15394 default:
15395 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15399 #undef RECURSE
15400 #undef tree_expr_nonnegative_warnv_p
15402 /* Return true if T is known to be non-negative. If the return
15403 value is based on the assumption that signed overflow is undefined,
15404 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15405 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15407 bool
15408 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15410 enum tree_code code;
15411 if (t == error_mark_node)
15412 return false;
15414 code = TREE_CODE (t);
15415 switch (TREE_CODE_CLASS (code))
15417 case tcc_binary:
15418 case tcc_comparison:
15419 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15420 TREE_TYPE (t),
15421 TREE_OPERAND (t, 0),
15422 TREE_OPERAND (t, 1),
15423 strict_overflow_p, depth);
15425 case tcc_unary:
15426 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15427 TREE_TYPE (t),
15428 TREE_OPERAND (t, 0),
15429 strict_overflow_p, depth);
15431 case tcc_constant:
15432 case tcc_declaration:
15433 case tcc_reference:
15434 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15436 default:
15437 break;
15440 switch (code)
15442 case TRUTH_AND_EXPR:
15443 case TRUTH_OR_EXPR:
15444 case TRUTH_XOR_EXPR:
15445 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15446 TREE_TYPE (t),
15447 TREE_OPERAND (t, 0),
15448 TREE_OPERAND (t, 1),
15449 strict_overflow_p, depth);
15450 case TRUTH_NOT_EXPR:
15451 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15452 TREE_TYPE (t),
15453 TREE_OPERAND (t, 0),
15454 strict_overflow_p, depth);
15456 case COND_EXPR:
15457 case CONSTRUCTOR:
15458 case OBJ_TYPE_REF:
15459 case ADDR_EXPR:
15460 case WITH_SIZE_EXPR:
15461 case SSA_NAME:
15462 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15464 default:
15465 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15469 /* Return true if `t' is known to be non-negative. Handle warnings
15470 about undefined signed overflow. */
15472 bool
15473 tree_expr_nonnegative_p (tree t)
15475 bool ret, strict_overflow_p;
15477 strict_overflow_p = false;
15478 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15479 if (strict_overflow_p)
15480 fold_overflow_warning (("assuming signed overflow does not occur when "
15481 "determining that expression is always "
15482 "non-negative"),
15483 WARN_STRICT_OVERFLOW_MISC);
15484 return ret;
15488 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15489 For floating point we further ensure that T is not denormal.
15490 Similar logic is present in nonzero_address in rtlanal.h.
15492 If the return value is based on the assumption that signed overflow
15493 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15494 change *STRICT_OVERFLOW_P. */
15496 bool
15497 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15498 bool *strict_overflow_p)
15500 switch (code)
15502 case ABS_EXPR:
15503 return tree_expr_nonzero_warnv_p (op0,
15504 strict_overflow_p);
15506 case NOP_EXPR:
15508 tree inner_type = TREE_TYPE (op0);
15509 tree outer_type = type;
15511 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15512 && tree_expr_nonzero_warnv_p (op0,
15513 strict_overflow_p));
15515 break;
15517 case NON_LVALUE_EXPR:
15518 return tree_expr_nonzero_warnv_p (op0,
15519 strict_overflow_p);
15521 default:
15522 break;
15525 return false;
15528 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15529 For floating point we further ensure that T is not denormal.
15530 Similar logic is present in nonzero_address in rtlanal.h.
15532 If the return value is based on the assumption that signed overflow
15533 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15534 change *STRICT_OVERFLOW_P. */
15536 bool
15537 tree_binary_nonzero_warnv_p (enum tree_code code,
15538 tree type,
15539 tree op0,
15540 tree op1, bool *strict_overflow_p)
15542 bool sub_strict_overflow_p;
15543 switch (code)
15545 case POINTER_PLUS_EXPR:
15546 case PLUS_EXPR:
15547 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15549 /* In the presence of negative values it is hard
15550 to say anything definite. */
15551 sub_strict_overflow_p = false;
15552 if (!tree_expr_nonnegative_warnv_p (op0,
15553 &sub_strict_overflow_p)
15554 || !tree_expr_nonnegative_warnv_p (op1,
15555 &sub_strict_overflow_p))
15556 return false;
15557 /* One of the operands must be positive and the other non-negative. */
15558 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15559 overflows, on a twos-complement machine the sum of two
15560 nonnegative numbers can never be zero. */
15561 return (tree_expr_nonzero_warnv_p (op0,
15562 strict_overflow_p)
15563 || tree_expr_nonzero_warnv_p (op1,
15564 strict_overflow_p));
15566 break;
15568 case MULT_EXPR:
15569 if (TYPE_OVERFLOW_UNDEFINED (type))
15571 if (tree_expr_nonzero_warnv_p (op0,
15572 strict_overflow_p)
15573 && tree_expr_nonzero_warnv_p (op1,
15574 strict_overflow_p))
15576 *strict_overflow_p = true;
15577 return true;
15580 break;
15582 case MIN_EXPR:
15583 sub_strict_overflow_p = false;
15584 if (tree_expr_nonzero_warnv_p (op0,
15585 &sub_strict_overflow_p)
15586 && tree_expr_nonzero_warnv_p (op1,
15587 &sub_strict_overflow_p))
15589 if (sub_strict_overflow_p)
15590 *strict_overflow_p = true;
15592 break;
15594 case MAX_EXPR:
15595 sub_strict_overflow_p = false;
15596 if (tree_expr_nonzero_warnv_p (op0,
15597 &sub_strict_overflow_p))
15599 if (sub_strict_overflow_p)
15600 *strict_overflow_p = true;
15602 /* When both operands are nonzero, then MAX must be too. */
15603 if (tree_expr_nonzero_warnv_p (op1,
15604 strict_overflow_p))
15605 return true;
15607 /* MAX where operand 0 is positive is positive. */
15608 return tree_expr_nonnegative_warnv_p (op0,
15609 strict_overflow_p);
15611 /* MAX where operand 1 is positive is positive. */
15612 else if (tree_expr_nonzero_warnv_p (op1,
15613 &sub_strict_overflow_p)
15614 && tree_expr_nonnegative_warnv_p (op1,
15615 &sub_strict_overflow_p))
15617 if (sub_strict_overflow_p)
15618 *strict_overflow_p = true;
15619 return true;
15621 break;
15623 case BIT_IOR_EXPR:
15624 return (tree_expr_nonzero_warnv_p (op1,
15625 strict_overflow_p)
15626 || tree_expr_nonzero_warnv_p (op0,
15627 strict_overflow_p));
15629 default:
15630 break;
15633 return false;
15636 /* Return true when T is an address and is known to be nonzero.
15637 For floating point we further ensure that T is not denormal.
15638 Similar logic is present in nonzero_address in rtlanal.h.
15640 If the return value is based on the assumption that signed overflow
15641 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15642 change *STRICT_OVERFLOW_P. */
15644 bool
15645 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15647 bool sub_strict_overflow_p;
15648 switch (TREE_CODE (t))
15650 case INTEGER_CST:
15651 return !integer_zerop (t);
15653 case ADDR_EXPR:
15655 tree base = TREE_OPERAND (t, 0);
15657 if (!DECL_P (base))
15658 base = get_base_address (base);
15660 if (base && TREE_CODE (base) == TARGET_EXPR)
15661 base = TARGET_EXPR_SLOT (base);
15663 if (!base)
15664 return false;
15666 /* For objects in the symbol table, check if we know they are non-zero.
15667 Don't do anything for variables and functions before symtab is built;
15668 it is quite possible that they will be declared weak later. */
15669 int nonzero_addr = maybe_nonzero_address (base);
15670 if (nonzero_addr >= 0)
15671 return nonzero_addr;
15673 /* Constants are never weak. */
15674 if (CONSTANT_CLASS_P (base))
15675 return true;
15677 return false;
15680 case COND_EXPR:
15681 sub_strict_overflow_p = false;
15682 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15683 &sub_strict_overflow_p)
15684 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15685 &sub_strict_overflow_p))
15687 if (sub_strict_overflow_p)
15688 *strict_overflow_p = true;
15689 return true;
15691 break;
15693 case SSA_NAME:
15694 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15695 break;
15696 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15698 default:
15699 break;
15701 return false;
15704 #define integer_valued_real_p(X) \
15705 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15707 #define RECURSE(X) \
15708 ((integer_valued_real_p) (X, depth + 1))
15710 /* Return true if the floating point result of (CODE OP0) has an
15711 integer value. We also allow +Inf, -Inf and NaN to be considered
15712 integer values. Return false for signaling NaN.
15714 DEPTH is the current nesting depth of the query. */
15716 bool
15717 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15719 switch (code)
15721 case FLOAT_EXPR:
15722 return true;
15724 case ABS_EXPR:
15725 return RECURSE (op0);
15727 CASE_CONVERT:
15729 tree type = TREE_TYPE (op0);
15730 if (TREE_CODE (type) == INTEGER_TYPE)
15731 return true;
15732 if (SCALAR_FLOAT_TYPE_P (type))
15733 return RECURSE (op0);
15734 break;
15737 default:
15738 break;
15740 return false;
15743 /* Return true if the floating point result of (CODE OP0 OP1) has an
15744 integer value. We also allow +Inf, -Inf and NaN to be considered
15745 integer values. Return false for signaling NaN.
15747 DEPTH is the current nesting depth of the query. */
15749 bool
15750 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15752 switch (code)
15754 case PLUS_EXPR:
15755 case MINUS_EXPR:
15756 case MULT_EXPR:
15757 case MIN_EXPR:
15758 case MAX_EXPR:
15759 return RECURSE (op0) && RECURSE (op1);
15761 default:
15762 break;
15764 return false;
15767 /* Return true if the floating point result of calling FNDECL with arguments
15768 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15769 considered integer values. Return false for signaling NaN. If FNDECL
15770 takes fewer than 2 arguments, the remaining ARGn are null.
15772 DEPTH is the current nesting depth of the query. */
15774 bool
15775 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15777 switch (fn)
15779 CASE_CFN_CEIL:
15780 CASE_CFN_CEIL_FN:
15781 CASE_CFN_FLOOR:
15782 CASE_CFN_FLOOR_FN:
15783 CASE_CFN_NEARBYINT:
15784 CASE_CFN_NEARBYINT_FN:
15785 CASE_CFN_RINT:
15786 CASE_CFN_RINT_FN:
15787 CASE_CFN_ROUND:
15788 CASE_CFN_ROUND_FN:
15789 CASE_CFN_ROUNDEVEN:
15790 CASE_CFN_ROUNDEVEN_FN:
15791 CASE_CFN_TRUNC:
15792 CASE_CFN_TRUNC_FN:
15793 return true;
15795 CASE_CFN_FMIN:
15796 CASE_CFN_FMIN_FN:
15797 CASE_CFN_FMAX:
15798 CASE_CFN_FMAX_FN:
15799 return RECURSE (arg0) && RECURSE (arg1);
15801 default:
15802 break;
15804 return false;
15807 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15808 has an integer value. We also allow +Inf, -Inf and NaN to be
15809 considered integer values. Return false for signaling NaN.
15811 DEPTH is the current nesting depth of the query. */
15813 bool
15814 integer_valued_real_single_p (tree t, int depth)
15816 switch (TREE_CODE (t))
15818 case REAL_CST:
15819 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15821 case COND_EXPR:
15822 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15824 case SSA_NAME:
15825 /* Limit the depth of recursion to avoid quadratic behavior.
15826 This is expected to catch almost all occurrences in practice.
15827 If this code misses important cases that unbounded recursion
15828 would not, passes that need this information could be revised
15829 to provide it through dataflow propagation. */
15830 return (!name_registered_for_update_p (t)
15831 && depth < param_max_ssa_name_query_depth
15832 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15833 depth));
15835 default:
15836 break;
15838 return false;
15841 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15842 has an integer value. We also allow +Inf, -Inf and NaN to be
15843 considered integer values. Return false for signaling NaN.
15845 DEPTH is the current nesting depth of the query. */
15847 static bool
15848 integer_valued_real_invalid_p (tree t, int depth)
15850 switch (TREE_CODE (t))
15852 case COMPOUND_EXPR:
15853 case MODIFY_EXPR:
15854 case BIND_EXPR:
15855 return RECURSE (TREE_OPERAND (t, 1));
15857 case SAVE_EXPR:
15858 return RECURSE (TREE_OPERAND (t, 0));
15860 default:
15861 break;
15863 return false;
15866 #undef RECURSE
15867 #undef integer_valued_real_p
15869 /* Return true if the floating point expression T has an integer value.
15870 We also allow +Inf, -Inf and NaN to be considered integer values.
15871 Return false for signaling NaN.
15873 DEPTH is the current nesting depth of the query. */
15875 bool
15876 integer_valued_real_p (tree t, int depth)
15878 if (t == error_mark_node)
15879 return false;
15881 STRIP_ANY_LOCATION_WRAPPER (t);
15883 tree_code code = TREE_CODE (t);
15884 switch (TREE_CODE_CLASS (code))
15886 case tcc_binary:
15887 case tcc_comparison:
15888 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15889 TREE_OPERAND (t, 1), depth);
15891 case tcc_unary:
15892 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15894 case tcc_constant:
15895 case tcc_declaration:
15896 case tcc_reference:
15897 return integer_valued_real_single_p (t, depth);
15899 default:
15900 break;
15903 switch (code)
15905 case COND_EXPR:
15906 case SSA_NAME:
15907 return integer_valued_real_single_p (t, depth);
15909 case CALL_EXPR:
15911 tree arg0 = (call_expr_nargs (t) > 0
15912 ? CALL_EXPR_ARG (t, 0)
15913 : NULL_TREE);
15914 tree arg1 = (call_expr_nargs (t) > 1
15915 ? CALL_EXPR_ARG (t, 1)
15916 : NULL_TREE);
15917 return integer_valued_real_call_p (get_call_combined_fn (t),
15918 arg0, arg1, depth);
15921 default:
15922 return integer_valued_real_invalid_p (t, depth);
15926 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15927 attempt to fold the expression to a constant without modifying TYPE,
15928 OP0 or OP1.
15930 If the expression could be simplified to a constant, then return
15931 the constant. If the expression would not be simplified to a
15932 constant, then return NULL_TREE. */
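/* Illustrative sketch (not part of this file):

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree c = fold_binary_to_constant (MULT_EXPR, integer_type_node, a, b);

   C is the INTEGER_CST 42, whereas folding 6 * x for a variable X
   would yield NULL_TREE because the result is not constant.  */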
15934 tree
15935 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15937 tree tem = fold_binary (code, type, op0, op1);
15938 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15941 /* Given the components of a unary expression CODE, TYPE and OP0,
15942 attempt to fold the expression to a constant without modifying
15943 TYPE or OP0.
15945 If the expression could be simplified to a constant, then return
15946 the constant. If the expression would not be simplified to a
15947 constant, then return NULL_TREE. */
15949 tree
15950 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15952 tree tem = fold_unary (code, type, op0);
15953 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15956 /* If EXP represents referencing an element in a constant string
15957 (either via pointer arithmetic or array indexing), return the
15958 tree representing the value accessed, otherwise return NULL. */
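/* For example (illustrative), given

     static const char greet[] = "hi";

   the ARRAY_REF greet[1] folds to the INTEGER_CST 'i', and the
   equivalent pointer form *(greet + 1) is handled via string_constant
   below.  */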
15960 tree
15961 fold_read_from_constant_string (tree exp)
15963 if ((INDIRECT_REF_P (exp)
15964 || TREE_CODE (exp) == ARRAY_REF)
15965 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15967 tree exp1 = TREE_OPERAND (exp, 0);
15968 tree index;
15969 tree string;
15970 location_t loc = EXPR_LOCATION (exp);
15972 if (INDIRECT_REF_P (exp))
15973 string = string_constant (exp1, &index, NULL, NULL);
15974 else
15976 tree low_bound = array_ref_low_bound (exp);
15977 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15979 /* Optimize the special-case of a zero lower bound.
15981 We convert the low_bound to sizetype to avoid some problems
15982 with constant folding. (E.g. suppose the lower bound is 1,
15983 and its mode is QI. Without the conversion, (ARRAY
15984 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15985 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15986 if (! integer_zerop (low_bound))
15987 index = size_diffop_loc (loc, index,
15988 fold_convert_loc (loc, sizetype, low_bound));
15990 string = exp1;
15993 scalar_int_mode char_mode;
15994 if (string
15995 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15996 && TREE_CODE (string) == STRING_CST
15997 && tree_fits_uhwi_p (index)
15998 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15999 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
16000 &char_mode)
16001 && GET_MODE_SIZE (char_mode) == 1)
16002 return build_int_cst_type (TREE_TYPE (exp),
16003 (TREE_STRING_POINTER (string)
16004 [TREE_INT_CST_LOW (index)]));
16006 return NULL;
16009 /* Folds a read from vector element at IDX of vector ARG. */
16011 tree
16012 fold_read_from_vector (tree arg, poly_uint64 idx)
16014 unsigned HOST_WIDE_INT i;
16015 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
16016 && known_ge (idx, 0u)
16017 && idx.is_constant (&i))
16019 if (TREE_CODE (arg) == VECTOR_CST)
16020 return VECTOR_CST_ELT (arg, i);
16021 else if (TREE_CODE (arg) == CONSTRUCTOR)
16023 if (CONSTRUCTOR_NELTS (arg)
16024 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
16025 return NULL_TREE;
16026 if (i >= CONSTRUCTOR_NELTS (arg))
16027 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
16028 return CONSTRUCTOR_ELT (arg, i)->value;
16031 return NULL_TREE;
16034 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16035 an integer constant, real, or fixed-point constant.
16037 TYPE is the type of the result. */
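/* E.g. negating the INTEGER_CST INT_MIN in a signed 32-bit type wraps
   back to INT_MIN; force_fit_type is then called with the overflow
   flag set, so the result carries TREE_OVERFLOW.  */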
16039 static tree
16040 fold_negate_const (tree arg0, tree type)
16042 tree t = NULL_TREE;
16044 switch (TREE_CODE (arg0))
16046 case REAL_CST:
16047 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16048 break;
16050 case FIXED_CST:
16052 FIXED_VALUE_TYPE f;
16053 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16054 &(TREE_FIXED_CST (arg0)), NULL,
16055 TYPE_SATURATING (type));
16056 t = build_fixed (type, f);
16057 /* Propagate overflow flags. */
16058 if (overflow_p | TREE_OVERFLOW (arg0))
16059 TREE_OVERFLOW (t) = 1;
16060 break;
16063 default:
16064 if (poly_int_tree_p (arg0))
16066 wi::overflow_type overflow;
16067 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
16068 t = force_fit_type (type, res, 1,
16069 (overflow && ! TYPE_UNSIGNED (type))
16070 || TREE_OVERFLOW (arg0));
16071 break;
16074 gcc_unreachable ();
16077 return t;
16080 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16081 an integer constant or real constant.
16083 TYPE is the type of the result. */
16085 tree
16086 fold_abs_const (tree arg0, tree type)
16088 tree t = NULL_TREE;
16090 switch (TREE_CODE (arg0))
16092 case INTEGER_CST:
16094 /* If the value is unsigned or non-negative, then the absolute value
16095 is the same as the ordinary value. */
16096 wide_int val = wi::to_wide (arg0);
16097 wi::overflow_type overflow = wi::OVF_NONE;
16098 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
16101 /* If the value is negative, then the absolute value is
16102 its negation. */
16103 else
16104 val = wi::neg (val, &overflow);
16106 /* Force to the destination type, set TREE_OVERFLOW for signed
16107 TYPE only. */
16108 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
16110 break;
16112 case REAL_CST:
16113 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16114 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16115 else
16116 t = arg0;
16117 break;
16119 default:
16120 gcc_unreachable ();
16123 return t;
16126 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16127 constant. TYPE is the type of the result. */
16129 static tree
16130 fold_not_const (const_tree arg0, tree type)
16132 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16134 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
16137 /* Given CODE, a relational operator, the target type, TYPE and two
16138 constant operands OP0 and OP1, return the result of the
16139 relational operation. If the result is not a compile time
16140 constant, then return NULL_TREE. */
16142 static tree
16143 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16145 int result, invert;
16147 /* From here on, the only cases we handle are when the result is
16148 known to be a constant. */
16150 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16152 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16153 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16155 /* Handle the cases where either operand is a NaN. */
16156 if (real_isnan (c0) || real_isnan (c1))
16158 switch (code)
16160 case EQ_EXPR:
16161 case ORDERED_EXPR:
16162 result = 0;
16163 break;
16165 case NE_EXPR:
16166 case UNORDERED_EXPR:
16167 case UNLT_EXPR:
16168 case UNLE_EXPR:
16169 case UNGT_EXPR:
16170 case UNGE_EXPR:
16171 case UNEQ_EXPR:
16172 result = 1;
16173 break;
16175 case LT_EXPR:
16176 case LE_EXPR:
16177 case GT_EXPR:
16178 case GE_EXPR:
16179 case LTGT_EXPR:
16180 if (flag_trapping_math)
16181 return NULL_TREE;
16182 result = 0;
16183 break;
16185 default:
16186 gcc_unreachable ();
16189 return constant_boolean_node (result, type);
16192 return constant_boolean_node (real_compare (code, c0, c1), type);
16195 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16197 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16198 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16199 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16202 /* Handle equality/inequality of complex constants. */
16203 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16205 tree rcond = fold_relational_const (code, type,
16206 TREE_REALPART (op0),
16207 TREE_REALPART (op1));
16208 tree icond = fold_relational_const (code, type,
16209 TREE_IMAGPART (op0),
16210 TREE_IMAGPART (op1));
16211 if (code == EQ_EXPR)
16212 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16213 else if (code == NE_EXPR)
16214 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16215 else
16216 return NULL_TREE;
16219 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16221 if (!VECTOR_TYPE_P (type))
16223 /* Have vector comparison with scalar boolean result. */
16224 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16225 && known_eq (VECTOR_CST_NELTS (op0),
16226 VECTOR_CST_NELTS (op1)));
16227 unsigned HOST_WIDE_INT nunits;
16228 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16229 return NULL_TREE;
16230 for (unsigned i = 0; i < nunits; i++)
16232 tree elem0 = VECTOR_CST_ELT (op0, i);
16233 tree elem1 = VECTOR_CST_ELT (op1, i);
16234 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16235 if (tmp == NULL_TREE)
16236 return NULL_TREE;
16237 if (integer_zerop (tmp))
16238 return constant_boolean_node (code == NE_EXPR, type);
16240 return constant_boolean_node (code == EQ_EXPR, type);
16242 tree_vector_builder elts;
16243 if (!elts.new_binary_operation (type, op0, op1, false))
16244 return NULL_TREE;
16245 unsigned int count = elts.encoded_nelts ();
16246 for (unsigned i = 0; i < count; i++)
16248 tree elem_type = TREE_TYPE (type);
16249 tree elem0 = VECTOR_CST_ELT (op0, i);
16250 tree elem1 = VECTOR_CST_ELT (op1, i);
16252 tree tem = fold_relational_const (code, elem_type,
16253 elem0, elem1);
16255 if (tem == NULL_TREE)
16256 return NULL_TREE;
16258 elts.quick_push (build_int_cst (elem_type,
16259 integer_zerop (tem) ? 0 : -1));
16262 return elts.build ();
16265 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16267 To compute GT, swap the arguments and do LT.
16268 To compute GE, do LT and invert the result.
16269 To compute LE, swap the arguments, do LT and invert the result.
16270 To compute NE, do EQ and invert the result.
16272 Therefore, the code below must handle only EQ and LT. */
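/* E.g. 3 >= 5 is computed as !(3 < 5): GE_EXPR is inverted to
   LT_EXPR, tree_int_cst_lt (3, 5) yields 1, and the final XOR with
   INVERT flips that to 0, i.e. a false boolean node.  */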
16274 if (code == LE_EXPR || code == GT_EXPR)
16276 std::swap (op0, op1);
16277 code = swap_tree_comparison (code);
16280 /* Note that it is safe to invert for real values here because we
16281 have already handled the one case where it matters (NaN operands). */
16283 invert = 0;
16284 if (code == NE_EXPR || code == GE_EXPR)
16286 invert = 1;
16287 code = invert_tree_comparison (code, false);
16290 /* Compute a result for LT or EQ if args permit;
16291 otherwise return NULL_TREE. */
16292 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16294 if (code == EQ_EXPR)
16295 result = tree_int_cst_equal (op0, op1);
16296 else
16297 result = tree_int_cst_lt (op0, op1);
16299 else
16300 return NULL_TREE;
16302 if (invert)
16303 result ^= 1;
16304 return constant_boolean_node (result, type);
16307 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16308 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16309 itself. */
16311 tree
16312 fold_build_cleanup_point_expr (tree type, tree expr)
16314 /* If the expression does not have side effects then we don't have to wrap
16315 it with a cleanup point expression. */
16316 if (!TREE_SIDE_EFFECTS (expr))
16317 return expr;
16319 /* If the expression is a return, check whether the expression inside the
16320 return, or the right-hand side of the modify expression inside the
16321 return, has side effects. If either has none, we don't need to
16322 wrap the expression in a cleanup point expression. Note we don't check the
16323 left-hand side of the modify because it should always be the return decl. */
16324 if (TREE_CODE (expr) == RETURN_EXPR)
16326 tree op = TREE_OPERAND (expr, 0);
16327 if (!op || !TREE_SIDE_EFFECTS (op))
16328 return expr;
16329 op = TREE_OPERAND (op, 1);
16330 if (!TREE_SIDE_EFFECTS (op))
16331 return expr;
16334 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
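/* For example, for "return x;" the right-hand side of the implied
   MODIFY_EXPR has no side effects, so the RETURN_EXPR is returned
   unwrapped; for "return f();" the call has side effects, so the
   expression is wrapped in a CLEANUP_POINT_EXPR. */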
16337 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16338 of an indirection through OP0, or NULL_TREE if no simplification is
16339 possible. */
16341 tree
16342 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16344 tree sub = op0;
16345 tree subtype;
16346 poly_uint64 const_op01;
16348 STRIP_NOPS (sub);
16349 subtype = TREE_TYPE (sub);
16350 if (!POINTER_TYPE_P (subtype)
16351 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16352 return NULL_TREE;
16354 if (TREE_CODE (sub) == ADDR_EXPR)
16356 tree op = TREE_OPERAND (sub, 0);
16357 tree optype = TREE_TYPE (op);
16359 /* *&CONST_DECL -> the value of the const decl. */
16360 if (TREE_CODE (op) == CONST_DECL)
16361 return DECL_INITIAL (op);
16362 /* *&p => p; make sure to handle *&"str"[cst] here. */
16363 if (type == optype)
16365 tree fop = fold_read_from_constant_string (op);
16366 if (fop)
16367 return fop;
16368 else
16369 return op;
16371 /* *(foo *)&fooarray => fooarray[0] */
16372 else if (TREE_CODE (optype) == ARRAY_TYPE
16373 && type == TREE_TYPE (optype)
16374 && (!in_gimple_form
16375 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16377 tree type_domain = TYPE_DOMAIN (optype);
16378 tree min_val = size_zero_node;
16379 if (type_domain && TYPE_MIN_VALUE (type_domain))
16380 min_val = TYPE_MIN_VALUE (type_domain);
16381 if (in_gimple_form
16382 && TREE_CODE (min_val) != INTEGER_CST)
16383 return NULL_TREE;
16384 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16385 NULL_TREE, NULL_TREE);
16387 /* *(foo *)&complexfoo => __real__ complexfoo */
16388 else if (TREE_CODE (optype) == COMPLEX_TYPE
16389 && type == TREE_TYPE (optype))
16390 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16391 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16392 else if (VECTOR_TYPE_P (optype)
16393 && type == TREE_TYPE (optype))
16395 tree part_width = TYPE_SIZE (type);
16396 tree index = bitsize_int (0);
16397 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16398 index);
16402 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16403 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16405 tree op00 = TREE_OPERAND (sub, 0);
16406 tree op01 = TREE_OPERAND (sub, 1);
16408 STRIP_NOPS (op00);
16409 if (TREE_CODE (op00) == ADDR_EXPR)
16411 tree op00type;
16412 op00 = TREE_OPERAND (op00, 0);
16413 op00type = TREE_TYPE (op00);
16415 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16416 if (VECTOR_TYPE_P (op00type)
16417 && type == TREE_TYPE (op00type)
16418 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16419 but we want to treat offsets with MSB set as negative.
16420 For the code below negative offsets are invalid and
16421 TYPE_SIZE of the element is something unsigned, so
16422 check whether op01 fits into poly_int64, which implies
16423 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16424 then just use poly_uint64 because we want to treat the
16425 value as unsigned. */
16426 && tree_fits_poly_int64_p (op01))
16428 tree part_width = TYPE_SIZE (type);
16429 poly_uint64 max_offset
16430 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16431 * TYPE_VECTOR_SUBPARTS (op00type));
16432 if (known_lt (const_op01, max_offset))
16434 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16435 return fold_build3_loc (loc,
16436 BIT_FIELD_REF, type, op00,
16437 part_width, index);
16440 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16441 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16442 && type == TREE_TYPE (op00type))
16444 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16445 const_op01))
16446 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16448 /* ((foo *)&fooarray)[1] => fooarray[1] */
16449 else if (TREE_CODE (op00type) == ARRAY_TYPE
16450 && type == TREE_TYPE (op00type))
16452 tree type_domain = TYPE_DOMAIN (op00type);
16453 tree min_val = size_zero_node;
16454 if (type_domain && TYPE_MIN_VALUE (type_domain))
16455 min_val = TYPE_MIN_VALUE (type_domain);
16456 poly_uint64 type_size, index;
16457 if (poly_int_tree_p (min_val)
16458 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16459 && multiple_p (const_op01, type_size, &index))
16461 poly_offset_int off = index + wi::to_poly_offset (min_val);
16462 op01 = wide_int_to_tree (sizetype, off);
16463 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16464 NULL_TREE, NULL_TREE);
16470 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16471 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16472 && type == TREE_TYPE (TREE_TYPE (subtype))
16473 && (!in_gimple_form
16474 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16476 tree type_domain;
16477 tree min_val = size_zero_node;
16478 sub = build_fold_indirect_ref_loc (loc, sub);
16479 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16480 if (type_domain && TYPE_MIN_VALUE (type_domain))
16481 min_val = TYPE_MIN_VALUE (type_domain);
16482 if (in_gimple_form
16483 && TREE_CODE (min_val) != INTEGER_CST)
16484 return NULL_TREE;
16485 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16486 NULL_TREE);
16489 return NULL_TREE;
16492 /* Builds an expression for an indirection through T, simplifying some
16493 cases. */
16495 tree
16496 build_fold_indirect_ref_loc (location_t loc, tree t)
16498 tree type = TREE_TYPE (TREE_TYPE (t));
16499 tree sub = fold_indirect_ref_1 (loc, type, t);
16501 if (sub)
16502 return sub;
16504 return build1_loc (loc, INDIRECT_REF, type, t);
16507 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16509 tree
16510 fold_indirect_ref_loc (location_t loc, tree t)
16512 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16514 if (sub)
16515 return sub;
16516 else
16517 return t;
16520 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16521 whose result is ignored. The type of the returned tree need not be
16522 the same as the original expression. */
16524 tree
16525 fold_ignored_result (tree t)
16527 if (!TREE_SIDE_EFFECTS (t))
16528 return integer_zero_node;
16530 for (;;)
16531 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16533 case tcc_unary:
16534 t = TREE_OPERAND (t, 0);
16535 break;
16537 case tcc_binary:
16538 case tcc_comparison:
16539 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16540 t = TREE_OPERAND (t, 0);
16541 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16542 t = TREE_OPERAND (t, 1);
16543 else
16544 return t;
16545 break;
16547 case tcc_expression:
16548 switch (TREE_CODE (t))
16550 case COMPOUND_EXPR:
16551 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16552 return t;
16553 t = TREE_OPERAND (t, 0);
16554 break;
16556 case COND_EXPR:
16557 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16558 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16559 return t;
16560 t = TREE_OPERAND (t, 0);
16561 break;
16563 default:
16564 return t;
16566 break;
16568 default:
16569 return t;
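/* For example, with the result of "(x + f (), y)" ignored: y has no
   side effects, so we descend into "x + f ()"; there x has no side
   effects either, so the whole expression reduces to the call
   "f ()", whose side effects must be preserved. */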
16573 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16575 tree
16576 round_up_loc (location_t loc, tree value, unsigned int divisor)
16578 tree div = NULL_TREE;
16580 if (divisor == 1)
16581 return value;
16583 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16584 have to do anything. Only do this when we are not given a const,
16585 because in that case, this check is more expensive than just
16586 doing it. */
16587 if (TREE_CODE (value) != INTEGER_CST)
16589 div = build_int_cst (TREE_TYPE (value), divisor);
16591 if (multiple_of_p (TREE_TYPE (value), value, div))
16592 return value;
16595 /* If divisor is a power of two, simplify this to bit manipulation. */
16596 if (pow2_or_zerop (divisor))
16598 if (TREE_CODE (value) == INTEGER_CST)
16600 wide_int val = wi::to_wide (value);
16601 bool overflow_p;
16603 if ((val & (divisor - 1)) == 0)
16604 return value;
16606 overflow_p = TREE_OVERFLOW (value);
16607 val += divisor - 1;
16608 val &= (int) -divisor;
16609 if (val == 0)
16610 overflow_p = true;
16612 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16614 else
16616 tree t;
16618 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16619 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16620 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16621 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16624 else
16626 if (!div)
16627 div = build_int_cst (TREE_TYPE (value), divisor);
16628 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16629 value = size_binop_loc (loc, MULT_EXPR, value, div);
16632 return value;
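/* Worked example for the power-of-two path: rounding 13 up to a
   multiple of 8 computes (13 + 7) & -8 = 16. For a non-power-of-two
   divisor such as 6, 13 becomes CEIL_DIV (13, 6) * 6 = 3 * 6 = 18. */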
16635 /* Likewise, but round down. */
16637 tree
16638 round_down_loc (location_t loc, tree value, int divisor)
16640 tree div = NULL_TREE;
16642 gcc_assert (divisor > 0);
16643 if (divisor == 1)
16644 return value;
16646 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16647 have to do anything. Only do this when we are not given a const,
16648 because in that case, this check is more expensive than just
16649 doing it. */
16650 if (TREE_CODE (value) != INTEGER_CST)
16652 div = build_int_cst (TREE_TYPE (value), divisor);
16654 if (multiple_of_p (TREE_TYPE (value), value, div))
16655 return value;
16658 /* If divisor is a power of two, simplify this to bit manipulation. */
16659 if (pow2_or_zerop (divisor))
16661 tree t;
16663 t = build_int_cst (TREE_TYPE (value), -divisor);
16664 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16666 else
16668 if (!div)
16669 div = build_int_cst (TREE_TYPE (value), divisor);
16670 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16671 value = size_binop_loc (loc, MULT_EXPR, value, div);
16674 return value;
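/* Worked example: rounding 13 down to a multiple of 8 computes
   13 & -8 = 8; for a divisor of 6 it computes
   FLOOR_DIV (13, 6) * 6 = 2 * 6 = 12. */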
16677 /* Returns the pointer to the base of the object addressed by EXP and
16678 extracts the information about the offset of the access, storing it
16679 to PBITPOS and POFFSET. */
16681 static tree
16682 split_address_to_core_and_offset (tree exp,
16683 poly_int64 *pbitpos, tree *poffset)
16685 tree core;
16686 machine_mode mode;
16687 int unsignedp, reversep, volatilep;
16688 poly_int64 bitsize;
16689 location_t loc = EXPR_LOCATION (exp);
16691 if (TREE_CODE (exp) == SSA_NAME)
16692 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16693 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16694 exp = gimple_assign_rhs1 (def);
16696 if (TREE_CODE (exp) == ADDR_EXPR)
16698 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16699 poffset, &mode, &unsignedp, &reversep,
16700 &volatilep);
16701 core = build_fold_addr_expr_loc (loc, core);
16703 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16705 core = TREE_OPERAND (exp, 0);
16706 STRIP_NOPS (core);
16707 *pbitpos = 0;
16708 *poffset = TREE_OPERAND (exp, 1);
16709 if (poly_int_tree_p (*poffset))
16711 poly_offset_int tem
16712 = wi::sext (wi::to_poly_offset (*poffset),
16713 TYPE_PRECISION (TREE_TYPE (*poffset)));
16714 tem <<= LOG2_BITS_PER_UNIT;
16715 if (tem.to_shwi (pbitpos))
16716 *poffset = NULL_TREE;
16719 else
16721 core = exp;
16722 *pbitpos = 0;
16723 *poffset = NULL_TREE;
16726 return core;
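/* For example (with hypothetical operands and BITS_PER_UNIT == 8):
   for EXP = &s.f, where field f lives at byte offset 4 in s, the
   returned core is &s, *PBITPOS is 32 and *POFFSET is NULL_TREE.
   For a POINTER_PLUS_EXPR adding 16 to a pointer p, the core is p
   and *PBITPOS is 128. */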
16729 /* Returns true if addresses of E1 and E2 differ by a constant, false
16730 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16732 bool
16733 ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16735 tree core1, core2;
16736 poly_int64 bitpos1, bitpos2;
16737 tree toffset1, toffset2, tdiff, type;
16739 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16740 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16742 poly_int64 bytepos1, bytepos2;
16743 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16744 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16745 || !operand_equal_p (core1, core2, 0))
16746 return false;
16748 if (toffset1 && toffset2)
16750 type = TREE_TYPE (toffset1);
16751 if (type != TREE_TYPE (toffset2))
16752 toffset2 = fold_convert (type, toffset2);
16754 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16755 if (!cst_and_fits_in_hwi (tdiff))
16756 return false;
16758 *diff = int_cst_value (tdiff);
16760 else if (toffset1 || toffset2)
16762 /* If only one of the offsets is non-constant, the difference cannot
16763 be a constant. */
16764 return false;
16766 else
16767 *diff = 0;
16769 *diff += bytepos1 - bytepos2;
16770 return true;
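/* For example, for E1 = &a[4] and E2 = &a[1] with 4-byte elements
   (a hypothetical array), both cores are &a, the byte positions are
   16 and 4, and *DIFF is set to 12. */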
16773 /* Return OFF converted to a pointer offset type suitable as offset for
16774 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16775 tree
16776 convert_to_ptrofftype_loc (location_t loc, tree off)
16778 if (ptrofftype_p (TREE_TYPE (off)))
16779 return off;
16780 return fold_convert_loc (loc, sizetype, off);
16783 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16784 tree
16785 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16787 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16788 ptr, convert_to_ptrofftype_loc (loc, off));
16791 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16792 tree
16793 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16795 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16796 ptr, size_int (off));
16799 /* Return a pointer to a NUL-terminated string containing the sequence
16800 of bytes corresponding to the representation of the object referred to
16801 by SRC (or a subsequence of such bytes within it if SRC is a reference
16802 to an initialized constant array plus some constant offset).
16803 Set *STRSIZE to the number of bytes in the constant sequence including
16804 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16805 where A is the array that stores the constant sequence that SRC points
16806 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16807 need not point to a string or even an array of characters but may point
16808 to an object of any type. */
16810 const char *
16811 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16813 /* The offset into the array A storing the string, and A's byte size. */
16814 tree offset_node;
16815 tree mem_size;
16817 if (strsize)
16818 *strsize = 0;
16820 if (strsize)
16821 src = byte_representation (src, &offset_node, &mem_size, NULL);
16822 else
16823 src = string_constant (src, &offset_node, &mem_size, NULL);
16824 if (!src)
16825 return NULL;
16827 unsigned HOST_WIDE_INT offset = 0;
16828 if (offset_node != NULL_TREE)
16830 if (!tree_fits_uhwi_p (offset_node))
16831 return NULL;
16832 else
16833 offset = tree_to_uhwi (offset_node);
16836 if (!tree_fits_uhwi_p (mem_size))
16837 return NULL;
16839 /* ARRAY_SIZE is the byte size of the array the constant sequence
16840 is stored in and equal to sizeof A. INIT_BYTES is the number
16841 of bytes in the constant sequence used to initialize the array,
16842 including any embedded NULs as well as the terminating NUL (for
16843 strings), but not including any trailing zeros/NULs past
16844 the terminating one appended implicitly to a string literal to
16845 zero out the remainder of the array it's stored in. For example,
16846 given:
16847 const char a[7] = "abc\0d";
16848 n = strlen (a + 1);
16849 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16850 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16851 is equal to strlen (A) + 1. */
16852 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16853 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16854 const char *string = TREE_STRING_POINTER (src);
16856 /* Ideally this would turn into a gcc_checking_assert over time. */
16857 if (init_bytes > array_size)
16858 init_bytes = array_size;
16860 if (init_bytes == 0 || offset >= array_size)
16861 return NULL;
16863 if (strsize)
16865 /* Compute and store the number of characters from the beginning
16866 of the substring at OFFSET to the end, including the terminating
16867 nul. Offsets past the initial length refer to null strings. */
16868 if (offset < init_bytes)
16869 *strsize = init_bytes - offset;
16870 else
16871 *strsize = 1;
16873 else
16875 tree eltype = TREE_TYPE (TREE_TYPE (src));
16876 /* Support only properly NUL-terminated single byte strings. */
16877 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16878 return NULL;
16879 if (string[init_bytes - 1] != '\0')
16880 return NULL;
16883 return offset < init_bytes ? string + offset : "";
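/* Continuing the example above: for SRC = &a[1] with
   const char a[7] = "abc\0d", getbyterep returns a pointer to "bc"
   and sets *STRSIZE to 6 - 1 = 5 (the bytes "bc\0d" plus the
   implicit trailing NUL). */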
16886 /* Return a pointer to a NUL-terminated string corresponding to
16887 the expression STR referencing a constant string, possibly
16888 involving a constant offset. Return null if STR either doesn't
16889 reference a constant string or if it involves a nonconstant
16890 offset. */
16892 const char *
16893 c_getstr (tree str)
16895 return getbyterep (str, NULL);
16898 /* Given a tree T, compute which bits in T may be nonzero. */
16900 wide_int
16901 tree_nonzero_bits (const_tree t)
16903 switch (TREE_CODE (t))
16905 case INTEGER_CST:
16906 return wi::to_wide (t);
16907 case SSA_NAME:
16908 return get_nonzero_bits (t);
16909 case NON_LVALUE_EXPR:
16910 case SAVE_EXPR:
16911 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16912 case BIT_AND_EXPR:
16913 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16914 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16915 case BIT_IOR_EXPR:
16916 case BIT_XOR_EXPR:
16917 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16918 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16919 case COND_EXPR:
16920 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16921 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16922 CASE_CONVERT:
16923 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16924 TYPE_PRECISION (TREE_TYPE (t)),
16925 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16926 case PLUS_EXPR:
16927 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16929 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16930 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16931 if (wi::bit_and (nzbits1, nzbits2) == 0)
16932 return wi::bit_or (nzbits1, nzbits2);
16934 break;
16935 case LSHIFT_EXPR:
16936 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16938 tree type = TREE_TYPE (t);
16939 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16940 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16941 TYPE_PRECISION (type));
16942 return wi::neg_p (arg1)
16943 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16944 : wi::lshift (nzbits, arg1);
16946 break;
16947 case RSHIFT_EXPR:
16948 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16950 tree type = TREE_TYPE (t);
16951 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16952 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16953 TYPE_PRECISION (type));
16954 return wi::neg_p (arg1)
16955 ? wi::lshift (nzbits, -arg1)
16956 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16958 break;
16959 default:
16960 break;
16963 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
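/* Example: assuming nothing is known about x, the mask for
   T = (x & 0xF0) + 3 combines the disjoint operand masks 0xF0 and
   0x3 into 0xF3; for T = y << 4 with y's nonzero bits known to be
   0xF, the result is 0xF0. */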
16966 /* Helper function for address compare simplifications in match.pd.
16967 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16968 TYPE is the type of comparison operands.
16969 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16970 GENERIC is true if GENERIC folding and false for GIMPLE folding.
16971 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16972 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16973 and 2 if unknown. */
16975 int
16976 address_compare (tree_code code, tree type, tree op0, tree op1,
16977 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16978 bool generic)
16980 if (TREE_CODE (op0) == SSA_NAME)
16981 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
16982 if (TREE_CODE (op1) == SSA_NAME)
16983 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
16984 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16985 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16986 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16987 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16988 if (base0 && TREE_CODE (base0) == MEM_REF)
16990 off0 += mem_ref_offset (base0).force_shwi ();
16991 base0 = TREE_OPERAND (base0, 0);
16993 if (base1 && TREE_CODE (base1) == MEM_REF)
16995 off1 += mem_ref_offset (base1).force_shwi ();
16996 base1 = TREE_OPERAND (base1, 0);
16998 if (base0 == NULL_TREE || base1 == NULL_TREE)
16999 return 2;
17001 int equal = 2;
17002 /* Punt in GENERIC on variables with value expressions;
17003 the value expressions might point to fields/elements
17004 of other vars etc. */
17005 if (generic
17006 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
17007 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
17008 return 2;
17009 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
17011 symtab_node *node0 = symtab_node::get_create (base0);
17012 symtab_node *node1 = symtab_node::get_create (base1);
17013 equal = node0->equal_address_to (node1);
17015 else if ((DECL_P (base0)
17016 || TREE_CODE (base0) == SSA_NAME
17017 || TREE_CODE (base0) == STRING_CST)
17018 && (DECL_P (base1)
17019 || TREE_CODE (base1) == SSA_NAME
17020 || TREE_CODE (base1) == STRING_CST))
17021 equal = (base0 == base1);
17022 /* Assume different STRING_CSTs with the same content will be
17023 merged. */
17024 if (equal == 0
17025 && TREE_CODE (base0) == STRING_CST
17026 && TREE_CODE (base1) == STRING_CST
17027 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
17028 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
17029 TREE_STRING_LENGTH (base0)) == 0)
17030 equal = 1;
17031 if (equal == 1)
17033 if (code == EQ_EXPR
17034 || code == NE_EXPR
17035 /* If the offsets are equal we can ignore overflow. */
17036 || known_eq (off0, off1)
17037 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
17038 /* Or if we compare using pointers to decls or strings. */
17039 || (POINTER_TYPE_P (type)
17040 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
17041 return 1;
17042 return 2;
17044 if (equal != 0)
17045 return equal;
17046 if (code != EQ_EXPR && code != NE_EXPR)
17047 return 2;
17049 /* At this point we know (or assume) the two pointers point at
17050 different objects. */
17051 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
17052 off0.is_constant (&ioff0);
17053 off1.is_constant (&ioff1);
17054 /* Punt on non-zero offsets from functions. */
17055 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
17056 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
17057 return 2;
17058 /* Or if the bases are neither decls nor string literals. */
17059 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
17060 return 2;
17061 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
17062 return 2;
17063 /* For initializers, assume addresses of different functions are
17064 different. */
17065 if (folding_initializer
17066 && TREE_CODE (base0) == FUNCTION_DECL
17067 && TREE_CODE (base1) == FUNCTION_DECL)
17068 return 0;
17070 /* Compute whether one address points to the start of one
17071 object and another one to the end of another one. */
17072 poly_int64 size0 = 0, size1 = 0;
17073 if (TREE_CODE (base0) == STRING_CST)
17075 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
17076 equal = 2;
17077 else
17078 size0 = TREE_STRING_LENGTH (base0);
17080 else if (TREE_CODE (base0) == FUNCTION_DECL)
17081 size0 = 1;
17082 else
17084 tree sz0 = DECL_SIZE_UNIT (base0);
17085 if (!tree_fits_poly_int64_p (sz0))
17086 equal = 2;
17087 else
17088 size0 = tree_to_poly_int64 (sz0);
17090 if (TREE_CODE (base1) == STRING_CST)
17092 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
17093 equal = 2;
17094 else
17095 size1 = TREE_STRING_LENGTH (base1);
17097 else if (TREE_CODE (base1) == FUNCTION_DECL)
17098 size1 = 1;
17099 else
17101 tree sz1 = DECL_SIZE_UNIT (base1);
17102 if (!tree_fits_poly_int64_p (sz1))
17103 equal = 2;
17104 else
17105 size1 = tree_to_poly_int64 (sz1);
17107 if (equal == 0)
17109 /* If one offset is pointing (or could be) to the beginning of one
17110 object and the other is pointing to one past the last byte of the
17111 other object, punt. */
17112 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
17113 equal = 2;
17114 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
17115 equal = 2;
17116 /* If both offsets are the same, there are some cases we know are
17117 OK: either we know the offsets aren't zero, or we know both sizes
17118 are nonzero. */
17119 if (equal == 2
17120 && known_eq (off0, off1)
17121 && (known_ne (off0, 0)
17122 || (known_ne (size0, 0) && known_ne (size1, 0))))
17123 equal = 0;
17126 /* At this point, equal is 2 if either one or both pointers are out of
17127 bounds of their object, or one points to start of its object and the
17128 other points to end of its object. This is unspecified behavior
17129 e.g. in C++. Otherwise equal is 0. */
17130 if (folding_cxx_constexpr && equal)
17131 return equal;
17133 /* When both pointers point to string literals, the pointers might
17134 still be the same even when equal is 0, due to tail merging of the literals. */
17135 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
17137 if (ioff0 < 0
17138 || ioff1 < 0
17139 || ioff0 > TREE_STRING_LENGTH (base0)
17140 || ioff1 > TREE_STRING_LENGTH (base1))
17141 return 2;
17143 /* If the bytes in the string literals starting at the pointers
17144 differ, the pointers need to be different. */
17145 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
17146 TREE_STRING_POINTER (base1) + ioff1,
17147 MIN (TREE_STRING_LENGTH (base0) - ioff0,
17148 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
17150 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
17151 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
17152 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
17153 ioffmin) == 0)
17154 /* If even the bytes in the string literal before the
17155 pointers are the same, the string literals could be
17156 tail merged. */
17157 return 2;
17159 return 0;
17162 if (folding_cxx_constexpr)
17163 return 0;
17165 /* If this is a pointer comparison, ignore for now even
17166 valid equalities where one pointer points to the start
17167 of one object and the other to one past the end of another one. */
17168 if (!INTEGRAL_TYPE_P (type))
17169 return 0;
17171 /* Assume that string literals can't be adjacent to variables
17172 (automatic or global). */
17173 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
17174 return 0;
17176 /* Assume that automatic variables can't be adjacent to global
17177 variables. */
17178 if (is_global_var (base0) != is_global_var (base1))
17179 return 0;
17181 return equal;
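/* For example, for "&a == &b" with two distinct local variables a
   and b, the bases are different decls, so this returns 0 and the
   match.pd user can fold the equality to false. */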
17184 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
17185 tree
17186 ctor_single_nonzero_element (const_tree t)
17188 unsigned HOST_WIDE_INT idx;
17189 constructor_elt *ce;
17190 tree elt = NULL_TREE;
17192 if (TREE_CODE (t) != CONSTRUCTOR)
17193 return NULL_TREE;
17194 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17195 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17197 if (elt)
17198 return NULL_TREE;
17199 elt = ce->value;
17201 return elt;
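/* For example, for the CONSTRUCTOR { 0, 5, 0, 0 } this returns the
   element 5, while for { 1, 2, 0 } it returns NULL_TREE because two
   elements are nonzero. */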
17204 #if CHECKING_P
17206 namespace selftest {
17208 /* Helper functions for writing tests of folding trees. */
17210 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17212 static void
17213 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17214 tree constant)
17216 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17219 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
17220 wrapping WRAPPED_EXPR. */
17222 static void
17223 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17224 tree wrapped_expr)
17226 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17227 ASSERT_NE (wrapped_expr, result);
17228 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17229 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17232 /* Verify that various arithmetic binary operations are folded
17233 correctly. */
17235 static void
17236 test_arithmetic_folding ()
17238 tree type = integer_type_node;
17239 tree x = create_tmp_var_raw (type, "x");
17240 tree zero = build_zero_cst (type);
17241 tree one = build_int_cst (type, 1);
17243 /* Addition. */
17244 /* 1 <-- (0 + 1) */
17245 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17246 one);
17247 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17248 one);
17250 /* (nonlvalue)x <-- (x + 0) */
17251 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17252 x);
17254 /* Subtraction. */
17255 /* 0 <-- (x - x) */
17256 assert_binop_folds_to_const (x, MINUS_EXPR, x,
17257 zero);
17258 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17259 x);
17261 /* Multiplication. */
17262 /* 0 <-- (x * 0) */
17263 assert_binop_folds_to_const (x, MULT_EXPR, zero,
17264 zero);
17266 /* (nonlvalue)x <-- (x * 1) */
17267 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17268 x);
17271 namespace test_fold_vec_perm_cst {
17273 /* Build a VECTOR_CST corresponding to VMODE, whose encoding is
17274 given by NPATTERNS, NELTS_PER_PATTERN and STEP.
17275 Fill it with randomized elements, using rand() % THRESHOLD. */
17277 static tree
17278 build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17279 unsigned nelts_per_pattern,
17280 int step = 0, bool natural_stepped = false,
17281 int threshold = 100)
17283 tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17284 tree vectype = build_vector_type_for_mode (inner_type, vmode);
17285 tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17287 // Fill a0 for each pattern
17288 for (unsigned i = 0; i < npatterns; i++)
17289 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17291 if (nelts_per_pattern == 1)
17292 return builder.build ();
17294 // Fill a1 for each pattern
17295 for (unsigned i = 0; i < npatterns; i++)
17297 tree a1;
17298 if (natural_stepped)
17300 tree a0 = builder[i];
17301 wide_int a0_val = wi::to_wide (a0);
17302 wide_int a1_val = a0_val + step;
17303 a1 = wide_int_to_tree (inner_type, a1_val);
17305 else
17306 a1 = build_int_cst (inner_type, rand () % threshold);
17307 builder.quick_push (a1);
17309 if (nelts_per_pattern == 2)
17310 return builder.build ();
17312 for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17314 tree prev_elem = builder[i - npatterns];
17315 wide_int prev_elem_val = wi::to_wide (prev_elem);
17316 wide_int val = prev_elem_val + step;
17317 builder.quick_push (wide_int_to_tree (inner_type, val));
17320 return builder.build ();
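// Illustrative shape: with npatterns = 2, nelts_per_pattern = 3 and
// step = 1, a possible result is { a0, b0, a1, b1, a1 + 1, b1 + 1, ... },
// where a0, b0, a1, b1 are random values below THRESHOLD and each
// pattern continues by adding STEP to its previous element.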
17323 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17324 when the result is VLA. */
17326 static void
17327 validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17328 tree res, tree *expected_res)
17330 /* Actual npatterns and encoded_elts in res may be less than expected due
17331 to canonicalization. */
17332 ASSERT_TRUE (res != NULL_TREE);
17333 ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17334 ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17336 for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17337 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17340 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17341 when the result is VLS. */
17343 static void
17344 validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17346 ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17347 for (unsigned i = 0; i < expected_nelts; i++)
17348 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17351 /* Helper routine to push multiple elements into BUILDER. */
17352 template<unsigned N>
17353 static void builder_push_elems (vec_perm_builder& builder,
17354 poly_uint64 (&elems)[N])
17356 for (unsigned i = 0; i < N; i++)
17357 builder.quick_push (elems[i]);
17360 #define ARG0(index) vector_cst_elt (arg0, index)
17361 #define ARG1(index) vector_cst_elt (arg1, index)
17363 /* Test cases where result is VNx4SI and input vectors are V4SI. */
17365 static void
17366 test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17368 for (int i = 0; i < 10; i++)
17370 /* Case 1:
17371 sel = { 0, 4, 1, 5, ... }
17372 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17374 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17375 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17377 tree inner_type
17378 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17379 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17381 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17382 vec_perm_builder builder (res_len, 4, 1);
17383 poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17384 builder_push_elems (builder, mask_elems);
17386 vec_perm_indices sel (builder, 2, res_len);
17387 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17389 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17390 validate_res (4, 1, res, expected_res);
17393 /* Case 2: Same as case 1, but contains an out of bounds access which
17394 should wrap around.
17395 sel = {0, 8, 4, 12, ...} (4, 1)
17396 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17398 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17399 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17401 tree inner_type
17402 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17403 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17405 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17406 vec_perm_builder builder (res_len, 4, 1);
17407 poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17408 builder_push_elems (builder, mask_elems);
17410 vec_perm_indices sel (builder, 2, res_len);
17411 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17413 tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17414 validate_res (4, 1, res, expected_res);
17419 /* Test cases where result is V4SI and input vectors are VNx4SI. */
17421 static void
17422 test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17424 for (int i = 0; i < 10; i++)
17426 /* Case 1:
17427 sel = { 0, 1, 2, 3}
17428 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17430 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17431 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17433 tree inner_type
17434 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17435 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17437 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17438 vec_perm_builder builder (res_len, 4, 1);
17439 poly_uint64 mask_elems[] = {0, 1, 2, 3};
17440 builder_push_elems (builder, mask_elems);
17442 vec_perm_indices sel (builder, 2, res_len);
17443 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17445 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17446 validate_res_vls (res, expected_res, 4);
17449 /* Case 2: Same as Case 1, but crossing the input vectors.
17450 sel = {0, 2, 4, 6}
17451 In this case, the index 4 is ambiguous since len = 4 + 4x.
17452 Since we cannot determine at compile time which vector to
17453 choose from, this should return NULL_TREE. */
17455 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17456 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17458 tree inner_type
17459 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17460 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17462 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17463 vec_perm_builder builder (res_len, 4, 1);
17464 poly_uint64 mask_elems[] = {0, 2, 4, 6};
17465 builder_push_elems (builder, mask_elems);
17467 vec_perm_indices sel (builder, 2, res_len);
17468 const char *reason;
17469 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17471 ASSERT_TRUE (res == NULL_TREE);
17472 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17477 /* Test all input vectors. */
17479 static void
17480 test_all_nunits (machine_mode vmode)
17482 /* Test with 10 different inputs. */
17483 for (int i = 0; i < 10; i++)
17485 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17486 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17487 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17489 /* Case 1: mask = {0, ...} // (1, 1)
17490 res = { arg0[0], ... } // (1, 1) */
17492 vec_perm_builder builder (len, 1, 1);
17493 builder.quick_push (0);
17494 vec_perm_indices sel (builder, 2, len);
17495 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17496 tree expected_res[] = { ARG0(0) };
17497 validate_res (1, 1, res, expected_res);
17500 /* Case 2: mask = {len, ...} // (1, 1)
17501 res = { arg1[0], ... } // (1, 1) */
17503 vec_perm_builder builder (len, 1, 1);
17504 builder.quick_push (len);
17505 vec_perm_indices sel (builder, 2, len);
17506 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17508 tree expected_res[] = { ARG1(0) };
17509 validate_res (1, 1, res, expected_res);
17514 /* Test all vectors which contain at least 2 elements. */
17516 static void
17517 test_nunits_min_2 (machine_mode vmode)
17519 for (int i = 0; i < 10; i++)
17521 /* Case 1: mask = { 0, len, ... } // (2, 1)
17522 res = { arg0[0], arg1[0], ... } // (2, 1) */
17524 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17525 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17526 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17528 vec_perm_builder builder (len, 2, 1);
17529 poly_uint64 mask_elems[] = { 0, len };
17530 builder_push_elems (builder, mask_elems);
17532 vec_perm_indices sel (builder, 2, len);
17533 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17535 tree expected_res[] = { ARG0(0), ARG1(0) };
17536 validate_res (2, 1, res, expected_res);
17539 /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17540 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17542 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17543 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17544 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17546 vec_perm_builder builder (len, 2, 2);
17547 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17548 builder_push_elems (builder, mask_elems);
17550 vec_perm_indices sel (builder, 2, len);
17551 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17553 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17554 validate_res (2, 2, res, expected_res);
17557 /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17558 Test that the stepped sequence of the pattern selects from the
17559 same input pattern. Since the input vectors have npatterns = 2,
17560 and step (a2 - a1) = 1, step is not a multiple of npatterns
17561 in the input vector, so return NULL_TREE. */
17563 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
17564 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17565 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17567 vec_perm_builder builder (len, 1, 3);
17568 poly_uint64 mask_elems[] = { 0, 0, 1 };
17569 builder_push_elems (builder, mask_elems);
17571 vec_perm_indices sel (builder, 2, len);
17572 const char *reason;
17573 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17574 &reason);
17575 ASSERT_TRUE (res == NULL_TREE);
17576 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17579 /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17580 Test that the stepped sequence of the pattern selects from arg0.
17581 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17583 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17584 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17585 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17587 vec_perm_builder builder (len, 1, 3);
17588 poly_uint64 mask_elems[] = { len, 0, 1 };
17589 builder_push_elems (builder, mask_elems);
17591 vec_perm_indices sel (builder, 2, len);
17592 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17594 tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17595 validate_res (1, 3, res, expected_res);
17598 /* Case 6: PR111648 - a1 chooses base element from input vector arg.
17599 In this case ensure that arg has a natural stepped sequence
17600 to preserve arg's encoding.
17602 As a concrete example, consider:
17603 arg0: { -16, -9, -10, ... } // (1, 3)
17604 arg1: { -12, -5, -6, ... } // (1, 3)
17605 sel = { 0, len, len + 1, ... } // (1, 3)
17607 This will create res with following encoding:
17608 res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
17609 = { -16, -12, -5, ... }
17611 The step in the above encoding would be: (-5) - (-12) = 7,
17612 and hence res[3] would be computed as -5 + 7 = 2,
17613 instead of arg1[2], i.e., -6.
17614 Ensure that valid_mask_for_fold_vec_perm_cst returns false
17615 for this case. */
17617 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17618 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17619 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17621 vec_perm_builder builder (len, 1, 3);
17622 poly_uint64 mask_elems[] = { 0, len, len+1 };
17623 builder_push_elems (builder, mask_elems);
17625 vec_perm_indices sel (builder, 2, len);
17626 const char *reason;
17627 /* FIXME: It may happen that build_vec_cst_rand builds a natural
17628 stepped pattern, even if we didn't explicitly tell it to. So folding
17629 may not always fail, but if it does, ensure that's because arg1 does
17630 not have a natural stepped sequence (and not for some other reason). */
17631 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17632 if (res == NULL_TREE)
17633 ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
17636 /* Case 7: Same as Case 6, except that arg1 contains natural stepped
17637 sequence and thus folding should be valid for this case. */
17639 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17640 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17641 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17643 vec_perm_builder builder (len, 1, 3);
17644 poly_uint64 mask_elems[] = { 0, len, len+1 };
17645 builder_push_elems (builder, mask_elems);
17647 vec_perm_indices sel (builder, 2, len);
17648 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17650 tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
17651 validate_res (1, 3, res, expected_res);
17654 /* Case 8: Same as aarch64/sve/slp_3.c:
17655 arg0, arg1 are dup vectors.
17656 sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
17657 So res = { arg0[0], arg1[0], ... } // (2, 1)
17659 In this case, since the input vectors are dup, only the first two
17660 elements per pattern in sel are considered significant. */
17662 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17663 tree arg1 = build_vec_cst_rand (vmode, 1, 1);
17664 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17666 vec_perm_builder builder (len, 2, 3);
17667 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17668 builder_push_elems (builder, mask_elems);
17670 vec_perm_indices sel (builder, 2, len);
17671 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17673 tree expected_res[] = { ARG0(0), ARG1(0) };
17674 validate_res (2, 1, res, expected_res);
17679 /* Test all vectors which contain at least 4 elements. */
17681 static void
17682 test_nunits_min_4 (machine_mode vmode)
17684 for (int i = 0; i < 10; i++)
17686 /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17687 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17689 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17690 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17691 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17693 vec_perm_builder builder (len, 4, 1);
17694 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17695 builder_push_elems (builder, mask_elems);
17697 vec_perm_indices sel (builder, 2, len);
17698 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17700 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17701 validate_res (4, 1, res, expected_res);
17704 /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17705 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17707 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17708 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17709 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17711 vec_perm_builder builder (arg0_len, 1, 3);
17712 poly_uint64 mask_elems[] = {0, 1, 2};
17713 builder_push_elems (builder, mask_elems);
17715 vec_perm_indices sel (builder, 2, arg0_len);
17716 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17717 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17718 validate_res (1, 3, res, expected_res);
17721 /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17722 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17724 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17725 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17726 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17728 vec_perm_builder builder (len, 1, 3);
17729 poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17730 builder_push_elems (builder, mask_elems);
17732 vec_perm_indices sel (builder, 2, len);
17733 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17734 tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17735 validate_res (1, 3, res, expected_res);
17738 /* Case 4:
17739 sel = { len, 0, 2, ... } // (1, 3)
17740 This should return NULL_TREE because we cross the input vectors.
17741 To see why,
17742 let's assume len = C + Cx
17743 a1 = 0
17744 S = 2
17745 esel = arg0_len / sel_npatterns = C + Cx
17746 ae = 0 + (esel - 2) * S
17747 = 0 + (C + Cx - 2) * 2
17748 = 2(C-2) + 2Cx
17750 For C >= 4:
17751 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17752 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17753 Since q1 != qe, we cross input vectors.
17754 So return NULL_TREE. */
17756 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17757 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17758 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17760 vec_perm_builder builder (arg0_len, 1, 3);
17761 poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17762 builder_push_elems (builder, mask_elems);
17764 vec_perm_indices sel (builder, 2, arg0_len);
17765 const char *reason;
17766 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17767 ASSERT_TRUE (res == NULL_TREE);
17768 ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17771 /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17772 mask = { 0, len, 1, len + 1, ...} // (2, 2)
17773 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17775 Note that fold_vec_perm_cst will set
17776 res_npatterns = max(4, max(4, 2)) = 4
17777 However, after canonicalizing, we will end up with shape (2, 2). */
17779 tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17780 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17781 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17783 vec_perm_builder builder (len, 2, 2);
17784 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17785 builder_push_elems (builder, mask_elems);
17787 vec_perm_indices sel (builder, 2, len);
17788 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17789 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17790 validate_res (2, 2, res, expected_res);
17793 /* Case 6: Test a combination in sel, where one pattern is dup and the
17794 other is a stepped sequence.
17795 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17796 res = { arg0[0], arg0[0], arg0[0],
17797 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17799 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17800 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17801 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17803 vec_perm_builder builder (len, 2, 3);
17804 poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17805 builder_push_elems (builder, mask_elems);
17807 vec_perm_indices sel (builder, 2, len);
17808 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17810 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17811 ARG0(1), ARG0(0), ARG0(2) };
17812 validate_res (2, 3, res, expected_res);
17815 /* Case 7: PR111048: Check that we set arg_npatterns correctly,
17816 when arg0, arg1 and sel have different numbers of patterns.
17817 arg0 is of shape (1, 1)
17818 arg1 is of shape (4, 1)
17819 sel is of shape (2, 3) = {0, len, 1, len+1, 2, len+2, ...}
17821 In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17822 However,
17823 step = (len+2) - (len+1) = 1
17824 arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17825 Since step is not a multiple of arg_npatterns,
17826 valid_mask_for_fold_vec_perm_cst should return false,
17827 and thus fold_vec_perm_cst should return NULL_TREE. */
17829 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17830 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17831 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17833 vec_perm_builder builder (len, 2, 3);
17834 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17835 builder_push_elems (builder, mask_elems);
17837 vec_perm_indices sel (builder, 2, len);
17838 const char *reason;
17839 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17841 ASSERT_TRUE (res == NULL_TREE);
17842 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17845 /* Case 8: PR111754: When input vector is not a stepped sequence,
17846 check that the result is not a stepped sequence either, even
17847 if sel has a stepped sequence. */
17849 tree arg0 = build_vec_cst_rand (vmode, 1, 2);
17850 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17852 vec_perm_builder builder (len, 1, 3);
17853 poly_uint64 mask_elems[] = { 0, 1, 2 };
17854 builder_push_elems (builder, mask_elems);
17856 vec_perm_indices sel (builder, 1, len);
17857 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);
17859 tree expected_res[] = { ARG0(0), ARG0(1) };
17860 validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
17863 /* Case 9: If sel doesn't contain a stepped sequence,
17864 check that the result has the same encoding as sel, irrespective
17865 of the shape of the input vectors. */
17867 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17868 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17869 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17871 vec_perm_builder builder (len, 1, 2);
17872 poly_uint64 mask_elems[] = { 0, len };
17873 builder_push_elems (builder, mask_elems);
17875 vec_perm_indices sel (builder, 2, len);
17876 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17878 tree expected_res[] = { ARG0(0), ARG1(0) };
17879 validate_res (sel.encoding ().npatterns (),
17880 sel.encoding ().nelts_per_pattern (), res, expected_res);
17885 /* Test all vectors which contain at least 8 elements. */
17887 static void
17888 test_nunits_min_8 (machine_mode vmode)
17890 for (int i = 0; i < 10; i++)
17892 /* Case 1: sel_npatterns (4) > input npatterns (2)
17893 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17894 res: { arg0[0], arg0[0], arg0[1], arg1[0],
17895 arg0[2], arg0[0], arg0[3], arg1[0],
17896 arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17898 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17899 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17900 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17902 vec_perm_builder builder(len, 4, 3);
17903 poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17904 4, 0, 5, len };
17905 builder_push_elems (builder, mask_elems);
17907 vec_perm_indices sel (builder, 2, len);
17908 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17910 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17911 ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17912 ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17913 validate_res (4, 3, res, expected_res);
17918 /* Test vectors for which nunits[0] <= 4. */
17920 static void
17921 test_nunits_max_4 (machine_mode vmode)
17923 /* Case 1: mask = {0, 4, ...} // (1, 2)
17924 This should return NULL_TREE because the index 4 may choose
17925 from either arg0 or arg1 depending on vector length. */
17927 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17928 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17929 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17931 vec_perm_builder builder (len, 1, 2);
17932 poly_uint64 mask_elems[] = {0, 4};
17933 builder_push_elems (builder, mask_elems);
17935 vec_perm_indices sel (builder, 2, len);
17936 const char *reason;
17937 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17938 ASSERT_TRUE (res == NULL_TREE);
17939 ASSERT_TRUE (reason != NULL);
17940 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17944 #undef ARG0
17945 #undef ARG1
17947 /* Return true if SIZE is of the form C + Cx and C is a power of 2. */
17949 static bool
17950 is_simple_vla_size (poly_uint64 size)
17952 if (size.is_constant ()
17953 || !pow2p_hwi (size.coeffs[0]))
17954 return false;
17955 for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17956 if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17957 return false;
17958 return true;
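/* For example, 4 + 4x is a simple VLA size (C = 4 is a power of 2),
   whereas 6 + 6x (C not a power of 2) and a constant size of 4
   (not VLA) are not. */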
17961 /* Execute fold_vec_perm_cst unit tests. */
17963 static void
17964 test ()
17966 machine_mode vnx4si_mode = E_VOIDmode;
17967 machine_mode v4si_mode = E_VOIDmode;
17969 machine_mode vmode;
17970 FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
17972 /* Obtain modes corresponding to VNx4SI and V4SI,
17973 to call mixed mode tests below.
17974 FIXME: Is there a better way to do this? */
17975 if (GET_MODE_INNER (vmode) == SImode)
17977 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17978 if (is_simple_vla_size (nunits)
17979 && nunits.coeffs[0] == 4)
17980 vnx4si_mode = vmode;
17981 else if (known_eq (nunits, poly_uint64 (4)))
17982 v4si_mode = vmode;
17985 if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
17986 || !targetm.vector_mode_supported_p (vmode))
17987 continue;
17989 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17990 test_all_nunits (vmode);
17991 if (nunits.coeffs[0] >= 2)
17992 test_nunits_min_2 (vmode);
17993 if (nunits.coeffs[0] >= 4)
17994 test_nunits_min_4 (vmode);
17995 if (nunits.coeffs[0] >= 8)
17996 test_nunits_min_8 (vmode);
17998 if (nunits.coeffs[0] <= 4)
17999 test_nunits_max_4 (vmode);
18002 if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
18003 && targetm.vector_mode_supported_p (vnx4si_mode)
18004 && targetm.vector_mode_supported_p (v4si_mode))
18006 test_vnx4si_v4si (vnx4si_mode, v4si_mode);
18007 test_v4si_vnx4si (v4si_mode, vnx4si_mode);
18010 } // end of test_fold_vec_perm_cst namespace
18012 /* Verify that various binary operations on vectors are folded
18013 correctly. */
18015 static void
18016 test_vector_folding ()
18018 tree inner_type = integer_type_node;
18019 tree type = build_vector_type (inner_type, 4);
18020 tree zero = build_zero_cst (type);
18021 tree one = build_one_cst (type);
18022 tree index = build_index_vector (type, 0, 1);
18024 /* Verify equality tests that return a scalar boolean result. */
18025 tree res_type = boolean_type_node;
18026 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
18027 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
18028 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
18029 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
18030 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
18031 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18032 index, one)));
18033 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
18034 index, index)));
18035 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18036 index, index)));
18039 /* Verify folding of VEC_DUPLICATE_EXPRs. */
18041 static void
18042 test_vec_duplicate_folding ()
18044 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
18045 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
18046 /* This will be 1 if VEC_MODE isn't a vector mode. */
18047 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
18049 tree type = build_vector_type (ssizetype, nunits);
18050 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
18051 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
18052 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
18055 /* Run all of the selftests within this file. */
18057 void
18058 fold_const_cc_tests ()
18060 test_arithmetic_folding ();
18061 test_vector_folding ();
18062 test_vec_duplicate_folding ();
18063 test_fold_vec_perm_cst::test ();
18066 } // namespace selftest
18068 #endif /* CHECKING_P */