tree-optimization/114485 - neg induction with partial vectors
[official-gcc.git] / gcc / fold-const.cc
blob7b268964acc424ada2ae4f8278eb2ca9420d37c6
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #define INCLUDE_ALGORITHM
44 #include "config.h"
45 #include "system.h"
46 #include "coretypes.h"
47 #include "backend.h"
48 #include "target.h"
49 #include "rtl.h"
50 #include "tree.h"
51 #include "gimple.h"
52 #include "predict.h"
53 #include "memmodel.h"
54 #include "tm_p.h"
55 #include "tree-ssa-operands.h"
56 #include "optabs-query.h"
57 #include "cgraph.h"
58 #include "diagnostic-core.h"
59 #include "flags.h"
60 #include "alias.h"
61 #include "fold-const.h"
62 #include "fold-const-call.h"
63 #include "stor-layout.h"
64 #include "calls.h"
65 #include "tree-iterator.h"
66 #include "expr.h"
67 #include "intl.h"
68 #include "langhooks.h"
69 #include "tree-eh.h"
70 #include "gimplify.h"
71 #include "tree-dfa.h"
72 #include "builtins.h"
73 #include "generic-match.h"
74 #include "gimple-iterator.h"
75 #include "gimple-fold.h"
76 #include "tree-into-ssa.h"
77 #include "md5.h"
78 #include "case-cfn-macros.h"
79 #include "stringpool.h"
80 #include "tree-vrp.h"
81 #include "tree-ssanames.h"
82 #include "selftest.h"
83 #include "stringpool.h"
84 #include "attribs.h"
85 #include "tree-vector-builder.h"
86 #include "vec-perm-indices.h"
87 #include "asan.h"
88 #include "gimple-range.h"
90 /* Nonzero if we are folding constants inside an initializer or a C++
91 manifestly-constant-evaluated context; zero otherwise.
92 Should be used when folding in initializer enables additional
93 optimizations. */
94 int folding_initializer = 0;
96 /* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
97 otherwise.
98 Should be used when certain constructs shouldn't be optimized
99 during folding in that context. */
100 bool folding_cxx_constexpr = false;
102 /* The following constants represent a bit based encoding of GCC's
103 comparison operators. This encoding simplifies transformations
104 on relational comparison operators, such as AND and OR. */
105 enum comparison_code {
106 COMPCODE_FALSE = 0,
107 COMPCODE_LT = 1,
108 COMPCODE_EQ = 2,
109 COMPCODE_LE = 3,
110 COMPCODE_GT = 4,
111 COMPCODE_LTGT = 5,
112 COMPCODE_GE = 6,
113 COMPCODE_ORD = 7,
114 COMPCODE_UNORD = 8,
115 COMPCODE_UNLT = 9,
116 COMPCODE_UNEQ = 10,
117 COMPCODE_UNLE = 11,
118 COMPCODE_UNGT = 12,
119 COMPCODE_NE = 13,
120 COMPCODE_UNGE = 14,
121 COMPCODE_TRUE = 15
124 static bool negate_expr_p (tree);
125 static tree negate_expr (tree);
126 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
127 static enum comparison_code comparison_to_compcode (enum tree_code);
128 static enum tree_code compcode_to_comparison (enum comparison_code);
129 static bool twoval_comparison_p (tree, tree *, tree *);
130 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
131 static tree optimize_bit_field_compare (location_t, enum tree_code,
132 tree, tree, tree);
133 static bool simple_operand_p (const_tree);
134 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 static tree range_predecessor (tree);
136 static tree range_successor (tree);
137 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
139 tree, tree, tree, tree);
140 static tree unextend (tree, int, int, tree);
141 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
142 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
143 static tree fold_binary_op_with_conditional_arg (location_t,
144 enum tree_code, tree,
145 tree, tree,
146 tree, tree, int);
147 static tree fold_negate_const (tree, tree);
148 static tree fold_not_const (const_tree, tree);
149 static tree fold_relational_const (enum tree_code, tree, tree, tree);
150 static tree fold_convert_const (enum tree_code, tree, tree);
151 static tree fold_view_convert_expr (tree, tree);
152 static tree fold_negate_expr (location_t, tree);
154 /* This is a helper function to detect min/max for some operands of COND_EXPR.
155 The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3". */
156 tree_code
157 minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
159 enum tree_code code = ERROR_MARK;
161 if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
162 return ERROR_MARK;
164 if (!operand_equal_p (exp0, exp2))
165 return ERROR_MARK;
167 if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
169 if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
171 /* X <= Y - 1 equals to X < Y. */
172 if (cmp == LE_EXPR)
173 code = LT_EXPR;
174 /* X > Y - 1 equals to X >= Y. */
175 if (cmp == GT_EXPR)
176 code = GE_EXPR;
177 /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
178 if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
180 value_range r;
181 get_range_query (cfun)->range_of_expr (r, exp0);
182 if (r.undefined_p ())
183 r.set_varying (TREE_TYPE (exp0));
185 widest_int min = widest_int::from (r.lower_bound (),
186 TYPE_SIGN (TREE_TYPE (exp0)));
187 if (min == wi::to_widest (exp1))
188 code = MAX_EXPR;
191 if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
193 /* X < Y + 1 equals to X <= Y. */
194 if (cmp == LT_EXPR)
195 code = LE_EXPR;
196 /* X >= Y + 1 equals to X > Y. */
197 if (cmp == GE_EXPR)
198 code = GT_EXPR;
199 /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MIN_RANGE<a>-1, a> */
200 if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
202 value_range r;
203 get_range_query (cfun)->range_of_expr (r, exp0);
204 if (r.undefined_p ())
205 r.set_varying (TREE_TYPE (exp0));
207 widest_int max = widest_int::from (r.upper_bound (),
208 TYPE_SIGN (TREE_TYPE (exp0)));
209 if (max == wi::to_widest (exp1))
210 code = MIN_EXPR;
214 if (code != ERROR_MARK
215 || operand_equal_p (exp1, exp3))
217 if (cmp == LT_EXPR || cmp == LE_EXPR)
218 code = MIN_EXPR;
219 if (cmp == GT_EXPR || cmp == GE_EXPR)
220 code = MAX_EXPR;
222 return code;
225 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
226 Otherwise, return LOC. */
228 static location_t
229 expr_location_or (tree t, location_t loc)
231 location_t tloc = EXPR_LOCATION (t);
232 return tloc == UNKNOWN_LOCATION ? loc : tloc;
235 /* Similar to protected_set_expr_location, but never modify x in place,
236 if location can and needs to be set, unshare it. */
238 tree
239 protected_set_expr_location_unshare (tree x, location_t loc)
241 if (CAN_HAVE_LOCATION_P (x)
242 && EXPR_LOCATION (x) != loc
243 && !(TREE_CODE (x) == SAVE_EXPR
244 || TREE_CODE (x) == TARGET_EXPR
245 || TREE_CODE (x) == BIND_EXPR))
247 x = copy_node (x);
248 SET_EXPR_LOCATION (x, loc);
250 return x;
253 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
254 division and returns the quotient. Otherwise returns
255 NULL_TREE. */
257 tree
258 div_if_zero_remainder (const_tree arg1, const_tree arg2)
260 widest_int quo;
262 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
263 SIGNED, &quo))
264 return wide_int_to_tree (TREE_TYPE (arg1), quo);
266 return NULL_TREE;
269 /* This is nonzero if we should defer warnings about undefined
270 overflow. This facility exists because these warnings are a
271 special case. The code to estimate loop iterations does not want
272 to issue any warnings, since it works with expressions which do not
273 occur in user code. Various bits of cleanup code call fold(), but
274 only use the result if it has certain characteristics (e.g., is a
275 constant); that code only wants to issue a warning if the result is
276 used. */
278 static int fold_deferring_overflow_warnings;
280 /* If a warning about undefined overflow is deferred, this is the
281 warning. Note that this may cause us to turn two warnings into
282 one, but that is fine since it is sufficient to only give one
283 warning per expression. */
285 static const char* fold_deferred_overflow_warning;
287 /* If a warning about undefined overflow is deferred, this is the
288 level at which the warning should be emitted. */
290 static enum warn_strict_overflow_code fold_deferred_overflow_code;
292 /* Start deferring overflow warnings. We could use a stack here to
293 permit nested calls, but at present it is not necessary. */
295 void
296 fold_defer_overflow_warnings (void)
298 ++fold_deferring_overflow_warnings;
301 /* Stop deferring overflow warnings. If there is a pending warning,
302 and ISSUE is true, then issue the warning if appropriate. STMT is
303 the statement with which the warning should be associated (used for
304 location information); STMT may be NULL. CODE is the level of the
305 warning--a warn_strict_overflow_code value. This function will use
306 the smaller of CODE and the deferred code when deciding whether to
307 issue the warning. CODE may be zero to mean to always use the
308 deferred code. */
310 void
311 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
313 const char *warnmsg;
314 location_t locus;
316 gcc_assert (fold_deferring_overflow_warnings > 0);
317 --fold_deferring_overflow_warnings;
318 if (fold_deferring_overflow_warnings > 0)
320 if (fold_deferred_overflow_warning != NULL
321 && code != 0
322 && code < (int) fold_deferred_overflow_code)
323 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
324 return;
327 warnmsg = fold_deferred_overflow_warning;
328 fold_deferred_overflow_warning = NULL;
330 if (!issue || warnmsg == NULL)
331 return;
333 if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
334 return;
336 /* Use the smallest code level when deciding to issue the
337 warning. */
338 if (code == 0 || code > (int) fold_deferred_overflow_code)
339 code = fold_deferred_overflow_code;
341 if (!issue_strict_overflow_warning (code))
342 return;
344 if (stmt == NULL)
345 locus = input_location;
346 else
347 locus = gimple_location (stmt);
348 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
351 /* Stop deferring overflow warnings, ignoring any deferred
352 warnings. */
354 void
355 fold_undefer_and_ignore_overflow_warnings (void)
357 fold_undefer_overflow_warnings (false, NULL, 0);
360 /* Whether we are deferring overflow warnings. */
362 bool
363 fold_deferring_overflow_warnings_p (void)
365 return fold_deferring_overflow_warnings > 0;
368 /* This is called when we fold something based on the fact that signed
369 overflow is undefined. */
371 void
372 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
374 if (fold_deferring_overflow_warnings > 0)
376 if (fold_deferred_overflow_warning == NULL
377 || wc < fold_deferred_overflow_code)
379 fold_deferred_overflow_warning = gmsgid;
380 fold_deferred_overflow_code = wc;
383 else if (issue_strict_overflow_warning (wc))
384 warning (OPT_Wstrict_overflow, gmsgid);
387 /* Return true if the built-in mathematical function specified by CODE
388 is odd, i.e. -f(x) == f(-x). */
390 bool
391 negate_mathfn_p (combined_fn fn)
393 switch (fn)
395 CASE_CFN_ASIN:
396 CASE_CFN_ASIN_FN:
397 CASE_CFN_ASINH:
398 CASE_CFN_ASINH_FN:
399 CASE_CFN_ATAN:
400 CASE_CFN_ATAN_FN:
401 CASE_CFN_ATANH:
402 CASE_CFN_ATANH_FN:
403 CASE_CFN_CASIN:
404 CASE_CFN_CASIN_FN:
405 CASE_CFN_CASINH:
406 CASE_CFN_CASINH_FN:
407 CASE_CFN_CATAN:
408 CASE_CFN_CATAN_FN:
409 CASE_CFN_CATANH:
410 CASE_CFN_CATANH_FN:
411 CASE_CFN_CBRT:
412 CASE_CFN_CBRT_FN:
413 CASE_CFN_CPROJ:
414 CASE_CFN_CPROJ_FN:
415 CASE_CFN_CSIN:
416 CASE_CFN_CSIN_FN:
417 CASE_CFN_CSINH:
418 CASE_CFN_CSINH_FN:
419 CASE_CFN_CTAN:
420 CASE_CFN_CTAN_FN:
421 CASE_CFN_CTANH:
422 CASE_CFN_CTANH_FN:
423 CASE_CFN_ERF:
424 CASE_CFN_ERF_FN:
425 CASE_CFN_LLROUND:
426 CASE_CFN_LLROUND_FN:
427 CASE_CFN_LROUND:
428 CASE_CFN_LROUND_FN:
429 CASE_CFN_ROUND:
430 CASE_CFN_ROUNDEVEN:
431 CASE_CFN_ROUNDEVEN_FN:
432 CASE_CFN_SIN:
433 CASE_CFN_SIN_FN:
434 CASE_CFN_SINH:
435 CASE_CFN_SINH_FN:
436 CASE_CFN_TAN:
437 CASE_CFN_TAN_FN:
438 CASE_CFN_TANH:
439 CASE_CFN_TANH_FN:
440 CASE_CFN_TRUNC:
441 CASE_CFN_TRUNC_FN:
442 return true;
444 CASE_CFN_LLRINT:
445 CASE_CFN_LLRINT_FN:
446 CASE_CFN_LRINT:
447 CASE_CFN_LRINT_FN:
448 CASE_CFN_NEARBYINT:
449 CASE_CFN_NEARBYINT_FN:
450 CASE_CFN_RINT:
451 CASE_CFN_RINT_FN:
452 return !flag_rounding_math;
454 default:
455 break;
457 return false;
460 /* Check whether we may negate an integer constant T without causing
461 overflow. */
463 bool
464 may_negate_without_overflow_p (const_tree t)
466 tree type;
468 gcc_assert (TREE_CODE (t) == INTEGER_CST);
470 type = TREE_TYPE (t);
471 if (TYPE_UNSIGNED (type))
472 return false;
474 return !wi::only_sign_bit_p (wi::to_wide (t));
477 /* Determine whether an expression T can be cheaply negated using
478 the function negate_expr without introducing undefined overflow. */
480 static bool
481 negate_expr_p (tree t)
483 tree type;
485 if (t == 0)
486 return false;
488 type = TREE_TYPE (t);
490 STRIP_SIGN_NOPS (t);
491 switch (TREE_CODE (t))
493 case INTEGER_CST:
494 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
495 return true;
497 /* Check that -CST will not overflow type. */
498 return may_negate_without_overflow_p (t);
499 case BIT_NOT_EXPR:
500 return (INTEGRAL_TYPE_P (type)
501 && TYPE_OVERFLOW_WRAPS (type));
503 case FIXED_CST:
504 return true;
506 case NEGATE_EXPR:
507 return !TYPE_OVERFLOW_SANITIZED (type);
509 case REAL_CST:
510 /* We want to canonicalize to positive real constants. Pretend
511 that only negative ones can be easily negated. */
512 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
514 case COMPLEX_CST:
515 return negate_expr_p (TREE_REALPART (t))
516 && negate_expr_p (TREE_IMAGPART (t));
518 case VECTOR_CST:
520 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
521 return true;
523 /* Steps don't prevent negation. */
524 unsigned int count = vector_cst_encoded_nelts (t);
525 for (unsigned int i = 0; i < count; ++i)
526 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
527 return false;
529 return true;
532 case COMPLEX_EXPR:
533 return negate_expr_p (TREE_OPERAND (t, 0))
534 && negate_expr_p (TREE_OPERAND (t, 1));
536 case CONJ_EXPR:
537 return negate_expr_p (TREE_OPERAND (t, 0));
539 case PLUS_EXPR:
540 if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
541 || HONOR_SIGNED_ZEROS (type)
542 || (ANY_INTEGRAL_TYPE_P (type)
543 && ! TYPE_OVERFLOW_WRAPS (type)))
544 return false;
545 /* -(A + B) -> (-B) - A. */
546 if (negate_expr_p (TREE_OPERAND (t, 1)))
547 return true;
548 /* -(A + B) -> (-A) - B. */
549 return negate_expr_p (TREE_OPERAND (t, 0));
551 case MINUS_EXPR:
552 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
553 return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
554 && !HONOR_SIGNED_ZEROS (type)
555 && (! ANY_INTEGRAL_TYPE_P (type)
556 || TYPE_OVERFLOW_WRAPS (type));
558 case MULT_EXPR:
559 if (TYPE_UNSIGNED (type))
560 break;
561 /* INT_MIN/n * n doesn't overflow while negating one operand it does
562 if n is a (negative) power of two. */
563 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
564 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
565 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
566 && (wi::popcount
567 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
568 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
569 && (wi::popcount
570 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
571 break;
573 /* Fall through. */
575 case RDIV_EXPR:
576 if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
577 return negate_expr_p (TREE_OPERAND (t, 1))
578 || negate_expr_p (TREE_OPERAND (t, 0));
579 break;
581 case TRUNC_DIV_EXPR:
582 case ROUND_DIV_EXPR:
583 case EXACT_DIV_EXPR:
584 if (TYPE_UNSIGNED (type))
585 break;
586 /* In general we can't negate A in A / B, because if A is INT_MIN and
587 B is not 1 we change the sign of the result. */
588 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
589 && negate_expr_p (TREE_OPERAND (t, 0)))
590 return true;
591 /* In general we can't negate B in A / B, because if A is INT_MIN and
592 B is 1, we may turn this into INT_MIN / -1 which is undefined
593 and actually traps on some architectures. */
594 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
595 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
596 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
597 && ! integer_onep (TREE_OPERAND (t, 1))))
598 return negate_expr_p (TREE_OPERAND (t, 1));
599 break;
601 case NOP_EXPR:
602 /* Negate -((double)float) as (double)(-float). */
603 if (SCALAR_FLOAT_TYPE_P (type))
605 tree tem = strip_float_extensions (t);
606 if (tem != t)
607 return negate_expr_p (tem);
609 break;
611 case CALL_EXPR:
612 /* Negate -f(x) as f(-x). */
613 if (negate_mathfn_p (get_call_combined_fn (t)))
614 return negate_expr_p (CALL_EXPR_ARG (t, 0));
615 break;
617 case RSHIFT_EXPR:
618 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
619 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
621 tree op1 = TREE_OPERAND (t, 1);
622 if (wi::to_wide (op1) == element_precision (type) - 1)
623 return true;
625 break;
627 default:
628 break;
630 return false;
633 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
634 simplification is possible.
635 If negate_expr_p would return true for T, NULL_TREE will never be
636 returned. */
638 static tree
639 fold_negate_expr_1 (location_t loc, tree t)
641 tree type = TREE_TYPE (t);
642 tree tem;
644 switch (TREE_CODE (t))
646 /* Convert - (~A) to A + 1. */
647 case BIT_NOT_EXPR:
648 if (INTEGRAL_TYPE_P (type))
649 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
650 build_one_cst (type));
651 break;
653 case INTEGER_CST:
654 tem = fold_negate_const (t, type);
655 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
656 || (ANY_INTEGRAL_TYPE_P (type)
657 && !TYPE_OVERFLOW_TRAPS (type)
658 && TYPE_OVERFLOW_WRAPS (type))
659 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
660 return tem;
661 break;
663 case POLY_INT_CST:
664 case REAL_CST:
665 case FIXED_CST:
666 tem = fold_negate_const (t, type);
667 return tem;
669 case COMPLEX_CST:
671 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
672 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
673 if (rpart && ipart)
674 return build_complex (type, rpart, ipart);
676 break;
678 case VECTOR_CST:
680 tree_vector_builder elts;
681 elts.new_unary_operation (type, t, true);
682 unsigned int count = elts.encoded_nelts ();
683 for (unsigned int i = 0; i < count; ++i)
685 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
686 if (elt == NULL_TREE)
687 return NULL_TREE;
688 elts.quick_push (elt);
691 return elts.build ();
694 case COMPLEX_EXPR:
695 if (negate_expr_p (t))
696 return fold_build2_loc (loc, COMPLEX_EXPR, type,
697 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
698 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
699 break;
701 case CONJ_EXPR:
702 if (negate_expr_p (t))
703 return fold_build1_loc (loc, CONJ_EXPR, type,
704 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
705 break;
707 case NEGATE_EXPR:
708 if (!TYPE_OVERFLOW_SANITIZED (type))
709 return TREE_OPERAND (t, 0);
710 break;
712 case PLUS_EXPR:
713 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
714 && !HONOR_SIGNED_ZEROS (type))
716 /* -(A + B) -> (-B) - A. */
717 if (negate_expr_p (TREE_OPERAND (t, 1)))
719 tem = negate_expr (TREE_OPERAND (t, 1));
720 return fold_build2_loc (loc, MINUS_EXPR, type,
721 tem, TREE_OPERAND (t, 0));
724 /* -(A + B) -> (-A) - B. */
725 if (negate_expr_p (TREE_OPERAND (t, 0)))
727 tem = negate_expr (TREE_OPERAND (t, 0));
728 return fold_build2_loc (loc, MINUS_EXPR, type,
729 tem, TREE_OPERAND (t, 1));
732 break;
734 case MINUS_EXPR:
735 /* - (A - B) -> B - A */
736 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
737 && !HONOR_SIGNED_ZEROS (type))
738 return fold_build2_loc (loc, MINUS_EXPR, type,
739 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
740 break;
742 case MULT_EXPR:
743 if (TYPE_UNSIGNED (type))
744 break;
746 /* Fall through. */
748 case RDIV_EXPR:
749 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
751 tem = TREE_OPERAND (t, 1);
752 if (negate_expr_p (tem))
753 return fold_build2_loc (loc, TREE_CODE (t), type,
754 TREE_OPERAND (t, 0), negate_expr (tem));
755 tem = TREE_OPERAND (t, 0);
756 if (negate_expr_p (tem))
757 return fold_build2_loc (loc, TREE_CODE (t), type,
758 negate_expr (tem), TREE_OPERAND (t, 1));
760 break;
762 case TRUNC_DIV_EXPR:
763 case ROUND_DIV_EXPR:
764 case EXACT_DIV_EXPR:
765 if (TYPE_UNSIGNED (type))
766 break;
767 /* In general we can't negate A in A / B, because if A is INT_MIN and
768 B is not 1 we change the sign of the result. */
769 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
770 && negate_expr_p (TREE_OPERAND (t, 0)))
771 return fold_build2_loc (loc, TREE_CODE (t), type,
772 negate_expr (TREE_OPERAND (t, 0)),
773 TREE_OPERAND (t, 1));
774 /* In general we can't negate B in A / B, because if A is INT_MIN and
775 B is 1, we may turn this into INT_MIN / -1 which is undefined
776 and actually traps on some architectures. */
777 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
778 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
779 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
780 && ! integer_onep (TREE_OPERAND (t, 1))))
781 && negate_expr_p (TREE_OPERAND (t, 1)))
782 return fold_build2_loc (loc, TREE_CODE (t), type,
783 TREE_OPERAND (t, 0),
784 negate_expr (TREE_OPERAND (t, 1)));
785 break;
787 case NOP_EXPR:
788 /* Convert -((double)float) into (double)(-float). */
789 if (SCALAR_FLOAT_TYPE_P (type))
791 tem = strip_float_extensions (t);
792 if (tem != t && negate_expr_p (tem))
793 return fold_convert_loc (loc, type, negate_expr (tem));
795 break;
797 case CALL_EXPR:
798 /* Negate -f(x) as f(-x). */
799 if (negate_mathfn_p (get_call_combined_fn (t))
800 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
802 tree fndecl, arg;
804 fndecl = get_callee_fndecl (t);
805 arg = negate_expr (CALL_EXPR_ARG (t, 0));
806 return build_call_expr_loc (loc, fndecl, 1, arg);
808 break;
810 case RSHIFT_EXPR:
811 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
812 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
814 tree op1 = TREE_OPERAND (t, 1);
815 if (wi::to_wide (op1) == element_precision (type) - 1)
817 tree ntype = TYPE_UNSIGNED (type)
818 ? signed_type_for (type)
819 : unsigned_type_for (type);
820 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
821 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
822 return fold_convert_loc (loc, type, temp);
825 break;
827 default:
828 break;
831 return NULL_TREE;
834 /* A wrapper for fold_negate_expr_1. */
836 static tree
837 fold_negate_expr (location_t loc, tree t)
839 tree type = TREE_TYPE (t);
840 STRIP_SIGN_NOPS (t);
841 tree tem = fold_negate_expr_1 (loc, t);
842 if (tem == NULL_TREE)
843 return NULL_TREE;
844 return fold_convert_loc (loc, type, tem);
847 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
848 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
849 return NULL_TREE. */
851 static tree
852 negate_expr (tree t)
854 tree type, tem;
855 location_t loc;
857 if (t == NULL_TREE)
858 return NULL_TREE;
860 loc = EXPR_LOCATION (t);
861 type = TREE_TYPE (t);
862 STRIP_SIGN_NOPS (t);
864 tem = fold_negate_expr (loc, t);
865 if (!tem)
866 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
867 return fold_convert_loc (loc, type, tem);
870 /* Split a tree IN into a constant, literal and variable parts that could be
871 combined with CODE to make IN. "constant" means an expression with
872 TREE_CONSTANT but that isn't an actual constant. CODE must be a
873 commutative arithmetic operation. Store the constant part into *CONP,
874 the literal in *LITP and return the variable part. If a part isn't
875 present, set it to null. If the tree does not decompose in this way,
876 return the entire tree as the variable part and the other parts as null.
878 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
879 case, we negate an operand that was subtracted. Except if it is a
880 literal for which we use *MINUS_LITP instead.
882 If NEGATE_P is true, we are negating all of IN, again except a literal
883 for which we use *MINUS_LITP instead. If a variable part is of pointer
884 type, it is negated after converting to TYPE. This prevents us from
885 generating illegal MINUS pointer expression. LOC is the location of
886 the converted variable part.
888 If IN is itself a literal or constant, return it as appropriate.
890 Note that we do not guarantee that any of the three values will be the
891 same type as IN, but they will have the same signedness and mode. */
893 static tree
894 split_tree (tree in, tree type, enum tree_code code,
895 tree *minus_varp, tree *conp, tree *minus_conp,
896 tree *litp, tree *minus_litp, int negate_p)
898 tree var = 0;
899 *minus_varp = 0;
900 *conp = 0;
901 *minus_conp = 0;
902 *litp = 0;
903 *minus_litp = 0;
905 /* Strip any conversions that don't change the machine mode or signedness. */
906 STRIP_SIGN_NOPS (in);
908 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
909 || TREE_CODE (in) == FIXED_CST)
910 *litp = in;
911 else if (TREE_CODE (in) == code
912 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
913 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
914 /* We can associate addition and subtraction together (even
915 though the C standard doesn't say so) for integers because
916 the value is not affected. For reals, the value might be
917 affected, so we can't. */
918 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
919 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
920 || (code == MINUS_EXPR
921 && (TREE_CODE (in) == PLUS_EXPR
922 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
924 tree op0 = TREE_OPERAND (in, 0);
925 tree op1 = TREE_OPERAND (in, 1);
926 bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
927 bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;
929 /* First see if either of the operands is a literal, then a constant. */
930 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
931 || TREE_CODE (op0) == FIXED_CST)
932 *litp = op0, op0 = 0;
933 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
934 || TREE_CODE (op1) == FIXED_CST)
935 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
937 if (op0 != 0 && TREE_CONSTANT (op0))
938 *conp = op0, op0 = 0;
939 else if (op1 != 0 && TREE_CONSTANT (op1))
940 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
942 /* If we haven't dealt with either operand, this is not a case we can
943 decompose. Otherwise, VAR is either of the ones remaining, if any. */
944 if (op0 != 0 && op1 != 0)
945 var = in;
946 else if (op0 != 0)
947 var = op0;
948 else
949 var = op1, neg_var_p = neg1_p;
951 /* Now do any needed negations. */
952 if (neg_litp_p)
953 *minus_litp = *litp, *litp = 0;
954 if (neg_conp_p && *conp)
955 *minus_conp = *conp, *conp = 0;
956 if (neg_var_p && var)
957 *minus_varp = var, var = 0;
959 else if (TREE_CONSTANT (in))
960 *conp = in;
961 else if (TREE_CODE (in) == BIT_NOT_EXPR
962 && code == PLUS_EXPR)
964 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
965 when IN is constant. */
966 *litp = build_minus_one_cst (type);
967 *minus_varp = TREE_OPERAND (in, 0);
969 else
970 var = in;
972 if (negate_p)
974 if (*litp)
975 *minus_litp = *litp, *litp = 0;
976 else if (*minus_litp)
977 *litp = *minus_litp, *minus_litp = 0;
978 if (*conp)
979 *minus_conp = *conp, *conp = 0;
980 else if (*minus_conp)
981 *conp = *minus_conp, *minus_conp = 0;
982 if (var)
983 *minus_varp = var, var = 0;
984 else if (*minus_varp)
985 var = *minus_varp, *minus_varp = 0;
988 if (*litp
989 && TREE_OVERFLOW_P (*litp))
990 *litp = drop_tree_overflow (*litp);
991 if (*minus_litp
992 && TREE_OVERFLOW_P (*minus_litp))
993 *minus_litp = drop_tree_overflow (*minus_litp);
995 return var;
998 /* Re-associate trees split by the above function. T1 and T2 are
999 either expressions to associate or null. Return the new
1000 expression, if any. LOC is the location of the new expression. If
1001 we build an operation, do it in TYPE and with CODE. */
1003 static tree
1004 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1006 if (t1 == 0)
1008 gcc_assert (t2 == 0 || code != MINUS_EXPR);
1009 return t2;
1011 else if (t2 == 0)
1012 return t1;
1014 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1015 try to fold this since we will have infinite recursion. But do
1016 deal with any NEGATE_EXPRs. */
1017 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1018 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
1019 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1021 if (code == PLUS_EXPR)
1023 if (TREE_CODE (t1) == NEGATE_EXPR)
1024 return build2_loc (loc, MINUS_EXPR, type,
1025 fold_convert_loc (loc, type, t2),
1026 fold_convert_loc (loc, type,
1027 TREE_OPERAND (t1, 0)));
1028 else if (TREE_CODE (t2) == NEGATE_EXPR)
1029 return build2_loc (loc, MINUS_EXPR, type,
1030 fold_convert_loc (loc, type, t1),
1031 fold_convert_loc (loc, type,
1032 TREE_OPERAND (t2, 0)));
1033 else if (integer_zerop (t2))
1034 return fold_convert_loc (loc, type, t1);
1036 else if (code == MINUS_EXPR)
1038 if (integer_zerop (t2))
1039 return fold_convert_loc (loc, type, t1);
1042 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1043 fold_convert_loc (loc, type, t2));
1046 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1047 fold_convert_loc (loc, type, t2));
1050 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1051 for use in int_const_binop, size_binop and size_diffop. */
1053 static bool
1054 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1056 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
1057 return false;
1058 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
1059 return false;
1061 switch (code)
1063 case LSHIFT_EXPR:
1064 case RSHIFT_EXPR:
1065 case LROTATE_EXPR:
1066 case RROTATE_EXPR:
1067 return true;
1069 default:
1070 break;
1073 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1074 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1075 && TYPE_MODE (type1) == TYPE_MODE (type2);
1078 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
1079 a new constant in RES. Return FALSE if we don't know how to
1080 evaluate CODE at compile-time. */
1082 bool
1083 wide_int_binop (wide_int &res,
1084 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
1085 signop sign, wi::overflow_type *overflow)
1087 wide_int tmp;
1088 *overflow = wi::OVF_NONE;
1089 switch (code)
1091 case BIT_IOR_EXPR:
1092 res = wi::bit_or (arg1, arg2);
1093 break;
1095 case BIT_XOR_EXPR:
1096 res = wi::bit_xor (arg1, arg2);
1097 break;
1099 case BIT_AND_EXPR:
1100 res = wi::bit_and (arg1, arg2);
1101 break;
1103 case LSHIFT_EXPR:
1104 if (wi::neg_p (arg2))
1105 return false;
1106 res = wi::lshift (arg1, arg2);
1107 break;
1109 case RSHIFT_EXPR:
1110 if (wi::neg_p (arg2))
1111 return false;
1112 /* It's unclear from the C standard whether shifts can overflow.
1113 The following code ignores overflow; perhaps a C standard
1114 interpretation ruling is needed. */
1115 res = wi::rshift (arg1, arg2, sign);
1116 break;
1118 case RROTATE_EXPR:
1119 case LROTATE_EXPR:
1120 if (wi::neg_p (arg2))
1122 tmp = -arg2;
1123 if (code == RROTATE_EXPR)
1124 code = LROTATE_EXPR;
1125 else
1126 code = RROTATE_EXPR;
1128 else
1129 tmp = arg2;
1131 if (code == RROTATE_EXPR)
1132 res = wi::rrotate (arg1, tmp);
1133 else
1134 res = wi::lrotate (arg1, tmp);
1135 break;
1137 case PLUS_EXPR:
1138 res = wi::add (arg1, arg2, sign, overflow);
1139 break;
1141 case MINUS_EXPR:
1142 res = wi::sub (arg1, arg2, sign, overflow);
1143 break;
1145 case MULT_EXPR:
1146 res = wi::mul (arg1, arg2, sign, overflow);
1147 break;
1149 case MULT_HIGHPART_EXPR:
1150 res = wi::mul_high (arg1, arg2, sign);
1151 break;
1153 case TRUNC_DIV_EXPR:
1154 case EXACT_DIV_EXPR:
1155 if (arg2 == 0)
1156 return false;
1157 res = wi::div_trunc (arg1, arg2, sign, overflow);
1158 break;
1160 case FLOOR_DIV_EXPR:
1161 if (arg2 == 0)
1162 return false;
1163 res = wi::div_floor (arg1, arg2, sign, overflow);
1164 break;
1166 case CEIL_DIV_EXPR:
1167 if (arg2 == 0)
1168 return false;
1169 res = wi::div_ceil (arg1, arg2, sign, overflow);
1170 break;
1172 case ROUND_DIV_EXPR:
1173 if (arg2 == 0)
1174 return false;
1175 res = wi::div_round (arg1, arg2, sign, overflow);
1176 break;
1178 case TRUNC_MOD_EXPR:
1179 if (arg2 == 0)
1180 return false;
1181 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1182 break;
1184 case FLOOR_MOD_EXPR:
1185 if (arg2 == 0)
1186 return false;
1187 res = wi::mod_floor (arg1, arg2, sign, overflow);
1188 break;
1190 case CEIL_MOD_EXPR:
1191 if (arg2 == 0)
1192 return false;
1193 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1194 break;
1196 case ROUND_MOD_EXPR:
1197 if (arg2 == 0)
1198 return false;
1199 res = wi::mod_round (arg1, arg2, sign, overflow);
1200 break;
1202 case MIN_EXPR:
1203 res = wi::min (arg1, arg2, sign);
1204 break;
1206 case MAX_EXPR:
1207 res = wi::max (arg1, arg2, sign);
1208 break;
1210 default:
1211 return false;
1213 return true;
1216 /* Returns true if we know who is smaller or equal, ARG1 or ARG2, and set the
1217 min value to RES. */
1218 bool
1219 can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
1221 if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
1223 res = wi::to_poly_wide (arg1);
1224 return true;
1226 else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
1228 res = wi::to_poly_wide (arg2);
1229 return true;
1232 return false;
1235 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1236 produce a new constant in RES. Return FALSE if we don't know how
1237 to evaluate CODE at compile-time. */
1239 static bool
1240 poly_int_binop (poly_wide_int &res, enum tree_code code,
1241 const_tree arg1, const_tree arg2,
1242 signop sign, wi::overflow_type *overflow)
1244 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1245 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1246 switch (code)
1248 case PLUS_EXPR:
1249 res = wi::add (wi::to_poly_wide (arg1),
1250 wi::to_poly_wide (arg2), sign, overflow);
1251 break;
1253 case MINUS_EXPR:
1254 res = wi::sub (wi::to_poly_wide (arg1),
1255 wi::to_poly_wide (arg2), sign, overflow);
1256 break;
1258 case MULT_EXPR:
1259 if (TREE_CODE (arg2) == INTEGER_CST)
1260 res = wi::mul (wi::to_poly_wide (arg1),
1261 wi::to_wide (arg2), sign, overflow);
1262 else if (TREE_CODE (arg1) == INTEGER_CST)
1263 res = wi::mul (wi::to_poly_wide (arg2),
1264 wi::to_wide (arg1), sign, overflow);
1265 else
1266 return NULL_TREE;
1267 break;
1269 case LSHIFT_EXPR:
1270 if (TREE_CODE (arg2) == INTEGER_CST)
1271 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1272 else
1273 return false;
1274 break;
1276 case BIT_IOR_EXPR:
1277 if (TREE_CODE (arg2) != INTEGER_CST
1278 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1279 &res))
1280 return false;
1281 break;
1283 case MIN_EXPR:
1284 if (!can_min_p (arg1, arg2, res))
1285 return false;
1286 break;
1288 default:
1289 return false;
1291 return true;
1294 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1295 produce a new constant. Return NULL_TREE if we don't know how to
1296 evaluate CODE at compile-time. */
1298 tree
1299 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1300 int overflowable)
1302 poly_wide_int poly_res;
1303 tree type = TREE_TYPE (arg1);
1304 signop sign = TYPE_SIGN (type);
1305 wi::overflow_type overflow = wi::OVF_NONE;
1307 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1309 wide_int warg1 = wi::to_wide (arg1), res;
1310 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1311 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1312 return NULL_TREE;
1313 poly_res = res;
1315 else if (!poly_int_tree_p (arg1)
1316 || !poly_int_tree_p (arg2)
1317 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1318 return NULL_TREE;
1319 return force_fit_type (type, poly_res, overflowable,
1320 (((sign == SIGNED || overflowable == -1)
1321 && overflow)
1322 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1325 /* Return true if binary operation OP distributes over addition in operand
1326 OPNO, with the other operand being held constant. OPNO counts from 1. */
1328 static bool
1329 distributes_over_addition_p (tree_code op, int opno)
1331 switch (op)
1333 case PLUS_EXPR:
1334 case MINUS_EXPR:
1335 case MULT_EXPR:
1336 return true;
1338 case LSHIFT_EXPR:
1339 return opno == 1;
1341 default:
1342 return false;
1346 /* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
1347 is the other operand. Try to use the value of OP to simplify the
1348 operation in one step, without having to process individual elements. */
1349 static tree
1350 simplify_const_binop (tree_code code, tree op, tree other_op,
1351 int index ATTRIBUTE_UNUSED)
1353 /* AND, IOR as well as XOR with a zerop can be simplified directly. */
1354 if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
1356 if (integer_zerop (other_op))
1358 if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
1359 return op;
1360 else if (code == BIT_AND_EXPR)
1361 return other_op;
1365 return NULL_TREE;
1369 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1370 constant. We assume ARG1 and ARG2 have the same data type, or at least
1371 are the same kind of constant and the same machine mode. Return zero if
1372 combining the constants is not allowed in the current operating mode. */
1374 static tree
1375 const_binop (enum tree_code code, tree arg1, tree arg2)
1377 /* Sanity check for the recursive cases. */
1378 if (!arg1 || !arg2)
1379 return NULL_TREE;
1381 STRIP_NOPS (arg1);
1382 STRIP_NOPS (arg2);
1384 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1386 if (code == POINTER_PLUS_EXPR)
1387 return int_const_binop (PLUS_EXPR,
1388 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1390 return int_const_binop (code, arg1, arg2);
1393 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1395 machine_mode mode;
1396 REAL_VALUE_TYPE d1;
1397 REAL_VALUE_TYPE d2;
1398 REAL_VALUE_TYPE value;
1399 REAL_VALUE_TYPE result;
1400 bool inexact;
1401 tree t, type;
1403 /* The following codes are handled by real_arithmetic. */
1404 switch (code)
1406 case PLUS_EXPR:
1407 case MINUS_EXPR:
1408 case MULT_EXPR:
1409 case RDIV_EXPR:
1410 case MIN_EXPR:
1411 case MAX_EXPR:
1412 break;
1414 default:
1415 return NULL_TREE;
1418 d1 = TREE_REAL_CST (arg1);
1419 d2 = TREE_REAL_CST (arg2);
1421 type = TREE_TYPE (arg1);
1422 mode = TYPE_MODE (type);
1424 /* Don't perform operation if we honor signaling NaNs and
1425 either operand is a signaling NaN. */
1426 if (HONOR_SNANS (mode)
1427 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1428 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1429 return NULL_TREE;
1431 /* Don't perform operation if it would raise a division
1432 by zero exception. */
1433 if (code == RDIV_EXPR
1434 && real_equal (&d2, &dconst0)
1435 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1436 return NULL_TREE;
1438 /* If either operand is a NaN, just return it. Otherwise, set up
1439 for floating-point trap; we return an overflow. */
1440 if (REAL_VALUE_ISNAN (d1))
1442 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1443 is off. */
1444 d1.signalling = 0;
1445 t = build_real (type, d1);
1446 return t;
1448 else if (REAL_VALUE_ISNAN (d2))
1450 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1451 is off. */
1452 d2.signalling = 0;
1453 t = build_real (type, d2);
1454 return t;
1457 inexact = real_arithmetic (&value, code, &d1, &d2);
1458 real_convert (&result, mode, &value);
1460 /* Don't constant fold this floating point operation if
1461 both operands are not NaN but the result is NaN, and
1462 flag_trapping_math. Such operations should raise an
1463 invalid operation exception. */
1464 if (flag_trapping_math
1465 && MODE_HAS_NANS (mode)
1466 && REAL_VALUE_ISNAN (result)
1467 && !REAL_VALUE_ISNAN (d1)
1468 && !REAL_VALUE_ISNAN (d2))
1469 return NULL_TREE;
1471 /* Don't constant fold this floating point operation if
1472 the result has overflowed and flag_trapping_math. */
1473 if (flag_trapping_math
1474 && MODE_HAS_INFINITIES (mode)
1475 && REAL_VALUE_ISINF (result)
1476 && !REAL_VALUE_ISINF (d1)
1477 && !REAL_VALUE_ISINF (d2))
1478 return NULL_TREE;
1480 /* Don't constant fold this floating point operation if the
1481 result may dependent upon the run-time rounding mode and
1482 flag_rounding_math is set, or if GCC's software emulation
1483 is unable to accurately represent the result. */
1484 if ((flag_rounding_math
1485 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1486 && (inexact || !real_identical (&result, &value)))
1487 return NULL_TREE;
1489 t = build_real (type, result);
1491 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1492 return t;
1495 if (TREE_CODE (arg1) == FIXED_CST)
1497 FIXED_VALUE_TYPE f1;
1498 FIXED_VALUE_TYPE f2;
1499 FIXED_VALUE_TYPE result;
1500 tree t, type;
1501 bool sat_p;
1502 bool overflow_p;
1504 /* The following codes are handled by fixed_arithmetic. */
1505 switch (code)
1507 case PLUS_EXPR:
1508 case MINUS_EXPR:
1509 case MULT_EXPR:
1510 case TRUNC_DIV_EXPR:
1511 if (TREE_CODE (arg2) != FIXED_CST)
1512 return NULL_TREE;
1513 f2 = TREE_FIXED_CST (arg2);
1514 break;
1516 case LSHIFT_EXPR:
1517 case RSHIFT_EXPR:
1519 if (TREE_CODE (arg2) != INTEGER_CST)
1520 return NULL_TREE;
1521 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1522 f2.data.high = w2.elt (1);
1523 f2.data.low = w2.ulow ();
1524 f2.mode = SImode;
1526 break;
1528 default:
1529 return NULL_TREE;
1532 f1 = TREE_FIXED_CST (arg1);
1533 type = TREE_TYPE (arg1);
1534 sat_p = TYPE_SATURATING (type);
1535 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1536 t = build_fixed (type, result);
1537 /* Propagate overflow flags. */
1538 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1539 TREE_OVERFLOW (t) = 1;
1540 return t;
1543 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1545 tree type = TREE_TYPE (arg1);
1546 tree r1 = TREE_REALPART (arg1);
1547 tree i1 = TREE_IMAGPART (arg1);
1548 tree r2 = TREE_REALPART (arg2);
1549 tree i2 = TREE_IMAGPART (arg2);
1550 tree real, imag;
1552 switch (code)
1554 case PLUS_EXPR:
1555 case MINUS_EXPR:
1556 real = const_binop (code, r1, r2);
1557 imag = const_binop (code, i1, i2);
1558 break;
1560 case MULT_EXPR:
1561 if (COMPLEX_FLOAT_TYPE_P (type))
1562 return do_mpc_arg2 (arg1, arg2, type,
1563 /* do_nonfinite= */ folding_initializer,
1564 mpc_mul);
1566 real = const_binop (MINUS_EXPR,
1567 const_binop (MULT_EXPR, r1, r2),
1568 const_binop (MULT_EXPR, i1, i2));
1569 imag = const_binop (PLUS_EXPR,
1570 const_binop (MULT_EXPR, r1, i2),
1571 const_binop (MULT_EXPR, i1, r2));
1572 break;
1574 case RDIV_EXPR:
1575 if (COMPLEX_FLOAT_TYPE_P (type))
1576 return do_mpc_arg2 (arg1, arg2, type,
1577 /* do_nonfinite= */ folding_initializer,
1578 mpc_div);
1579 /* Fallthru. */
1580 case TRUNC_DIV_EXPR:
1581 case CEIL_DIV_EXPR:
1582 case FLOOR_DIV_EXPR:
1583 case ROUND_DIV_EXPR:
1584 if (flag_complex_method == 0)
1586 /* Keep this algorithm in sync with
1587 tree-complex.cc:expand_complex_div_straight().
1589 Expand complex division to scalars, straightforward algorithm.
1590 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1591 t = br*br + bi*bi
1593 tree magsquared
1594 = const_binop (PLUS_EXPR,
1595 const_binop (MULT_EXPR, r2, r2),
1596 const_binop (MULT_EXPR, i2, i2));
1597 tree t1
1598 = const_binop (PLUS_EXPR,
1599 const_binop (MULT_EXPR, r1, r2),
1600 const_binop (MULT_EXPR, i1, i2));
1601 tree t2
1602 = const_binop (MINUS_EXPR,
1603 const_binop (MULT_EXPR, i1, r2),
1604 const_binop (MULT_EXPR, r1, i2));
1606 real = const_binop (code, t1, magsquared);
1607 imag = const_binop (code, t2, magsquared);
1609 else
1611 /* Keep this algorithm in sync with
1612 tree-complex.cc:expand_complex_div_wide().
1614 Expand complex division to scalars, modified algorithm to minimize
1615 overflow with wide input ranges. */
1616 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1617 fold_abs_const (r2, TREE_TYPE (type)),
1618 fold_abs_const (i2, TREE_TYPE (type)));
1620 if (integer_nonzerop (compare))
1622 /* In the TRUE branch, we compute
1623 ratio = br/bi;
1624 div = (br * ratio) + bi;
1625 tr = (ar * ratio) + ai;
1626 ti = (ai * ratio) - ar;
1627 tr = tr / div;
1628 ti = ti / div; */
1629 tree ratio = const_binop (code, r2, i2);
1630 tree div = const_binop (PLUS_EXPR, i2,
1631 const_binop (MULT_EXPR, r2, ratio));
1632 real = const_binop (MULT_EXPR, r1, ratio);
1633 real = const_binop (PLUS_EXPR, real, i1);
1634 real = const_binop (code, real, div);
1636 imag = const_binop (MULT_EXPR, i1, ratio);
1637 imag = const_binop (MINUS_EXPR, imag, r1);
1638 imag = const_binop (code, imag, div);
1640 else
1642 /* In the FALSE branch, we compute
1643 ratio = d/c;
1644 divisor = (d * ratio) + c;
1645 tr = (b * ratio) + a;
1646 ti = b - (a * ratio);
1647 tr = tr / div;
1648 ti = ti / div; */
1649 tree ratio = const_binop (code, i2, r2);
1650 tree div = const_binop (PLUS_EXPR, r2,
1651 const_binop (MULT_EXPR, i2, ratio));
1653 real = const_binop (MULT_EXPR, i1, ratio);
1654 real = const_binop (PLUS_EXPR, real, r1);
1655 real = const_binop (code, real, div);
1657 imag = const_binop (MULT_EXPR, r1, ratio);
1658 imag = const_binop (MINUS_EXPR, i1, imag);
1659 imag = const_binop (code, imag, div);
1662 break;
1664 default:
1665 return NULL_TREE;
1668 if (real && imag)
1669 return build_complex (type, real, imag);
1672 tree simplified;
1673 if ((simplified = simplify_const_binop (code, arg1, arg2, 0)))
1674 return simplified;
1676 if (commutative_tree_code (code)
1677 && (simplified = simplify_const_binop (code, arg2, arg1, 1)))
1678 return simplified;
1680 if (TREE_CODE (arg1) == VECTOR_CST
1681 && TREE_CODE (arg2) == VECTOR_CST
1682 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1683 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1685 tree type = TREE_TYPE (arg1);
1686 bool step_ok_p;
1687 if (VECTOR_CST_STEPPED_P (arg1)
1688 && VECTOR_CST_STEPPED_P (arg2))
1689 /* We can operate directly on the encoding if:
1691 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1692 implies
1693 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1695 Addition and subtraction are the supported operators
1696 for which this is true. */
1697 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1698 else if (VECTOR_CST_STEPPED_P (arg1))
1699 /* We can operate directly on stepped encodings if:
1701 a3 - a2 == a2 - a1
1702 implies:
1703 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1705 which is true if (x -> x op c) distributes over addition. */
1706 step_ok_p = distributes_over_addition_p (code, 1);
1707 else
1708 /* Similarly in reverse. */
1709 step_ok_p = distributes_over_addition_p (code, 2);
1710 tree_vector_builder elts;
1711 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1712 return NULL_TREE;
1713 unsigned int count = elts.encoded_nelts ();
1714 for (unsigned int i = 0; i < count; ++i)
1716 tree elem1 = VECTOR_CST_ELT (arg1, i);
1717 tree elem2 = VECTOR_CST_ELT (arg2, i);
1719 tree elt = const_binop (code, elem1, elem2);
1721 /* It is possible that const_binop cannot handle the given
1722 code and return NULL_TREE */
1723 if (elt == NULL_TREE)
1724 return NULL_TREE;
1725 elts.quick_push (elt);
1728 return elts.build ();
1731 /* Shifts allow a scalar offset for a vector. */
1732 if (TREE_CODE (arg1) == VECTOR_CST
1733 && TREE_CODE (arg2) == INTEGER_CST)
1735 tree type = TREE_TYPE (arg1);
1736 bool step_ok_p = distributes_over_addition_p (code, 1);
1737 tree_vector_builder elts;
1738 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1739 return NULL_TREE;
1740 unsigned int count = elts.encoded_nelts ();
1741 for (unsigned int i = 0; i < count; ++i)
1743 tree elem1 = VECTOR_CST_ELT (arg1, i);
1745 tree elt = const_binop (code, elem1, arg2);
1747 /* It is possible that const_binop cannot handle the given
1748 code and return NULL_TREE. */
1749 if (elt == NULL_TREE)
1750 return NULL_TREE;
1751 elts.quick_push (elt);
1754 return elts.build ();
1756 return NULL_TREE;
1759 /* Overload that adds a TYPE parameter to be able to dispatch
1760 to fold_relational_const. */
1762 tree
1763 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1765 if (TREE_CODE_CLASS (code) == tcc_comparison)
1766 return fold_relational_const (code, type, arg1, arg2);
1768 /* ??? Until we make the const_binop worker take the type of the
1769 result as argument put those cases that need it here. */
1770 switch (code)
1772 case VEC_SERIES_EXPR:
1773 if (CONSTANT_CLASS_P (arg1)
1774 && CONSTANT_CLASS_P (arg2))
1775 return build_vec_series (type, arg1, arg2);
1776 return NULL_TREE;
1778 case COMPLEX_EXPR:
1779 if ((TREE_CODE (arg1) == REAL_CST
1780 && TREE_CODE (arg2) == REAL_CST)
1781 || (TREE_CODE (arg1) == INTEGER_CST
1782 && TREE_CODE (arg2) == INTEGER_CST))
1783 return build_complex (type, arg1, arg2);
1784 return NULL_TREE;
1786 case POINTER_DIFF_EXPR:
1787 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1789 poly_offset_int res = (wi::to_poly_offset (arg1)
1790 - wi::to_poly_offset (arg2));
1791 return force_fit_type (type, res, 1,
1792 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1794 return NULL_TREE;
1796 case VEC_PACK_TRUNC_EXPR:
1797 case VEC_PACK_FIX_TRUNC_EXPR:
1798 case VEC_PACK_FLOAT_EXPR:
1800 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1802 if (TREE_CODE (arg1) != VECTOR_CST
1803 || TREE_CODE (arg2) != VECTOR_CST)
1804 return NULL_TREE;
1806 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1807 return NULL_TREE;
1809 out_nelts = in_nelts * 2;
1810 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1811 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1813 tree_vector_builder elts (type, out_nelts, 1);
1814 for (i = 0; i < out_nelts; i++)
1816 tree elt = (i < in_nelts
1817 ? VECTOR_CST_ELT (arg1, i)
1818 : VECTOR_CST_ELT (arg2, i - in_nelts));
1819 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1820 ? NOP_EXPR
1821 : code == VEC_PACK_FLOAT_EXPR
1822 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1823 TREE_TYPE (type), elt);
1824 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1825 return NULL_TREE;
1826 elts.quick_push (elt);
1829 return elts.build ();
1832 case VEC_WIDEN_MULT_LO_EXPR:
1833 case VEC_WIDEN_MULT_HI_EXPR:
1834 case VEC_WIDEN_MULT_EVEN_EXPR:
1835 case VEC_WIDEN_MULT_ODD_EXPR:
1837 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1839 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1840 return NULL_TREE;
1842 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1843 return NULL_TREE;
1844 out_nelts = in_nelts / 2;
1845 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1846 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1848 if (code == VEC_WIDEN_MULT_LO_EXPR)
1849 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1850 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1851 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1852 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1853 scale = 1, ofs = 0;
1854 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1855 scale = 1, ofs = 1;
1857 tree_vector_builder elts (type, out_nelts, 1);
1858 for (out = 0; out < out_nelts; out++)
1860 unsigned int in = (out << scale) + ofs;
1861 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1862 VECTOR_CST_ELT (arg1, in));
1863 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1864 VECTOR_CST_ELT (arg2, in));
1866 if (t1 == NULL_TREE || t2 == NULL_TREE)
1867 return NULL_TREE;
1868 tree elt = const_binop (MULT_EXPR, t1, t2);
1869 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1870 return NULL_TREE;
1871 elts.quick_push (elt);
1874 return elts.build ();
1877 default:;
1880 if (TREE_CODE_CLASS (code) != tcc_binary)
1881 return NULL_TREE;
1883 /* Make sure type and arg0 have the same saturating flag. */
1884 gcc_checking_assert (TYPE_SATURATING (type)
1885 == TYPE_SATURATING (TREE_TYPE (arg1)));
1887 return const_binop (code, arg1, arg2);
1890 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1891 Return zero if computing the constants is not possible. */
1893 tree
1894 const_unop (enum tree_code code, tree type, tree arg0)
1896 /* Don't perform the operation, other than NEGATE and ABS, if
1897 flag_signaling_nans is on and the operand is a signaling NaN. */
1898 if (TREE_CODE (arg0) == REAL_CST
1899 && HONOR_SNANS (arg0)
1900 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1901 && code != NEGATE_EXPR
1902 && code != ABS_EXPR
1903 && code != ABSU_EXPR)
1904 return NULL_TREE;
1906 switch (code)
1908 CASE_CONVERT:
1909 case FLOAT_EXPR:
1910 case FIX_TRUNC_EXPR:
1911 case FIXED_CONVERT_EXPR:
1912 return fold_convert_const (code, type, arg0);
1914 case ADDR_SPACE_CONVERT_EXPR:
1915 /* If the source address is 0, and the source address space
1916 cannot have a valid object at 0, fold to dest type null. */
1917 if (integer_zerop (arg0)
1918 && !(targetm.addr_space.zero_address_valid
1919 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1920 return fold_convert_const (code, type, arg0);
1921 break;
1923 case VIEW_CONVERT_EXPR:
1924 return fold_view_convert_expr (type, arg0);
1926 case NEGATE_EXPR:
1928 /* Can't call fold_negate_const directly here as that doesn't
1929 handle all cases and we might not be able to negate some
1930 constants. */
1931 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1932 if (tem && CONSTANT_CLASS_P (tem))
1933 return tem;
1934 break;
1937 case ABS_EXPR:
1938 case ABSU_EXPR:
1939 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1940 return fold_abs_const (arg0, type);
1941 break;
1943 case CONJ_EXPR:
1944 if (TREE_CODE (arg0) == COMPLEX_CST)
1946 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1947 TREE_TYPE (type));
1948 return build_complex (type, TREE_REALPART (arg0), ipart);
1950 break;
1952 case BIT_NOT_EXPR:
1953 if (TREE_CODE (arg0) == INTEGER_CST)
1954 return fold_not_const (arg0, type);
1955 else if (POLY_INT_CST_P (arg0))
1956 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1957 /* Perform BIT_NOT_EXPR on each element individually. */
1958 else if (TREE_CODE (arg0) == VECTOR_CST)
1960 tree elem;
1962 /* This can cope with stepped encodings because ~x == -1 - x. */
1963 tree_vector_builder elements;
1964 elements.new_unary_operation (type, arg0, true);
1965 unsigned int i, count = elements.encoded_nelts ();
1966 for (i = 0; i < count; ++i)
1968 elem = VECTOR_CST_ELT (arg0, i);
1969 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1970 if (elem == NULL_TREE)
1971 break;
1972 elements.quick_push (elem);
1974 if (i == count)
1975 return elements.build ();
1977 break;
1979 case TRUTH_NOT_EXPR:
1980 if (TREE_CODE (arg0) == INTEGER_CST)
1981 return constant_boolean_node (integer_zerop (arg0), type);
1982 break;
1984 case REALPART_EXPR:
1985 if (TREE_CODE (arg0) == COMPLEX_CST)
1986 return fold_convert (type, TREE_REALPART (arg0));
1987 break;
1989 case IMAGPART_EXPR:
1990 if (TREE_CODE (arg0) == COMPLEX_CST)
1991 return fold_convert (type, TREE_IMAGPART (arg0));
1992 break;
1994 case VEC_UNPACK_LO_EXPR:
1995 case VEC_UNPACK_HI_EXPR:
1996 case VEC_UNPACK_FLOAT_LO_EXPR:
1997 case VEC_UNPACK_FLOAT_HI_EXPR:
1998 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1999 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
2001 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
2002 enum tree_code subcode;
2004 if (TREE_CODE (arg0) != VECTOR_CST)
2005 return NULL_TREE;
2007 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
2008 return NULL_TREE;
2009 out_nelts = in_nelts / 2;
2010 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
2012 unsigned int offset = 0;
2013 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
2014 || code == VEC_UNPACK_FLOAT_LO_EXPR
2015 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
2016 offset = out_nelts;
2018 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
2019 subcode = NOP_EXPR;
2020 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
2021 || code == VEC_UNPACK_FLOAT_HI_EXPR)
2022 subcode = FLOAT_EXPR;
2023 else
2024 subcode = FIX_TRUNC_EXPR;
2026 tree_vector_builder elts (type, out_nelts, 1);
2027 for (i = 0; i < out_nelts; i++)
2029 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
2030 VECTOR_CST_ELT (arg0, i + offset));
2031 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
2032 return NULL_TREE;
2033 elts.quick_push (elt);
2036 return elts.build ();
2039 case VEC_DUPLICATE_EXPR:
2040 if (CONSTANT_CLASS_P (arg0))
2041 return build_vector_from_val (type, arg0);
2042 return NULL_TREE;
2044 default:
2045 break;
2048 return NULL_TREE;
2051 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2052 indicates which particular sizetype to create. */
2054 tree
2055 size_int_kind (poly_int64 number, enum size_type_kind kind)
2057 return build_int_cst (sizetype_tab[(int) kind], number);
2060 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2061 is a tree code. The type of the result is taken from the operands.
2062 Both must be equivalent integer types, ala int_binop_types_match_p.
2063 If the operands are constant, so is the result. */
2065 tree
2066 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2068 tree type = TREE_TYPE (arg0);
2070 if (arg0 == error_mark_node || arg1 == error_mark_node)
2071 return error_mark_node;
2073 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2074 TREE_TYPE (arg1)));
2076 /* Handle the special case of two poly_int constants faster. */
2077 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2079 /* And some specific cases even faster than that. */
2080 if (code == PLUS_EXPR)
2082 if (integer_zerop (arg0)
2083 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2084 return arg1;
2085 if (integer_zerop (arg1)
2086 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2087 return arg0;
2089 else if (code == MINUS_EXPR)
2091 if (integer_zerop (arg1)
2092 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2093 return arg0;
2095 else if (code == MULT_EXPR)
2097 if (integer_onep (arg0)
2098 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2099 return arg1;
2102 /* Handle general case of two integer constants. For sizetype
2103 constant calculations we always want to know about overflow,
2104 even in the unsigned case. */
2105 tree res = int_const_binop (code, arg0, arg1, -1);
2106 if (res != NULL_TREE)
2107 return res;
2110 return fold_build2_loc (loc, code, type, arg0, arg1);
2113 /* Given two values, either both of sizetype or both of bitsizetype,
2114 compute the difference between the two values. Return the value
2115 in signed type corresponding to the type of the operands. */
2117 tree
2118 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2120 tree type = TREE_TYPE (arg0);
2121 tree ctype;
2123 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2124 TREE_TYPE (arg1)));
2126 /* If the type is already signed, just do the simple thing. */
2127 if (!TYPE_UNSIGNED (type))
2128 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2130 if (type == sizetype)
2131 ctype = ssizetype;
2132 else if (type == bitsizetype)
2133 ctype = sbitsizetype;
2134 else
2135 ctype = signed_type_for (type);
2137 /* If either operand is not a constant, do the conversions to the signed
2138 type and subtract. The hardware will do the right thing with any
2139 overflow in the subtraction. */
2140 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2141 return size_binop_loc (loc, MINUS_EXPR,
2142 fold_convert_loc (loc, ctype, arg0),
2143 fold_convert_loc (loc, ctype, arg1));
2145 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2146 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2147 overflow) and negate (which can't either). Special-case a result
2148 of zero while we're here. */
2149 if (tree_int_cst_equal (arg0, arg1))
2150 return build_int_cst (ctype, 0);
2151 else if (tree_int_cst_lt (arg1, arg0))
2152 return fold_convert_loc (loc, ctype,
2153 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2154 else
2155 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2156 fold_convert_loc (loc, ctype,
2157 size_binop_loc (loc,
2158 MINUS_EXPR,
2159 arg1, arg0)));
2162 /* A subroutine of fold_convert_const handling conversions of an
2163 INTEGER_CST to another integer type. */
2165 static tree
2166 fold_convert_const_int_from_int (tree type, const_tree arg1)
2168 /* Given an integer constant, make new constant with new type,
2169 appropriately sign-extended or truncated. Use widest_int
2170 so that any extension is done according ARG1's type. */
2171 tree arg1_type = TREE_TYPE (arg1);
2172 unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2173 return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2174 TYPE_SIGN (arg1_type)),
2175 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2176 TREE_OVERFLOW (arg1));
2179 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2180 to an integer type. */
2182 static tree
2183 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2185 bool overflow = false;
2186 tree t;
2188 /* The following code implements the floating point to integer
2189 conversion rules required by the Java Language Specification,
2190 that IEEE NaNs are mapped to zero and values that overflow
2191 the target precision saturate, i.e. values greater than
2192 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2193 are mapped to INT_MIN. These semantics are allowed by the
2194 C and C++ standards that simply state that the behavior of
2195 FP-to-integer conversion is unspecified upon overflow. */
2197 wide_int val;
2198 REAL_VALUE_TYPE r;
2199 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2201 switch (code)
2203 case FIX_TRUNC_EXPR:
2204 real_trunc (&r, VOIDmode, &x);
2205 break;
2207 default:
2208 gcc_unreachable ();
2211 /* If R is NaN, return zero and show we have an overflow. */
2212 if (REAL_VALUE_ISNAN (r))
2214 overflow = true;
2215 val = wi::zero (TYPE_PRECISION (type));
2218 /* See if R is less than the lower bound or greater than the
2219 upper bound. */
2221 if (! overflow)
2223 tree lt = TYPE_MIN_VALUE (type);
2224 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2225 if (real_less (&r, &l))
2227 overflow = true;
2228 val = wi::to_wide (lt);
2232 if (! overflow)
2234 tree ut = TYPE_MAX_VALUE (type);
2235 if (ut)
2237 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2238 if (real_less (&u, &r))
2240 overflow = true;
2241 val = wi::to_wide (ut);
2246 if (! overflow)
2247 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2249 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2250 return t;
2253 /* A subroutine of fold_convert_const handling conversions of a
2254 FIXED_CST to an integer type. */
2256 static tree
2257 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2259 tree t;
2260 double_int temp, temp_trunc;
2261 scalar_mode mode;
2263 /* Right shift FIXED_CST to temp by fbit. */
2264 temp = TREE_FIXED_CST (arg1).data;
2265 mode = TREE_FIXED_CST (arg1).mode;
2266 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2268 temp = temp.rshift (GET_MODE_FBIT (mode),
2269 HOST_BITS_PER_DOUBLE_INT,
2270 SIGNED_FIXED_POINT_MODE_P (mode));
2272 /* Left shift temp to temp_trunc by fbit. */
2273 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2274 HOST_BITS_PER_DOUBLE_INT,
2275 SIGNED_FIXED_POINT_MODE_P (mode));
2277 else
2279 temp = double_int_zero;
2280 temp_trunc = double_int_zero;
2283 /* If FIXED_CST is negative, we need to round the value toward 0.
2284 By checking if the fractional bits are not zero to add 1 to temp. */
2285 if (SIGNED_FIXED_POINT_MODE_P (mode)
2286 && temp_trunc.is_negative ()
2287 && TREE_FIXED_CST (arg1).data != temp_trunc)
2288 temp += double_int_one;
2290 /* Given a fixed-point constant, make new constant with new type,
2291 appropriately sign-extended or truncated. */
2292 t = force_fit_type (type, temp, -1,
2293 (temp.is_negative ()
2294 && (TYPE_UNSIGNED (type)
2295 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2296 | TREE_OVERFLOW (arg1));
2298 return t;
2301 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2302 to another floating point type. */
2304 static tree
2305 fold_convert_const_real_from_real (tree type, const_tree arg1)
2307 REAL_VALUE_TYPE value;
2308 tree t;
2310 /* If the underlying modes are the same, simply treat it as
2311 copy and rebuild with TREE_REAL_CST information and the
2312 given type. */
2313 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2315 t = build_real (type, TREE_REAL_CST (arg1));
2316 return t;
2319 /* Don't perform the operation if flag_signaling_nans is on
2320 and the operand is a signaling NaN. */
2321 if (HONOR_SNANS (arg1)
2322 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2323 return NULL_TREE;
2325 /* With flag_rounding_math we should respect the current rounding mode
2326 unless the conversion is exact. */
2327 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2328 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2329 return NULL_TREE;
2331 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2332 t = build_real (type, value);
2334 /* If converting an infinity or NAN to a representation that doesn't
2335 have one, set the overflow bit so that we can produce some kind of
2336 error message at the appropriate point if necessary. It's not the
2337 most user-friendly message, but it's better than nothing. */
2338 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2339 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2340 TREE_OVERFLOW (t) = 1;
2341 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2342 && !MODE_HAS_NANS (TYPE_MODE (type)))
2343 TREE_OVERFLOW (t) = 1;
2344 /* Regular overflow, conversion produced an infinity in a mode that
2345 can't represent them. */
2346 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2347 && REAL_VALUE_ISINF (value)
2348 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2349 TREE_OVERFLOW (t) = 1;
2350 else
2351 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2352 return t;
2355 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2356 to a floating point type. */
2358 static tree
2359 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2361 REAL_VALUE_TYPE value;
2362 tree t;
2364 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2365 &TREE_FIXED_CST (arg1));
2366 t = build_real (type, value);
2368 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2369 return t;
2372 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2373 to another fixed-point type. */
2375 static tree
2376 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2378 FIXED_VALUE_TYPE value;
2379 tree t;
2380 bool overflow_p;
2382 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2383 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2384 t = build_fixed (type, value);
2386 /* Propagate overflow flags. */
2387 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2389 return t;
2392 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2393 to a fixed-point type. */
2395 static tree
2396 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2398 FIXED_VALUE_TYPE value;
2399 tree t;
2400 bool overflow_p;
2401 double_int di;
2403 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2405 di.low = TREE_INT_CST_ELT (arg1, 0);
2406 if (TREE_INT_CST_NUNITS (arg1) == 1)
2407 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2408 else
2409 di.high = TREE_INT_CST_ELT (arg1, 1);
2411 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2412 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2413 TYPE_SATURATING (type));
2414 t = build_fixed (type, value);
2416 /* Propagate overflow flags. */
2417 if (overflow_p | TREE_OVERFLOW (arg1))
2418 TREE_OVERFLOW (t) = 1;
2419 return t;
2422 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2423 to a fixed-point type. */
2425 static tree
2426 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2428 FIXED_VALUE_TYPE value;
2429 tree t;
2430 bool overflow_p;
2432 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2433 &TREE_REAL_CST (arg1),
2434 TYPE_SATURATING (type));
2435 t = build_fixed (type, value);
2437 /* Propagate overflow flags. */
2438 if (overflow_p | TREE_OVERFLOW (arg1))
2439 TREE_OVERFLOW (t) = 1;
2440 return t;
2443 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2444 type TYPE. If no simplification can be done return NULL_TREE. */
2446 static tree
2447 fold_convert_const (enum tree_code code, tree type, tree arg1)
2449 tree arg_type = TREE_TYPE (arg1);
2450 if (arg_type == type)
2451 return arg1;
2453 /* We can't widen types, since the runtime value could overflow the
2454 original type before being extended to the new type. */
2455 if (POLY_INT_CST_P (arg1)
2456 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2457 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2458 return build_poly_int_cst (type,
2459 poly_wide_int::from (poly_int_cst_value (arg1),
2460 TYPE_PRECISION (type),
2461 TYPE_SIGN (arg_type)));
2463 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2464 || TREE_CODE (type) == OFFSET_TYPE)
2466 if (TREE_CODE (arg1) == INTEGER_CST)
2467 return fold_convert_const_int_from_int (type, arg1);
2468 else if (TREE_CODE (arg1) == REAL_CST)
2469 return fold_convert_const_int_from_real (code, type, arg1);
2470 else if (TREE_CODE (arg1) == FIXED_CST)
2471 return fold_convert_const_int_from_fixed (type, arg1);
2473 else if (SCALAR_FLOAT_TYPE_P (type))
2475 if (TREE_CODE (arg1) == INTEGER_CST)
2477 tree res = build_real_from_int_cst (type, arg1);
2478 /* Avoid the folding if flag_rounding_math is on and the
2479 conversion is not exact. */
2480 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2482 bool fail = false;
2483 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2484 TYPE_PRECISION (TREE_TYPE (arg1)));
2485 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2486 return NULL_TREE;
2488 return res;
2490 else if (TREE_CODE (arg1) == REAL_CST)
2491 return fold_convert_const_real_from_real (type, arg1);
2492 else if (TREE_CODE (arg1) == FIXED_CST)
2493 return fold_convert_const_real_from_fixed (type, arg1);
2495 else if (FIXED_POINT_TYPE_P (type))
2497 if (TREE_CODE (arg1) == FIXED_CST)
2498 return fold_convert_const_fixed_from_fixed (type, arg1);
2499 else if (TREE_CODE (arg1) == INTEGER_CST)
2500 return fold_convert_const_fixed_from_int (type, arg1);
2501 else if (TREE_CODE (arg1) == REAL_CST)
2502 return fold_convert_const_fixed_from_real (type, arg1);
2504 else if (VECTOR_TYPE_P (type))
2506 if (TREE_CODE (arg1) == VECTOR_CST
2507 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2509 tree elttype = TREE_TYPE (type);
2510 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2511 /* We can't handle steps directly when extending, since the
2512 values need to wrap at the original precision first. */
2513 bool step_ok_p
2514 = (INTEGRAL_TYPE_P (elttype)
2515 && INTEGRAL_TYPE_P (arg1_elttype)
2516 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2517 tree_vector_builder v;
2518 if (!v.new_unary_operation (type, arg1, step_ok_p))
2519 return NULL_TREE;
2520 unsigned int len = v.encoded_nelts ();
2521 for (unsigned int i = 0; i < len; ++i)
2523 tree elt = VECTOR_CST_ELT (arg1, i);
2524 tree cvt = fold_convert_const (code, elttype, elt);
2525 if (cvt == NULL_TREE)
2526 return NULL_TREE;
2527 v.quick_push (cvt);
2529 return v.build ();
2532 return NULL_TREE;
2535 /* Construct a vector of zero elements of vector type TYPE. */
2537 static tree
2538 build_zero_vector (tree type)
2540 tree t;
2542 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2543 return build_vector_from_val (type, t);
2546 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2548 bool
2549 fold_convertible_p (const_tree type, const_tree arg)
2551 const_tree orig = TREE_TYPE (arg);
2553 if (type == orig)
2554 return true;
2556 if (TREE_CODE (arg) == ERROR_MARK
2557 || TREE_CODE (type) == ERROR_MARK
2558 || TREE_CODE (orig) == ERROR_MARK)
2559 return false;
2561 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2562 return true;
2564 switch (TREE_CODE (type))
2566 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2567 case POINTER_TYPE: case REFERENCE_TYPE:
2568 case OFFSET_TYPE:
2569 return (INTEGRAL_TYPE_P (orig)
2570 || (POINTER_TYPE_P (orig)
2571 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2572 || TREE_CODE (orig) == OFFSET_TYPE);
2574 case REAL_TYPE:
2575 case FIXED_POINT_TYPE:
2576 case VOID_TYPE:
2577 return TREE_CODE (type) == TREE_CODE (orig);
2579 case VECTOR_TYPE:
2580 return (VECTOR_TYPE_P (orig)
2581 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2582 TYPE_VECTOR_SUBPARTS (orig))
2583 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2585 default:
2586 return false;
2590 /* Convert expression ARG to type TYPE. Used by the middle-end for
2591 simple conversions in preference to calling the front-end's convert. */
2593 tree
2594 fold_convert_loc (location_t loc, tree type, tree arg)
2596 tree orig = TREE_TYPE (arg);
2597 tree tem;
2599 if (type == orig)
2600 return arg;
2602 if (TREE_CODE (arg) == ERROR_MARK
2603 || TREE_CODE (type) == ERROR_MARK
2604 || TREE_CODE (orig) == ERROR_MARK)
2605 return error_mark_node;
2607 switch (TREE_CODE (type))
2609 case POINTER_TYPE:
2610 case REFERENCE_TYPE:
2611 /* Handle conversions between pointers to different address spaces. */
2612 if (POINTER_TYPE_P (orig)
2613 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2614 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2615 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2616 /* fall through */
2618 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2619 case OFFSET_TYPE: case BITINT_TYPE:
2620 if (TREE_CODE (arg) == INTEGER_CST)
2622 tem = fold_convert_const (NOP_EXPR, type, arg);
2623 if (tem != NULL_TREE)
2624 return tem;
2626 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2627 || TREE_CODE (orig) == OFFSET_TYPE)
2628 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2629 if (TREE_CODE (orig) == COMPLEX_TYPE)
2630 return fold_convert_loc (loc, type,
2631 fold_build1_loc (loc, REALPART_EXPR,
2632 TREE_TYPE (orig), arg));
2633 gcc_assert (VECTOR_TYPE_P (orig)
2634 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2637 case REAL_TYPE:
2638 if (TREE_CODE (arg) == INTEGER_CST)
2640 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2641 if (tem != NULL_TREE)
2642 return tem;
2644 else if (TREE_CODE (arg) == REAL_CST)
2646 tem = fold_convert_const (NOP_EXPR, type, arg);
2647 if (tem != NULL_TREE)
2648 return tem;
2650 else if (TREE_CODE (arg) == FIXED_CST)
2652 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2653 if (tem != NULL_TREE)
2654 return tem;
2657 switch (TREE_CODE (orig))
2659 case INTEGER_TYPE: case BITINT_TYPE:
2660 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2661 case POINTER_TYPE: case REFERENCE_TYPE:
2662 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2664 case REAL_TYPE:
2665 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2667 case FIXED_POINT_TYPE:
2668 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2670 case COMPLEX_TYPE:
2671 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 return fold_convert_loc (loc, type, tem);
2674 default:
2675 gcc_unreachable ();
2678 case FIXED_POINT_TYPE:
2679 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2680 || TREE_CODE (arg) == REAL_CST)
2682 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2683 if (tem != NULL_TREE)
2684 goto fold_convert_exit;
2687 switch (TREE_CODE (orig))
2689 case FIXED_POINT_TYPE:
2690 case INTEGER_TYPE:
2691 case ENUMERAL_TYPE:
2692 case BOOLEAN_TYPE:
2693 case REAL_TYPE:
2694 case BITINT_TYPE:
2695 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2697 case COMPLEX_TYPE:
2698 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2699 return fold_convert_loc (loc, type, tem);
2701 default:
2702 gcc_unreachable ();
2705 case COMPLEX_TYPE:
2706 switch (TREE_CODE (orig))
2708 case INTEGER_TYPE: case BITINT_TYPE:
2709 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2710 case POINTER_TYPE: case REFERENCE_TYPE:
2711 case REAL_TYPE:
2712 case FIXED_POINT_TYPE:
2713 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2714 fold_convert_loc (loc, TREE_TYPE (type), arg),
2715 fold_convert_loc (loc, TREE_TYPE (type),
2716 integer_zero_node));
2717 case COMPLEX_TYPE:
2719 tree rpart, ipart;
2721 if (TREE_CODE (arg) == COMPLEX_EXPR)
2723 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2724 TREE_OPERAND (arg, 0));
2725 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2726 TREE_OPERAND (arg, 1));
2727 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2730 arg = save_expr (arg);
2731 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2732 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2733 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2734 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2735 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2738 default:
2739 gcc_unreachable ();
2742 case VECTOR_TYPE:
2743 if (integer_zerop (arg))
2744 return build_zero_vector (type);
2745 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2746 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2747 || VECTOR_TYPE_P (orig));
2748 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2750 case VOID_TYPE:
2751 tem = fold_ignored_result (arg);
2752 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2754 default:
2755 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2756 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2757 gcc_unreachable ();
2759 fold_convert_exit:
2760 tem = protected_set_expr_location_unshare (tem, loc);
2761 return tem;
2764 /* Return false if expr can be assumed not to be an lvalue, true
2765 otherwise. */
2767 static bool
2768 maybe_lvalue_p (const_tree x)
2770 /* We only need to wrap lvalue tree codes. */
2771 switch (TREE_CODE (x))
2773 case VAR_DECL:
2774 case PARM_DECL:
2775 case RESULT_DECL:
2776 case LABEL_DECL:
2777 case FUNCTION_DECL:
2778 case SSA_NAME:
2779 case COMPOUND_LITERAL_EXPR:
2781 case COMPONENT_REF:
2782 case MEM_REF:
2783 case INDIRECT_REF:
2784 case ARRAY_REF:
2785 case ARRAY_RANGE_REF:
2786 case BIT_FIELD_REF:
2787 case OBJ_TYPE_REF:
2789 case REALPART_EXPR:
2790 case IMAGPART_EXPR:
2791 case PREINCREMENT_EXPR:
2792 case PREDECREMENT_EXPR:
2793 case SAVE_EXPR:
2794 case TRY_CATCH_EXPR:
2795 case WITH_CLEANUP_EXPR:
2796 case COMPOUND_EXPR:
2797 case MODIFY_EXPR:
2798 case TARGET_EXPR:
2799 case COND_EXPR:
2800 case BIND_EXPR:
2801 case VIEW_CONVERT_EXPR:
2802 break;
2804 default:
2805 /* Assume the worst for front-end tree codes. */
2806 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2807 break;
2808 return false;
2811 return true;
2814 /* Return an expr equal to X but certainly not valid as an lvalue. */
2816 tree
2817 non_lvalue_loc (location_t loc, tree x)
2819 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2820 us. */
2821 if (in_gimple_form)
2822 return x;
2824 if (! maybe_lvalue_p (x))
2825 return x;
2826 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2829 /* Given a tree comparison code, return the code that is the logical inverse.
2830 It is generally not safe to do this for floating-point comparisons, except
2831 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2832 ERROR_MARK in this case. */
2834 enum tree_code
2835 invert_tree_comparison (enum tree_code code, bool honor_nans)
2837 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2838 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2839 return ERROR_MARK;
2841 switch (code)
2843 case EQ_EXPR:
2844 return NE_EXPR;
2845 case NE_EXPR:
2846 return EQ_EXPR;
2847 case GT_EXPR:
2848 return honor_nans ? UNLE_EXPR : LE_EXPR;
2849 case GE_EXPR:
2850 return honor_nans ? UNLT_EXPR : LT_EXPR;
2851 case LT_EXPR:
2852 return honor_nans ? UNGE_EXPR : GE_EXPR;
2853 case LE_EXPR:
2854 return honor_nans ? UNGT_EXPR : GT_EXPR;
2855 case LTGT_EXPR:
2856 return UNEQ_EXPR;
2857 case UNEQ_EXPR:
2858 return LTGT_EXPR;
2859 case UNGT_EXPR:
2860 return LE_EXPR;
2861 case UNGE_EXPR:
2862 return LT_EXPR;
2863 case UNLT_EXPR:
2864 return GE_EXPR;
2865 case UNLE_EXPR:
2866 return GT_EXPR;
2867 case ORDERED_EXPR:
2868 return UNORDERED_EXPR;
2869 case UNORDERED_EXPR:
2870 return ORDERED_EXPR;
2871 default:
2872 gcc_unreachable ();
2876 /* Similar, but return the comparison that results if the operands are
2877 swapped. This is safe for floating-point. */
2879 enum tree_code
2880 swap_tree_comparison (enum tree_code code)
2882 switch (code)
2884 case EQ_EXPR:
2885 case NE_EXPR:
2886 case ORDERED_EXPR:
2887 case UNORDERED_EXPR:
2888 case LTGT_EXPR:
2889 case UNEQ_EXPR:
2890 return code;
2891 case GT_EXPR:
2892 return LT_EXPR;
2893 case GE_EXPR:
2894 return LE_EXPR;
2895 case LT_EXPR:
2896 return GT_EXPR;
2897 case LE_EXPR:
2898 return GE_EXPR;
2899 case UNGT_EXPR:
2900 return UNLT_EXPR;
2901 case UNGE_EXPR:
2902 return UNLE_EXPR;
2903 case UNLT_EXPR:
2904 return UNGT_EXPR;
2905 case UNLE_EXPR:
2906 return UNGE_EXPR;
2907 default:
2908 gcc_unreachable ();
2913 /* Convert a comparison tree code from an enum tree_code representation
2914 into a compcode bit-based encoding. This function is the inverse of
2915 compcode_to_comparison. */
2917 static enum comparison_code
2918 comparison_to_compcode (enum tree_code code)
2920 switch (code)
2922 case LT_EXPR:
2923 return COMPCODE_LT;
2924 case EQ_EXPR:
2925 return COMPCODE_EQ;
2926 case LE_EXPR:
2927 return COMPCODE_LE;
2928 case GT_EXPR:
2929 return COMPCODE_GT;
2930 case NE_EXPR:
2931 return COMPCODE_NE;
2932 case GE_EXPR:
2933 return COMPCODE_GE;
2934 case ORDERED_EXPR:
2935 return COMPCODE_ORD;
2936 case UNORDERED_EXPR:
2937 return COMPCODE_UNORD;
2938 case UNLT_EXPR:
2939 return COMPCODE_UNLT;
2940 case UNEQ_EXPR:
2941 return COMPCODE_UNEQ;
2942 case UNLE_EXPR:
2943 return COMPCODE_UNLE;
2944 case UNGT_EXPR:
2945 return COMPCODE_UNGT;
2946 case LTGT_EXPR:
2947 return COMPCODE_LTGT;
2948 case UNGE_EXPR:
2949 return COMPCODE_UNGE;
2950 default:
2951 gcc_unreachable ();
2955 /* Convert a compcode bit-based encoding of a comparison operator back
2956 to GCC's enum tree_code representation. This function is the
2957 inverse of comparison_to_compcode. */
2959 static enum tree_code
2960 compcode_to_comparison (enum comparison_code code)
2962 switch (code)
2964 case COMPCODE_LT:
2965 return LT_EXPR;
2966 case COMPCODE_EQ:
2967 return EQ_EXPR;
2968 case COMPCODE_LE:
2969 return LE_EXPR;
2970 case COMPCODE_GT:
2971 return GT_EXPR;
2972 case COMPCODE_NE:
2973 return NE_EXPR;
2974 case COMPCODE_GE:
2975 return GE_EXPR;
2976 case COMPCODE_ORD:
2977 return ORDERED_EXPR;
2978 case COMPCODE_UNORD:
2979 return UNORDERED_EXPR;
2980 case COMPCODE_UNLT:
2981 return UNLT_EXPR;
2982 case COMPCODE_UNEQ:
2983 return UNEQ_EXPR;
2984 case COMPCODE_UNLE:
2985 return UNLE_EXPR;
2986 case COMPCODE_UNGT:
2987 return UNGT_EXPR;
2988 case COMPCODE_LTGT:
2989 return LTGT_EXPR;
2990 case COMPCODE_UNGE:
2991 return UNGE_EXPR;
2992 default:
2993 gcc_unreachable ();
2997 /* Return true if COND1 tests the opposite condition of COND2. */
2999 bool
3000 inverse_conditions_p (const_tree cond1, const_tree cond2)
3002 return (COMPARISON_CLASS_P (cond1)
3003 && COMPARISON_CLASS_P (cond2)
3004 && (invert_tree_comparison
3005 (TREE_CODE (cond1),
3006 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3007 && operand_equal_p (TREE_OPERAND (cond1, 0),
3008 TREE_OPERAND (cond2, 0), 0)
3009 && operand_equal_p (TREE_OPERAND (cond1, 1),
3010 TREE_OPERAND (cond2, 1), 0));
3013 /* Return a tree for the comparison which is the combination of
3014 doing the AND or OR (depending on CODE) of the two operations LCODE
3015 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3016 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3017 if this makes the transformation invalid. */
3019 tree
3020 combine_comparisons (location_t loc,
3021 enum tree_code code, enum tree_code lcode,
3022 enum tree_code rcode, tree truth_type,
3023 tree ll_arg, tree lr_arg)
3025 bool honor_nans = HONOR_NANS (ll_arg);
3026 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3027 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3028 int compcode;
3030 switch (code)
3032 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3033 compcode = lcompcode & rcompcode;
3034 break;
3036 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3037 compcode = lcompcode | rcompcode;
3038 break;
3040 default:
3041 return NULL_TREE;
3044 if (!honor_nans)
3046 /* Eliminate unordered comparisons, as well as LTGT and ORD
3047 which are not used unless the mode has NaNs. */
3048 compcode &= ~COMPCODE_UNORD;
3049 if (compcode == COMPCODE_LTGT)
3050 compcode = COMPCODE_NE;
3051 else if (compcode == COMPCODE_ORD)
3052 compcode = COMPCODE_TRUE;
3054 else if (flag_trapping_math)
3056 /* Check that the original operation and the optimized ones will trap
3057 under the same condition. */
3058 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3059 && (lcompcode != COMPCODE_EQ)
3060 && (lcompcode != COMPCODE_ORD);
3061 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3062 && (rcompcode != COMPCODE_EQ)
3063 && (rcompcode != COMPCODE_ORD);
3064 bool trap = (compcode & COMPCODE_UNORD) == 0
3065 && (compcode != COMPCODE_EQ)
3066 && (compcode != COMPCODE_ORD);
3068 /* In a short-circuited boolean expression the LHS might be
3069 such that the RHS, if evaluated, will never trap. For
3070 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3071 if neither x nor y is NaN. (This is a mixed blessing: for
3072 example, the expression above will never trap, hence
3073 optimizing it to x < y would be invalid). */
3074 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3075 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3076 rtrap = false;
3078 /* If the comparison was short-circuited, and only the RHS
3079 trapped, we may now generate a spurious trap. */
3080 if (rtrap && !ltrap
3081 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3082 return NULL_TREE;
3084 /* If we changed the conditions that cause a trap, we lose. */
3085 if ((ltrap || rtrap) != trap)
3086 return NULL_TREE;
3089 if (compcode == COMPCODE_TRUE)
3090 return constant_boolean_node (true, truth_type);
3091 else if (compcode == COMPCODE_FALSE)
3092 return constant_boolean_node (false, truth_type);
3093 else
3095 enum tree_code tcode;
3097 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3098 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3102 /* Return nonzero if two operands (typically of the same tree node)
3103 are necessarily equal. FLAGS modifies behavior as follows:
3105 If OEP_ONLY_CONST is set, only return nonzero for constants.
3106 This function tests whether the operands are indistinguishable;
3107 it does not test whether they are equal using C's == operation.
3108 The distinction is important for IEEE floating point, because
3109 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3110 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3112 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3113 even though it may hold multiple values during a function.
3114 This is because a GCC tree node guarantees that nothing else is
3115 executed between the evaluation of its "operands" (which may often
3116 be evaluated in arbitrary order). Hence if the operands themselves
3117 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3118 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3119 unset means assuming isochronic (or instantaneous) tree equivalence.
3120 Unless comparing arbitrary expression trees, such as from different
3121 statements, this flag can usually be left unset.
3123 If OEP_PURE_SAME is set, then pure functions with identical arguments
3124 are considered the same. It is used when the caller has other ways
3125 to ensure that global memory is unchanged in between.
3127 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3128 not values of expressions.
3130 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3131 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3133 If OEP_BITWISE is set, then require the values to be bitwise identical
3134 rather than simply numerically equal. Do not take advantage of things
3135 like math-related flags or undefined behavior; only return true for
3136 values that are provably bitwise identical in all circumstances.
3138 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3139 any operand with side effect. This is unnecesarily conservative in the
3140 case we know that arg0 and arg1 are in disjoint code paths (such as in
3141 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3142 addresses with TREE_CONSTANT flag set so we know that &var == &var
3143 even if var is volatile. */
3145 bool
3146 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3147 unsigned int flags)
3149 bool r;
3150 if (verify_hash_value (arg0, arg1, flags, &r))
3151 return r;
3153 STRIP_ANY_LOCATION_WRAPPER (arg0);
3154 STRIP_ANY_LOCATION_WRAPPER (arg1);
3156 /* If either is ERROR_MARK, they aren't equal. */
3157 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3158 || TREE_TYPE (arg0) == error_mark_node
3159 || TREE_TYPE (arg1) == error_mark_node)
3160 return false;
3162 /* Similar, if either does not have a type (like a template id),
3163 they aren't equal. */
3164 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3165 return false;
3167 /* Bitwise identity makes no sense if the values have different layouts. */
3168 if ((flags & OEP_BITWISE)
3169 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3170 return false;
3172 /* We cannot consider pointers to different address space equal. */
3173 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3174 && POINTER_TYPE_P (TREE_TYPE (arg1))
3175 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3176 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3177 return false;
3179 /* Check equality of integer constants before bailing out due to
3180 precision differences. */
3181 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3183 /* Address of INTEGER_CST is not defined; check that we did not forget
3184 to drop the OEP_ADDRESS_OF flags. */
3185 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3186 return tree_int_cst_equal (arg0, arg1);
3189 if (!(flags & OEP_ADDRESS_OF))
3191 /* If both types don't have the same signedness, then we can't consider
3192 them equal. We must check this before the STRIP_NOPS calls
3193 because they may change the signedness of the arguments. As pointers
3194 strictly don't have a signedness, require either two pointers or
3195 two non-pointers as well. */
3196 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3197 || POINTER_TYPE_P (TREE_TYPE (arg0))
3198 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3199 return false;
3201 /* If both types don't have the same precision, then it is not safe
3202 to strip NOPs. */
3203 if (element_precision (TREE_TYPE (arg0))
3204 != element_precision (TREE_TYPE (arg1)))
3205 return false;
3207 STRIP_NOPS (arg0);
3208 STRIP_NOPS (arg1);
3210 #if 0
3211 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3212 sanity check once the issue is solved. */
3213 else
3214 /* Addresses of conversions and SSA_NAMEs (and many other things)
3215 are not defined. Check that we did not forget to drop the
3216 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3217 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3218 && TREE_CODE (arg0) != SSA_NAME);
3219 #endif
3221 /* In case both args are comparisons but with different comparison
3222 code, try to swap the comparison operands of one arg to produce
3223 a match and compare that variant. */
3224 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3225 && COMPARISON_CLASS_P (arg0)
3226 && COMPARISON_CLASS_P (arg1))
3228 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3230 if (TREE_CODE (arg0) == swap_code)
3231 return operand_equal_p (TREE_OPERAND (arg0, 0),
3232 TREE_OPERAND (arg1, 1), flags)
3233 && operand_equal_p (TREE_OPERAND (arg0, 1),
3234 TREE_OPERAND (arg1, 0), flags);
3237 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3239 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3240 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3242 else if (flags & OEP_ADDRESS_OF)
3244 /* If we are interested in comparing addresses ignore
3245 MEM_REF wrappings of the base that can appear just for
3246 TBAA reasons. */
3247 if (TREE_CODE (arg0) == MEM_REF
3248 && DECL_P (arg1)
3249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3250 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3251 && integer_zerop (TREE_OPERAND (arg0, 1)))
3252 return true;
3253 else if (TREE_CODE (arg1) == MEM_REF
3254 && DECL_P (arg0)
3255 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3256 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3257 && integer_zerop (TREE_OPERAND (arg1, 1)))
3258 return true;
3259 return false;
3261 else
3262 return false;
3265 /* When not checking adddresses, this is needed for conversions and for
3266 COMPONENT_REF. Might as well play it safe and always test this. */
3267 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3268 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3269 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3270 && !(flags & OEP_ADDRESS_OF)))
3271 return false;
3273 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3274 We don't care about side effects in that case because the SAVE_EXPR
3275 takes care of that for us. In all other cases, two expressions are
3276 equal if they have no side effects. If we have two identical
3277 expressions with side effects that should be treated the same due
3278 to the only side effects being identical SAVE_EXPR's, that will
3279 be detected in the recursive calls below.
3280 If we are taking an invariant address of two identical objects
3281 they are necessarily equal as well. */
3282 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3283 && (TREE_CODE (arg0) == SAVE_EXPR
3284 || (flags & OEP_MATCH_SIDE_EFFECTS)
3285 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3286 return true;
3288 /* Next handle constant cases, those for which we can return 1 even
3289 if ONLY_CONST is set. */
3290 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3291 switch (TREE_CODE (arg0))
3293 case INTEGER_CST:
3294 return tree_int_cst_equal (arg0, arg1);
3296 case FIXED_CST:
3297 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3298 TREE_FIXED_CST (arg1));
3300 case REAL_CST:
3301 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3302 return true;
3304 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3306 /* If we do not distinguish between signed and unsigned zero,
3307 consider them equal. */
3308 if (real_zerop (arg0) && real_zerop (arg1))
3309 return true;
3311 return false;
3313 case VECTOR_CST:
3315 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3316 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3317 return false;
3319 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3320 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3321 return false;
3323 unsigned int count = vector_cst_encoded_nelts (arg0);
3324 for (unsigned int i = 0; i < count; ++i)
3325 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3326 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3327 return false;
3328 return true;
3331 case COMPLEX_CST:
3332 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3333 flags)
3334 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3335 flags));
3337 case STRING_CST:
3338 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3339 && ! memcmp (TREE_STRING_POINTER (arg0),
3340 TREE_STRING_POINTER (arg1),
3341 TREE_STRING_LENGTH (arg0)));
3343 case ADDR_EXPR:
3344 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3345 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3346 flags | OEP_ADDRESS_OF
3347 | OEP_MATCH_SIDE_EFFECTS);
3348 case CONSTRUCTOR:
3350 /* In GIMPLE empty constructors are allowed in initializers of
3351 aggregates. */
3352 if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3353 return true;
3355 /* See sem_variable::equals in ipa-icf for a similar approach. */
3356 tree typ0 = TREE_TYPE (arg0);
3357 tree typ1 = TREE_TYPE (arg1);
3359 if (TREE_CODE (typ0) != TREE_CODE (typ1))
3360 return false;
3361 else if (TREE_CODE (typ0) == ARRAY_TYPE)
3363 /* For arrays, check that the sizes all match. */
3364 const HOST_WIDE_INT siz0 = int_size_in_bytes (typ0);
3365 if (TYPE_MODE (typ0) != TYPE_MODE (typ1)
3366 || siz0 < 0
3367 || siz0 != int_size_in_bytes (typ1))
3368 return false;
3370 else if (!types_compatible_p (typ0, typ1))
3371 return false;
3373 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3374 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3375 if (vec_safe_length (v0) != vec_safe_length (v1))
3376 return false;
3378 /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3379 of the CONSTRUCTOR referenced indirectly. */
3380 flags &= ~OEP_ADDRESS_OF;
3382 for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3384 constructor_elt *c0 = &(*v0)[idx];
3385 constructor_elt *c1 = &(*v1)[idx];
3387 /* Check that the values are the same... */
3388 if (c0->value != c1->value
3389 && !operand_equal_p (c0->value, c1->value, flags))
3390 return false;
3392 /* ... and that they apply to the same field! */
3393 if (c0->index != c1->index
3394 && (TREE_CODE (typ0) == ARRAY_TYPE
3395 ? !operand_equal_p (c0->index, c1->index, flags)
3396 : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3397 DECL_FIELD_OFFSET (c1->index),
3398 flags)
3399 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3400 DECL_FIELD_BIT_OFFSET (c1->index),
3401 flags)))
3402 return false;
3405 return true;
3408 default:
3409 break;
3412 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3413 two instances of undefined behavior will give identical results. */
3414 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3415 return false;
3417 /* Define macros to test an operand from arg0 and arg1 for equality and a
3418 variant that allows null and views null as being different from any
3419 non-null value. In the latter case, if either is null, the both
3420 must be; otherwise, do the normal comparison. */
3421 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3422 TREE_OPERAND (arg1, N), flags)
3424 #define OP_SAME_WITH_NULL(N) \
3425 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3426 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3428 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3430 case tcc_unary:
3431 /* Two conversions are equal only if signedness and modes match. */
3432 switch (TREE_CODE (arg0))
3434 CASE_CONVERT:
3435 case FIX_TRUNC_EXPR:
3436 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3437 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3438 return false;
3439 break;
3440 default:
3441 break;
3444 return OP_SAME (0);
3447 case tcc_comparison:
3448 case tcc_binary:
3449 if (OP_SAME (0) && OP_SAME (1))
3450 return true;
3452 /* For commutative ops, allow the other order. */
3453 return (commutative_tree_code (TREE_CODE (arg0))
3454 && operand_equal_p (TREE_OPERAND (arg0, 0),
3455 TREE_OPERAND (arg1, 1), flags)
3456 && operand_equal_p (TREE_OPERAND (arg0, 1),
3457 TREE_OPERAND (arg1, 0), flags));
3459 case tcc_reference:
3460 /* If either of the pointer (or reference) expressions we are
3461 dereferencing contain a side effect, these cannot be equal,
3462 but their addresses can be. */
3463 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3464 && (TREE_SIDE_EFFECTS (arg0)
3465 || TREE_SIDE_EFFECTS (arg1)))
3466 return false;
3468 switch (TREE_CODE (arg0))
3470 case INDIRECT_REF:
3471 if (!(flags & OEP_ADDRESS_OF))
3473 if (TYPE_ALIGN (TREE_TYPE (arg0))
3474 != TYPE_ALIGN (TREE_TYPE (arg1)))
3475 return false;
3476 /* Verify that the access types are compatible. */
3477 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3478 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3479 return false;
3481 flags &= ~OEP_ADDRESS_OF;
3482 return OP_SAME (0);
3484 case IMAGPART_EXPR:
3485 /* Require the same offset. */
3486 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3487 TYPE_SIZE (TREE_TYPE (arg1)),
3488 flags & ~OEP_ADDRESS_OF))
3489 return false;
3491 /* Fallthru. */
3492 case REALPART_EXPR:
3493 case VIEW_CONVERT_EXPR:
3494 return OP_SAME (0);
3496 case TARGET_MEM_REF:
3497 case MEM_REF:
3498 if (!(flags & OEP_ADDRESS_OF))
3500 /* Require equal access sizes */
3501 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3502 && (!TYPE_SIZE (TREE_TYPE (arg0))
3503 || !TYPE_SIZE (TREE_TYPE (arg1))
3504 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3505 TYPE_SIZE (TREE_TYPE (arg1)),
3506 flags)))
3507 return false;
3508 /* Verify that access happens in similar types. */
3509 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3510 return false;
3511 /* Verify that accesses are TBAA compatible. */
3512 if (!alias_ptr_types_compatible_p
3513 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3514 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3515 || (MR_DEPENDENCE_CLIQUE (arg0)
3516 != MR_DEPENDENCE_CLIQUE (arg1))
3517 || (MR_DEPENDENCE_BASE (arg0)
3518 != MR_DEPENDENCE_BASE (arg1)))
3519 return false;
3520 /* Verify that alignment is compatible. */
3521 if (TYPE_ALIGN (TREE_TYPE (arg0))
3522 != TYPE_ALIGN (TREE_TYPE (arg1)))
3523 return false;
3525 flags &= ~OEP_ADDRESS_OF;
3526 return (OP_SAME (0) && OP_SAME (1)
3527 /* TARGET_MEM_REF require equal extra operands. */
3528 && (TREE_CODE (arg0) != TARGET_MEM_REF
3529 || (OP_SAME_WITH_NULL (2)
3530 && OP_SAME_WITH_NULL (3)
3531 && OP_SAME_WITH_NULL (4))));
3533 case ARRAY_REF:
3534 case ARRAY_RANGE_REF:
3535 if (!OP_SAME (0))
3536 return false;
3537 flags &= ~OEP_ADDRESS_OF;
3538 /* Compare the array index by value if it is constant first as we
3539 may have different types but same value here. */
3540 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3541 TREE_OPERAND (arg1, 1))
3542 || OP_SAME (1))
3543 && OP_SAME_WITH_NULL (2)
3544 && OP_SAME_WITH_NULL (3)
3545 /* Compare low bound and element size as with OEP_ADDRESS_OF
3546 we have to account for the offset of the ref. */
3547 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3548 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3549 || (operand_equal_p (array_ref_low_bound
3550 (CONST_CAST_TREE (arg0)),
3551 array_ref_low_bound
3552 (CONST_CAST_TREE (arg1)), flags)
3553 && operand_equal_p (array_ref_element_size
3554 (CONST_CAST_TREE (arg0)),
3555 array_ref_element_size
3556 (CONST_CAST_TREE (arg1)),
3557 flags))));
3559 case COMPONENT_REF:
3560 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3561 may be NULL when we're called to compare MEM_EXPRs. */
3562 if (!OP_SAME_WITH_NULL (0))
3563 return false;
3565 bool compare_address = flags & OEP_ADDRESS_OF;
3567 /* Most of time we only need to compare FIELD_DECLs for equality.
3568 However when determining address look into actual offsets.
3569 These may match for unions and unshared record types. */
3570 flags &= ~OEP_ADDRESS_OF;
3571 if (!OP_SAME (1))
3573 if (compare_address
3574 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3576 tree field0 = TREE_OPERAND (arg0, 1);
3577 tree field1 = TREE_OPERAND (arg1, 1);
3579 /* Non-FIELD_DECL operands can appear in C++ templates. */
3580 if (TREE_CODE (field0) != FIELD_DECL
3581 || TREE_CODE (field1) != FIELD_DECL
3582 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3583 DECL_FIELD_OFFSET (field1), flags)
3584 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3585 DECL_FIELD_BIT_OFFSET (field1),
3586 flags))
3587 return false;
3589 else
3590 return false;
3593 return OP_SAME_WITH_NULL (2);
3595 case BIT_FIELD_REF:
3596 if (!OP_SAME (0))
3597 return false;
3598 flags &= ~OEP_ADDRESS_OF;
3599 return OP_SAME (1) && OP_SAME (2);
3601 default:
3602 return false;
3605 case tcc_expression:
3606 switch (TREE_CODE (arg0))
3608 case ADDR_EXPR:
3609 /* Be sure we pass right ADDRESS_OF flag. */
3610 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3611 return operand_equal_p (TREE_OPERAND (arg0, 0),
3612 TREE_OPERAND (arg1, 0),
3613 flags | OEP_ADDRESS_OF);
3615 case TRUTH_NOT_EXPR:
3616 return OP_SAME (0);
3618 case TRUTH_ANDIF_EXPR:
3619 case TRUTH_ORIF_EXPR:
3620 return OP_SAME (0) && OP_SAME (1);
3622 case WIDEN_MULT_PLUS_EXPR:
3623 case WIDEN_MULT_MINUS_EXPR:
3624 if (!OP_SAME (2))
3625 return false;
3626 /* The multiplcation operands are commutative. */
3627 /* FALLTHRU */
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632 if (OP_SAME (0) && OP_SAME (1))
3633 return true;
3635 /* Otherwise take into account this is a commutative operation. */
3636 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3637 TREE_OPERAND (arg1, 1), flags)
3638 && operand_equal_p (TREE_OPERAND (arg0, 1),
3639 TREE_OPERAND (arg1, 0), flags));
3641 case COND_EXPR:
3642 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3643 return false;
3644 flags &= ~OEP_ADDRESS_OF;
3645 return OP_SAME (0);
3647 case BIT_INSERT_EXPR:
3648 /* BIT_INSERT_EXPR has an implict operand as the type precision
3649 of op1. Need to check to make sure they are the same. */
3650 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3651 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3652 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3653 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3654 return false;
3655 /* FALLTHRU */
3657 case VEC_COND_EXPR:
3658 case DOT_PROD_EXPR:
3659 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3661 case MODIFY_EXPR:
3662 case INIT_EXPR:
3663 case COMPOUND_EXPR:
3664 case PREDECREMENT_EXPR:
3665 case PREINCREMENT_EXPR:
3666 case POSTDECREMENT_EXPR:
3667 case POSTINCREMENT_EXPR:
3668 if (flags & OEP_LEXICOGRAPHIC)
3669 return OP_SAME (0) && OP_SAME (1);
3670 return false;
3672 case CLEANUP_POINT_EXPR:
3673 case EXPR_STMT:
3674 case SAVE_EXPR:
3675 if (flags & OEP_LEXICOGRAPHIC)
3676 return OP_SAME (0);
3677 return false;
3679 case OBJ_TYPE_REF:
3680 /* Virtual table reference. */
3681 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3682 OBJ_TYPE_REF_EXPR (arg1), flags))
3683 return false;
3684 flags &= ~OEP_ADDRESS_OF;
3685 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3686 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3687 return false;
3688 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3689 OBJ_TYPE_REF_OBJECT (arg1), flags))
3690 return false;
3691 if (virtual_method_call_p (arg0))
3693 if (!virtual_method_call_p (arg1))
3694 return false;
3695 return types_same_for_odr (obj_type_ref_class (arg0),
3696 obj_type_ref_class (arg1));
3698 return false;
3700 default:
3701 return false;
3704 case tcc_vl_exp:
3705 switch (TREE_CODE (arg0))
3707 case CALL_EXPR:
3708 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3709 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3710 /* If not both CALL_EXPRs are either internal or normal function
3711 functions, then they are not equal. */
3712 return false;
3713 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3715 /* If the CALL_EXPRs call different internal functions, then they
3716 are not equal. */
3717 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3718 return false;
3720 else
3722 /* If the CALL_EXPRs call different functions, then they are not
3723 equal. */
3724 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3725 flags))
3726 return false;
3729 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3731 unsigned int cef = call_expr_flags (arg0);
3732 if (flags & OEP_PURE_SAME)
3733 cef &= ECF_CONST | ECF_PURE;
3734 else
3735 cef &= ECF_CONST;
3736 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3737 return false;
3740 /* Now see if all the arguments are the same. */
3742 const_call_expr_arg_iterator iter0, iter1;
3743 const_tree a0, a1;
3744 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3745 a1 = first_const_call_expr_arg (arg1, &iter1);
3746 a0 && a1;
3747 a0 = next_const_call_expr_arg (&iter0),
3748 a1 = next_const_call_expr_arg (&iter1))
3749 if (! operand_equal_p (a0, a1, flags))
3750 return false;
3752 /* If we get here and both argument lists are exhausted
3753 then the CALL_EXPRs are equal. */
3754 return ! (a0 || a1);
3756 default:
3757 return false;
3760 case tcc_declaration:
3761 /* Consider __builtin_sqrt equal to sqrt. */
3762 if (TREE_CODE (arg0) == FUNCTION_DECL)
3763 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3764 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3765 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3766 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3768 if (DECL_P (arg0)
3769 && (flags & OEP_DECL_NAME)
3770 && (flags & OEP_LEXICOGRAPHIC))
3772 /* Consider decls with the same name equal. The caller needs
3773 to make sure they refer to the same entity (such as a function
3774 formal parameter). */
3775 tree a0name = DECL_NAME (arg0);
3776 tree a1name = DECL_NAME (arg1);
3777 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3778 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3779 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3781 return false;
3783 case tcc_exceptional:
3784 if (TREE_CODE (arg0) == CONSTRUCTOR)
3786 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3787 return false;
3789 /* In GIMPLE constructors are used only to build vectors from
3790 elements. Individual elements in the constructor must be
3791 indexed in increasing order and form an initial sequence.
3793 We make no effort to compare nonconstant ones in GENERIC. */
3794 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3795 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3796 return false;
3798 /* Be sure that vectors constructed have the same representation.
3799 We only tested element precision and modes to match.
3800 Vectors may be BLKmode and thus also check that the number of
3801 parts match. */
3802 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3803 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3804 return false;
3806 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3807 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3808 unsigned int len = vec_safe_length (v0);
3810 if (len != vec_safe_length (v1))
3811 return false;
3813 for (unsigned int i = 0; i < len; i++)
3815 constructor_elt *c0 = &(*v0)[i];
3816 constructor_elt *c1 = &(*v1)[i];
3818 if (!operand_equal_p (c0->value, c1->value, flags)
3819 /* In GIMPLE the indexes can be either NULL or matching i.
3820 Double check this so we won't get false
3821 positives for GENERIC. */
3822 || (c0->index
3823 && (TREE_CODE (c0->index) != INTEGER_CST
3824 || compare_tree_int (c0->index, i)))
3825 || (c1->index
3826 && (TREE_CODE (c1->index) != INTEGER_CST
3827 || compare_tree_int (c1->index, i))))
3828 return false;
3830 return true;
3832 else if (TREE_CODE (arg0) == STATEMENT_LIST
3833 && (flags & OEP_LEXICOGRAPHIC))
3835 /* Compare the STATEMENT_LISTs. */
3836 tree_stmt_iterator tsi1, tsi2;
3837 tree body1 = CONST_CAST_TREE (arg0);
3838 tree body2 = CONST_CAST_TREE (arg1);
3839 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3840 tsi_next (&tsi1), tsi_next (&tsi2))
3842 /* The lists don't have the same number of statements. */
3843 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3844 return false;
3845 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3846 return true;
3847 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3848 flags & (OEP_LEXICOGRAPHIC
3849 | OEP_NO_HASH_CHECK)))
3850 return false;
3853 return false;
3855 case tcc_statement:
3856 switch (TREE_CODE (arg0))
3858 case RETURN_EXPR:
3859 if (flags & OEP_LEXICOGRAPHIC)
3860 return OP_SAME_WITH_NULL (0);
3861 return false;
3862 case DEBUG_BEGIN_STMT:
3863 if (flags & OEP_LEXICOGRAPHIC)
3864 return true;
3865 return false;
3866 default:
3867 return false;
3870 default:
3871 return false;
3874 #undef OP_SAME
3875 #undef OP_SAME_WITH_NULL
3878 /* Generate a hash value for an expression. This can be used iteratively
3879 by passing a previous result as the HSTATE argument. */
3881 void
3882 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3883 unsigned int flags)
3885 int i;
3886 enum tree_code code;
3887 enum tree_code_class tclass;
3889 if (t == NULL_TREE || t == error_mark_node)
3891 hstate.merge_hash (0);
3892 return;
3895 STRIP_ANY_LOCATION_WRAPPER (t);
3897 if (!(flags & OEP_ADDRESS_OF))
3898 STRIP_NOPS (t);
3900 code = TREE_CODE (t);
3902 switch (code)
3904 /* Alas, constants aren't shared, so we can't rely on pointer
3905 identity. */
3906 case VOID_CST:
3907 hstate.merge_hash (0);
3908 return;
3909 case INTEGER_CST:
3910 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3911 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3912 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3913 return;
3914 case REAL_CST:
3916 unsigned int val2;
3917 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3918 val2 = rvc_zero;
3919 else
3920 val2 = real_hash (TREE_REAL_CST_PTR (t));
3921 hstate.merge_hash (val2);
3922 return;
3924 case FIXED_CST:
3926 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3927 hstate.merge_hash (val2);
3928 return;
3930 case STRING_CST:
3931 hstate.add ((const void *) TREE_STRING_POINTER (t),
3932 TREE_STRING_LENGTH (t));
3933 return;
3934 case COMPLEX_CST:
3935 hash_operand (TREE_REALPART (t), hstate, flags);
3936 hash_operand (TREE_IMAGPART (t), hstate, flags);
3937 return;
3938 case VECTOR_CST:
3940 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3941 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3942 unsigned int count = vector_cst_encoded_nelts (t);
3943 for (unsigned int i = 0; i < count; ++i)
3944 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3945 return;
3947 case SSA_NAME:
3948 /* We can just compare by pointer. */
3949 hstate.add_hwi (SSA_NAME_VERSION (t));
3950 return;
3951 case PLACEHOLDER_EXPR:
3952 /* The node itself doesn't matter. */
3953 return;
3954 case BLOCK:
3955 case OMP_CLAUSE:
3956 /* Ignore. */
3957 return;
3958 case TREE_LIST:
3959 /* A list of expressions, for a CALL_EXPR or as the elements of a
3960 VECTOR_CST. */
3961 for (; t; t = TREE_CHAIN (t))
3962 hash_operand (TREE_VALUE (t), hstate, flags);
3963 return;
3964 case CONSTRUCTOR:
3966 unsigned HOST_WIDE_INT idx;
3967 tree field, value;
3968 flags &= ~OEP_ADDRESS_OF;
3969 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3972 /* In GIMPLE the indexes can be either NULL or matching i. */
3973 if (field == NULL_TREE)
3974 field = bitsize_int (idx);
3975 if (TREE_CODE (field) == FIELD_DECL)
3977 hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
3978 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
3980 else
3981 hash_operand (field, hstate, flags);
3982 hash_operand (value, hstate, flags);
3984 return;
3986 case STATEMENT_LIST:
3988 tree_stmt_iterator i;
3989 for (i = tsi_start (CONST_CAST_TREE (t));
3990 !tsi_end_p (i); tsi_next (&i))
3991 hash_operand (tsi_stmt (i), hstate, flags);
3992 return;
3994 case TREE_VEC:
3995 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3996 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3997 return;
3998 case IDENTIFIER_NODE:
3999 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
4000 return;
4001 case FUNCTION_DECL:
4002 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
4003 Otherwise nodes that compare equal according to operand_equal_p might
4004 get different hash codes. However, don't do this for machine specific
4005 or front end builtins, since the function code is overloaded in those
4006 cases. */
4007 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
4008 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
4010 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
4011 code = TREE_CODE (t);
4013 /* FALL THROUGH */
4014 default:
4015 if (POLY_INT_CST_P (t))
4017 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
4018 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
4019 return;
4021 tclass = TREE_CODE_CLASS (code);
4023 if (tclass == tcc_declaration)
4025 /* DECL's have a unique ID */
4026 hstate.add_hwi (DECL_UID (t));
4028 else if (tclass == tcc_comparison && !commutative_tree_code (code))
4030 /* For comparisons that can be swapped, use the lower
4031 tree code. */
4032 enum tree_code ccode = swap_tree_comparison (code);
4033 if (code < ccode)
4034 ccode = code;
4035 hstate.add_object (ccode);
4036 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
4037 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
4039 else if (CONVERT_EXPR_CODE_P (code))
4041 /* NOP_EXPR and CONVERT_EXPR are considered equal by
4042 operand_equal_p. */
4043 enum tree_code ccode = NOP_EXPR;
4044 hstate.add_object (ccode);
4046 /* Don't hash the type, that can lead to having nodes which
4047 compare equal according to operand_equal_p, but which
4048 have different hash codes. Make sure to include signedness
4049 in the hash computation. */
4050 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4051 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4053 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
4054 else if (code == MEM_REF
4055 && (flags & OEP_ADDRESS_OF) != 0
4056 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
4057 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
4058 && integer_zerop (TREE_OPERAND (t, 1)))
4059 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
4060 hstate, flags);
4061 /* Don't ICE on FE specific trees, or their arguments etc.
4062 during operand_equal_p hash verification. */
4063 else if (!IS_EXPR_CODE_CLASS (tclass))
4064 gcc_assert (flags & OEP_HASH_CHECK);
4065 else
4067 unsigned int sflags = flags;
4069 hstate.add_object (code);
4071 switch (code)
4073 case ADDR_EXPR:
4074 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4075 flags |= OEP_ADDRESS_OF;
4076 sflags = flags;
4077 break;
4079 case INDIRECT_REF:
4080 case MEM_REF:
4081 case TARGET_MEM_REF:
4082 flags &= ~OEP_ADDRESS_OF;
4083 sflags = flags;
4084 break;
4086 case COMPONENT_REF:
4087 if (sflags & OEP_ADDRESS_OF)
4089 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4090 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
4091 hstate, flags & ~OEP_ADDRESS_OF);
4092 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
4093 hstate, flags & ~OEP_ADDRESS_OF);
4094 return;
4096 break;
4097 case ARRAY_REF:
4098 case ARRAY_RANGE_REF:
4099 case BIT_FIELD_REF:
4100 sflags &= ~OEP_ADDRESS_OF;
4101 break;
4103 case COND_EXPR:
4104 flags &= ~OEP_ADDRESS_OF;
4105 break;
4107 case WIDEN_MULT_PLUS_EXPR:
4108 case WIDEN_MULT_MINUS_EXPR:
4110 /* The multiplication operands are commutative. */
4111 inchash::hash one, two;
4112 hash_operand (TREE_OPERAND (t, 0), one, flags);
4113 hash_operand (TREE_OPERAND (t, 1), two, flags);
4114 hstate.add_commutative (one, two);
4115 hash_operand (TREE_OPERAND (t, 2), two, flags);
4116 return;
4119 case CALL_EXPR:
4120 if (CALL_EXPR_FN (t) == NULL_TREE)
4121 hstate.add_int (CALL_EXPR_IFN (t));
4122 break;
4124 case TARGET_EXPR:
4125 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4126 Usually different TARGET_EXPRs just should use
4127 different temporaries in their slots. */
4128 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4129 return;
4131 case OBJ_TYPE_REF:
4132 /* Virtual table reference. */
4133 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4134 flags &= ~OEP_ADDRESS_OF;
4135 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4136 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4137 if (!virtual_method_call_p (t))
4138 return;
4139 if (tree c = obj_type_ref_class (t))
4141 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4142 /* We compute mangled names only when free_lang_data is run.
4143 In that case we can hash precisely. */
4144 if (TREE_CODE (c) == TYPE_DECL
4145 && DECL_ASSEMBLER_NAME_SET_P (c))
4146 hstate.add_object
4147 (IDENTIFIER_HASH_VALUE
4148 (DECL_ASSEMBLER_NAME (c)));
4150 return;
4151 default:
4152 break;
4155 /* Don't hash the type, that can lead to having nodes which
4156 compare equal according to operand_equal_p, but which
4157 have different hash codes. */
4158 if (code == NON_LVALUE_EXPR)
4160 /* Make sure to include signness in the hash computation. */
4161 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4162 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4165 else if (commutative_tree_code (code))
4167 /* It's a commutative expression. We want to hash it the same
4168 however it appears. We do this by first hashing both operands
4169 and then rehashing based on the order of their independent
4170 hashes. */
4171 inchash::hash one, two;
4172 hash_operand (TREE_OPERAND (t, 0), one, flags);
4173 hash_operand (TREE_OPERAND (t, 1), two, flags);
4174 hstate.add_commutative (one, two);
4176 else
4177 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4178 hash_operand (TREE_OPERAND (t, i), hstate,
4179 i == 0 ? flags : sflags);
4181 return;
4185 bool
4186 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4187 unsigned int flags, bool *ret)
4189 /* When checking and unless comparing DECL names, verify that if
4190 the outermost operand_equal_p call returns non-zero then ARG0
4191 and ARG1 have the same hash value. */
4192 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4194 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4196 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4198 inchash::hash hstate0 (0), hstate1 (0);
4199 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4200 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4201 hashval_t h0 = hstate0.end ();
4202 hashval_t h1 = hstate1.end ();
4203 gcc_assert (h0 == h1);
4205 *ret = true;
4207 else
4208 *ret = false;
4210 return true;
4213 return false;
4217 static operand_compare default_compare_instance;
4219 /* Conveinece wrapper around operand_compare class because usually we do
4220 not need to play with the valueizer. */
4222 bool
4223 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4225 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4228 namespace inchash
4231 /* Generate a hash value for an expression. This can be used iteratively
4232 by passing a previous result as the HSTATE argument.
4234 This function is intended to produce the same hash for expressions which
4235 would compare equal using operand_equal_p. */
4236 void
4237 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4239 default_compare_instance.hash_operand (t, hstate, flags);
4244 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4245 with a different signedness or a narrower precision. */
4247 static bool
4248 operand_equal_for_comparison_p (tree arg0, tree arg1)
4250 if (operand_equal_p (arg0, arg1, 0))
4251 return true;
4253 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4254 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4255 return false;
4257 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4258 and see if the inner values are the same. This removes any
4259 signedness comparison, which doesn't matter here. */
4260 tree op0 = arg0;
4261 tree op1 = arg1;
4262 STRIP_NOPS (op0);
4263 STRIP_NOPS (op1);
4264 if (operand_equal_p (op0, op1, 0))
4265 return true;
4267 /* Discard a single widening conversion from ARG1 and see if the inner
4268 value is the same as ARG0. */
4269 if (CONVERT_EXPR_P (arg1)
4270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4271 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4272 < TYPE_PRECISION (TREE_TYPE (arg1))
4273 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4274 return true;
4276 return false;
4279 /* See if ARG is an expression that is either a comparison or is performing
4280 arithmetic on comparisons. The comparisons must only be comparing
4281 two different values, which will be stored in *CVAL1 and *CVAL2; if
4282 they are nonzero it means that some operands have already been found.
4283 No variables may be used anywhere else in the expression except in the
4284 comparisons.
4286 If this is true, return 1. Otherwise, return zero. */
4288 static bool
4289 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4291 enum tree_code code = TREE_CODE (arg);
4292 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4294 /* We can handle some of the tcc_expression cases here. */
4295 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4296 tclass = tcc_unary;
4297 else if (tclass == tcc_expression
4298 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4299 || code == COMPOUND_EXPR))
4300 tclass = tcc_binary;
4302 switch (tclass)
4304 case tcc_unary:
4305 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4307 case tcc_binary:
4308 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4309 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4311 case tcc_constant:
4312 return true;
4314 case tcc_expression:
4315 if (code == COND_EXPR)
4316 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4317 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4318 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4319 return false;
4321 case tcc_comparison:
4322 /* First see if we can handle the first operand, then the second. For
4323 the second operand, we know *CVAL1 can't be zero. It must be that
4324 one side of the comparison is each of the values; test for the
4325 case where this isn't true by failing if the two operands
4326 are the same. */
4328 if (operand_equal_p (TREE_OPERAND (arg, 0),
4329 TREE_OPERAND (arg, 1), 0))
4330 return false;
4332 if (*cval1 == 0)
4333 *cval1 = TREE_OPERAND (arg, 0);
4334 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4336 else if (*cval2 == 0)
4337 *cval2 = TREE_OPERAND (arg, 0);
4338 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4340 else
4341 return false;
4343 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4345 else if (*cval2 == 0)
4346 *cval2 = TREE_OPERAND (arg, 1);
4347 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4349 else
4350 return false;
4352 return true;
4354 default:
4355 return false;
4359 /* ARG is a tree that is known to contain just arithmetic operations and
4360 comparisons. Evaluate the operations in the tree substituting NEW0 for
4361 any occurrence of OLD0 as an operand of a comparison and likewise for
4362 NEW1 and OLD1. */
4364 static tree
4365 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4366 tree old1, tree new1)
4368 tree type = TREE_TYPE (arg);
4369 enum tree_code code = TREE_CODE (arg);
4370 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4372 /* We can handle some of the tcc_expression cases here. */
4373 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4374 tclass = tcc_unary;
4375 else if (tclass == tcc_expression
4376 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4377 tclass = tcc_binary;
4379 switch (tclass)
4381 case tcc_unary:
4382 return fold_build1_loc (loc, code, type,
4383 eval_subst (loc, TREE_OPERAND (arg, 0),
4384 old0, new0, old1, new1));
4386 case tcc_binary:
4387 return fold_build2_loc (loc, code, type,
4388 eval_subst (loc, TREE_OPERAND (arg, 0),
4389 old0, new0, old1, new1),
4390 eval_subst (loc, TREE_OPERAND (arg, 1),
4391 old0, new0, old1, new1));
4393 case tcc_expression:
4394 switch (code)
4396 case SAVE_EXPR:
4397 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4398 old1, new1);
4400 case COMPOUND_EXPR:
4401 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4402 old1, new1);
4404 case COND_EXPR:
4405 return fold_build3_loc (loc, code, type,
4406 eval_subst (loc, TREE_OPERAND (arg, 0),
4407 old0, new0, old1, new1),
4408 eval_subst (loc, TREE_OPERAND (arg, 1),
4409 old0, new0, old1, new1),
4410 eval_subst (loc, TREE_OPERAND (arg, 2),
4411 old0, new0, old1, new1));
4412 default:
4413 break;
4415 /* Fall through - ??? */
4417 case tcc_comparison:
4419 tree arg0 = TREE_OPERAND (arg, 0);
4420 tree arg1 = TREE_OPERAND (arg, 1);
4422 /* We need to check both for exact equality and tree equality. The
4423 former will be true if the operand has a side-effect. In that
4424 case, we know the operand occurred exactly once. */
4426 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4427 arg0 = new0;
4428 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4429 arg0 = new1;
4431 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4432 arg1 = new0;
4433 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4434 arg1 = new1;
4436 return fold_build2_loc (loc, code, type, arg0, arg1);
4439 default:
4440 return arg;
4444 /* Return a tree for the case when the result of an expression is RESULT
4445 converted to TYPE and OMITTED was previously an operand of the expression
4446 but is now not needed (e.g., we folded OMITTED * 0).
4448 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4449 the conversion of RESULT to TYPE. */
4451 tree
4452 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4454 tree t = fold_convert_loc (loc, type, result);
4456 /* If the resulting operand is an empty statement, just return the omitted
4457 statement casted to void. */
4458 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4459 return build1_loc (loc, NOP_EXPR, void_type_node,
4460 fold_ignored_result (omitted));
4462 if (TREE_SIDE_EFFECTS (omitted))
4463 return build2_loc (loc, COMPOUND_EXPR, type,
4464 fold_ignored_result (omitted), t);
4466 return non_lvalue_loc (loc, t);
4469 /* Return a tree for the case when the result of an expression is RESULT
4470 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4471 of the expression but are now not needed.
4473 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4474 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4475 evaluated before OMITTED2. Otherwise, if neither has side effects,
4476 just do the conversion of RESULT to TYPE. */
4478 tree
4479 omit_two_operands_loc (location_t loc, tree type, tree result,
4480 tree omitted1, tree omitted2)
4482 tree t = fold_convert_loc (loc, type, result);
4484 if (TREE_SIDE_EFFECTS (omitted2))
4485 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4486 if (TREE_SIDE_EFFECTS (omitted1))
4487 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4489 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4493 /* Return a simplified tree node for the truth-negation of ARG. This
4494 never alters ARG itself. We assume that ARG is an operation that
4495 returns a truth value (0 or 1).
4497 FIXME: one would think we would fold the result, but it causes
4498 problems with the dominator optimizer. */
4500 static tree
4501 fold_truth_not_expr (location_t loc, tree arg)
4503 tree type = TREE_TYPE (arg);
4504 enum tree_code code = TREE_CODE (arg);
4505 location_t loc1, loc2;
4507 /* If this is a comparison, we can simply invert it, except for
4508 floating-point non-equality comparisons, in which case we just
4509 enclose a TRUTH_NOT_EXPR around what we have. */
4511 if (TREE_CODE_CLASS (code) == tcc_comparison)
4513 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4514 if (FLOAT_TYPE_P (op_type)
4515 && flag_trapping_math
4516 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4517 && code != NE_EXPR && code != EQ_EXPR)
4518 return NULL_TREE;
4520 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4521 if (code == ERROR_MARK)
4522 return NULL_TREE;
4524 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4525 TREE_OPERAND (arg, 1));
4526 copy_warning (ret, arg);
4527 return ret;
4530 switch (code)
4532 case INTEGER_CST:
4533 return constant_boolean_node (integer_zerop (arg), type);
4535 case TRUTH_AND_EXPR:
4536 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4537 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4538 return build2_loc (loc, TRUTH_OR_EXPR, type,
4539 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4540 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4542 case TRUTH_OR_EXPR:
4543 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4544 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4545 return build2_loc (loc, TRUTH_AND_EXPR, type,
4546 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4547 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4549 case TRUTH_XOR_EXPR:
4550 /* Here we can invert either operand. We invert the first operand
4551 unless the second operand is a TRUTH_NOT_EXPR in which case our
4552 result is the XOR of the first operand with the inside of the
4553 negation of the second operand. */
4555 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4556 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4557 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4558 else
4559 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4560 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4561 TREE_OPERAND (arg, 1));
4563 case TRUTH_ANDIF_EXPR:
4564 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4565 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4566 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4567 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4568 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4570 case TRUTH_ORIF_EXPR:
4571 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4572 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4573 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4574 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4575 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4577 case TRUTH_NOT_EXPR:
4578 return TREE_OPERAND (arg, 0);
4580 case COND_EXPR:
4582 tree arg1 = TREE_OPERAND (arg, 1);
4583 tree arg2 = TREE_OPERAND (arg, 2);
4585 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4586 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4588 /* A COND_EXPR may have a throw as one operand, which
4589 then has void type. Just leave void operands
4590 as they are. */
4591 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4592 VOID_TYPE_P (TREE_TYPE (arg1))
4593 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4594 VOID_TYPE_P (TREE_TYPE (arg2))
4595 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4598 case COMPOUND_EXPR:
4599 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4600 return build2_loc (loc, COMPOUND_EXPR, type,
4601 TREE_OPERAND (arg, 0),
4602 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4604 case NON_LVALUE_EXPR:
4605 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4606 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4608 CASE_CONVERT:
4609 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4610 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4612 /* fall through */
4614 case FLOAT_EXPR:
4615 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4616 return build1_loc (loc, TREE_CODE (arg), type,
4617 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4619 case BIT_AND_EXPR:
4620 if (!integer_onep (TREE_OPERAND (arg, 1)))
4621 return NULL_TREE;
4622 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4624 case SAVE_EXPR:
4625 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4627 case CLEANUP_POINT_EXPR:
4628 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4629 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4630 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4632 default:
4633 return NULL_TREE;
4637 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4638 assume that ARG is an operation that returns a truth value (0 or 1
4639 for scalars, 0 or -1 for vectors). Return the folded expression if
4640 folding is successful. Otherwise, return NULL_TREE. */
4642 static tree
4643 fold_invert_truthvalue (location_t loc, tree arg)
4645 tree type = TREE_TYPE (arg);
4646 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4647 ? BIT_NOT_EXPR
4648 : TRUTH_NOT_EXPR,
4649 type, arg);
4652 /* Return a simplified tree node for the truth-negation of ARG. This
4653 never alters ARG itself. We assume that ARG is an operation that
4654 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4656 tree
4657 invert_truthvalue_loc (location_t loc, tree arg)
4659 if (TREE_CODE (arg) == ERROR_MARK)
4660 return arg;
4662 tree type = TREE_TYPE (arg);
4663 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4664 ? BIT_NOT_EXPR
4665 : TRUTH_NOT_EXPR,
4666 type, arg);
4669 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4670 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4671 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4672 is the original memory reference used to preserve the alias set of
4673 the access. */
4675 static tree
4676 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4677 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4678 int unsignedp, int reversep)
4680 tree result, bftype;
4682 /* Attempt not to lose the access path if possible. */
4683 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4685 tree ninner = TREE_OPERAND (orig_inner, 0);
4686 machine_mode nmode;
4687 poly_int64 nbitsize, nbitpos;
4688 tree noffset;
4689 int nunsignedp, nreversep, nvolatilep = 0;
4690 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4691 &noffset, &nmode, &nunsignedp,
4692 &nreversep, &nvolatilep);
4693 if (base == inner
4694 && noffset == NULL_TREE
4695 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4696 && !reversep
4697 && !nreversep
4698 && !nvolatilep)
4700 inner = ninner;
4701 bitpos -= nbitpos;
4705 alias_set_type iset = get_alias_set (orig_inner);
4706 if (iset == 0 && get_alias_set (inner) != iset)
4707 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4708 build_fold_addr_expr (inner),
4709 build_int_cst (ptr_type_node, 0));
4711 if (known_eq (bitpos, 0) && !reversep)
4713 tree size = TYPE_SIZE (TREE_TYPE (inner));
4714 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4715 || POINTER_TYPE_P (TREE_TYPE (inner)))
4716 && tree_fits_shwi_p (size)
4717 && tree_to_shwi (size) == bitsize)
4718 return fold_convert_loc (loc, type, inner);
4721 bftype = type;
4722 if (TYPE_PRECISION (bftype) != bitsize
4723 || TYPE_UNSIGNED (bftype) == !unsignedp)
4724 bftype = build_nonstandard_integer_type (bitsize, 0);
4726 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4727 bitsize_int (bitsize), bitsize_int (bitpos));
4728 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4730 if (bftype != type)
4731 result = fold_convert_loc (loc, type, result);
4733 return result;
4736 /* Optimize a bit-field compare.
4738 There are two cases: First is a compare against a constant and the
4739 second is a comparison of two items where the fields are at the same
4740 bit position relative to the start of a chunk (byte, halfword, word)
4741 large enough to contain it. In these cases we can avoid the shift
4742 implicit in bitfield extractions.
4744 For constants, we emit a compare of the shifted constant with the
4745 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4746 compared. For two fields at the same position, we do the ANDs with the
4747 similar mask and compare the result of the ANDs.
4749 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4750 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4751 are the left and right operands of the comparison, respectively.
4753 If the optimization described above can be done, we return the resulting
4754 tree. Otherwise we return zero. */
4756 static tree
4757 optimize_bit_field_compare (location_t loc, enum tree_code code,
4758 tree compare_type, tree lhs, tree rhs)
4760 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4761 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4762 tree type = TREE_TYPE (lhs);
4763 tree unsigned_type;
4764 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4765 machine_mode lmode, rmode;
4766 scalar_int_mode nmode;
4767 int lunsignedp, runsignedp;
4768 int lreversep, rreversep;
4769 int lvolatilep = 0, rvolatilep = 0;
4770 tree linner, rinner = NULL_TREE;
4771 tree mask;
4772 tree offset;
4774 /* Get all the information about the extractions being done. If the bit size
4775 is the same as the size of the underlying object, we aren't doing an
4776 extraction at all and so can do nothing. We also don't want to
4777 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4778 then will no longer be able to replace it. */
4779 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4780 &lunsignedp, &lreversep, &lvolatilep);
4781 if (linner == lhs
4782 || !known_size_p (plbitsize)
4783 || !plbitsize.is_constant (&lbitsize)
4784 || !plbitpos.is_constant (&lbitpos)
4785 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4786 || offset != 0
4787 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4788 || lvolatilep)
4789 return 0;
4791 if (const_p)
4792 rreversep = lreversep;
4793 else
4795 /* If this is not a constant, we can only do something if bit positions,
4796 sizes, signedness and storage order are the same. */
4797 rinner
4798 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4799 &runsignedp, &rreversep, &rvolatilep);
4801 if (rinner == rhs
4802 || maybe_ne (lbitpos, rbitpos)
4803 || maybe_ne (lbitsize, rbitsize)
4804 || lunsignedp != runsignedp
4805 || lreversep != rreversep
4806 || offset != 0
4807 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4808 || rvolatilep)
4809 return 0;
4812 /* Honor the C++ memory model and mimic what RTL expansion does. */
4813 poly_uint64 bitstart = 0;
4814 poly_uint64 bitend = 0;
4815 if (TREE_CODE (lhs) == COMPONENT_REF)
4817 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4818 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4819 return 0;
4822 /* See if we can find a mode to refer to this field. We should be able to,
4823 but fail if we can't. */
4824 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4825 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4826 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4827 TYPE_ALIGN (TREE_TYPE (rinner))),
4828 BITS_PER_WORD, false, &nmode))
4829 return 0;
4831 /* Set signed and unsigned types of the precision of this mode for the
4832 shifts below. */
4833 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4835 /* Compute the bit position and size for the new reference and our offset
4836 within it. If the new reference is the same size as the original, we
4837 won't optimize anything, so return zero. */
4838 nbitsize = GET_MODE_BITSIZE (nmode);
4839 nbitpos = lbitpos & ~ (nbitsize - 1);
4840 lbitpos -= nbitpos;
4841 if (nbitsize == lbitsize)
4842 return 0;
4844 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4845 lbitpos = nbitsize - lbitsize - lbitpos;
4847 /* Make the mask to be used against the extracted field. */
4848 mask = build_int_cst_type (unsigned_type, -1);
4849 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4850 mask = const_binop (RSHIFT_EXPR, mask,
4851 size_int (nbitsize - lbitsize - lbitpos));
4853 if (! const_p)
4855 if (nbitpos < 0)
4856 return 0;
4858 /* If not comparing with constant, just rework the comparison
4859 and return. */
4860 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4861 nbitsize, nbitpos, 1, lreversep);
4862 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4863 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4864 nbitsize, nbitpos, 1, rreversep);
4865 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4866 return fold_build2_loc (loc, code, compare_type, t1, t2);
4869 /* Otherwise, we are handling the constant case. See if the constant is too
4870 big for the field. Warn and return a tree for 0 (false) if so. We do
4871 this not only for its own sake, but to avoid having to test for this
4872 error case below. If we didn't, we might generate wrong code.
4874 For unsigned fields, the constant shifted right by the field length should
4875 be all zero. For signed fields, the high-order bits should agree with
4876 the sign bit. */
4878 if (lunsignedp)
4880 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4882 warning (0, "comparison is always %d due to width of bit-field",
4883 code == NE_EXPR);
4884 return constant_boolean_node (code == NE_EXPR, compare_type);
4887 else
4889 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4890 if (tem != 0 && tem != -1)
4892 warning (0, "comparison is always %d due to width of bit-field",
4893 code == NE_EXPR);
4894 return constant_boolean_node (code == NE_EXPR, compare_type);
4898 if (nbitpos < 0)
4899 return 0;
4901 /* Single-bit compares should always be against zero. */
4902 if (lbitsize == 1 && ! integer_zerop (rhs))
4904 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4905 rhs = build_int_cst (type, 0);
4908 /* Make a new bitfield reference, shift the constant over the
4909 appropriate number of bits and mask it with the computed mask
4910 (in case this was a signed field). If we changed it, make a new one. */
4911 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4912 nbitsize, nbitpos, 1, lreversep);
4914 rhs = const_binop (BIT_AND_EXPR,
4915 const_binop (LSHIFT_EXPR,
4916 fold_convert_loc (loc, unsigned_type, rhs),
4917 size_int (lbitpos)),
4918 mask);
4920 lhs = build2_loc (loc, code, compare_type,
4921 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4922 return lhs;
4925 /* Subroutine for fold_truth_andor_1: decode a field reference.
4927 If EXP is a comparison reference, we return the innermost reference.
4929 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4930 set to the starting bit number.
4932 If the innermost field can be completely contained in a mode-sized
4933 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4935 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4936 otherwise it is not changed.
4938 *PUNSIGNEDP is set to the signedness of the field.
4940 *PREVERSEP is set to the storage order of the field.
4942 *PMASK is set to the mask used. This is either contained in a
4943 BIT_AND_EXPR or derived from the width of the field.
4945 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4947 Return 0 if this is not a component reference or is one that we can't
4948 do anything with. */
4950 static tree
4951 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4952 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4953 int *punsignedp, int *preversep, int *pvolatilep,
4954 tree *pmask, tree *pand_mask)
4956 tree exp = *exp_;
4957 tree outer_type = 0;
4958 tree and_mask = 0;
4959 tree mask, inner, offset;
4960 tree unsigned_type;
4961 unsigned int precision;
4963 /* All the optimizations using this function assume integer fields.
4964 There are problems with FP fields since the type_for_size call
4965 below can fail for, e.g., XFmode. */
4966 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4967 return NULL_TREE;
4969 /* We are interested in the bare arrangement of bits, so strip everything
4970 that doesn't affect the machine mode. However, record the type of the
4971 outermost expression if it may matter below. */
4972 if (CONVERT_EXPR_P (exp)
4973 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4974 outer_type = TREE_TYPE (exp);
4975 STRIP_NOPS (exp);
4977 if (TREE_CODE (exp) == BIT_AND_EXPR)
4979 and_mask = TREE_OPERAND (exp, 1);
4980 exp = TREE_OPERAND (exp, 0);
4981 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4982 if (TREE_CODE (and_mask) != INTEGER_CST)
4983 return NULL_TREE;
4986 poly_int64 poly_bitsize, poly_bitpos;
4987 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4988 pmode, punsignedp, preversep, pvolatilep);
4989 if ((inner == exp && and_mask == 0)
4990 || !poly_bitsize.is_constant (pbitsize)
4991 || !poly_bitpos.is_constant (pbitpos)
4992 || *pbitsize < 0
4993 || offset != 0
4994 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4995 /* Reject out-of-bound accesses (PR79731). */
4996 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4997 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4998 *pbitpos + *pbitsize) < 0))
4999 return NULL_TREE;
5001 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
5002 if (unsigned_type == NULL_TREE)
5003 return NULL_TREE;
5005 *exp_ = exp;
5007 /* If the number of bits in the reference is the same as the bitsize of
5008 the outer type, then the outer type gives the signedness. Otherwise
5009 (in case of a small bitfield) the signedness is unchanged. */
5010 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
5011 *punsignedp = TYPE_UNSIGNED (outer_type);
5013 /* Compute the mask to access the bitfield. */
5014 precision = TYPE_PRECISION (unsigned_type);
5016 mask = build_int_cst_type (unsigned_type, -1);
5018 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5019 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5021 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
5022 if (and_mask != 0)
5023 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
5024 fold_convert_loc (loc, unsigned_type, and_mask), mask);
5026 *pmask = mask;
5027 *pand_mask = and_mask;
5028 return inner;
5031 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
5032 bit positions and MASK is SIGNED. */
5034 static bool
5035 all_ones_mask_p (const_tree mask, unsigned int size)
5037 tree type = TREE_TYPE (mask);
5038 unsigned int precision = TYPE_PRECISION (type);
5040 /* If this function returns true when the type of the mask is
5041 UNSIGNED, then there will be errors. In particular see
5042 gcc.c-torture/execute/990326-1.c. There does not appear to be
5043 any documentation paper trail as to why this is so. But the pre
5044 wide-int worked with that restriction and it has been preserved
5045 here. */
5046 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
5047 return false;
5049 return wi::mask (size, false, precision) == wi::to_wide (mask);
5052 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
5053 represents the sign bit of EXP's type. If EXP represents a sign
5054 or zero extension, also test VAL against the unextended type.
5055 The return value is the (sub)expression whose sign bit is VAL,
5056 or NULL_TREE otherwise. */
5058 tree
5059 sign_bit_p (tree exp, const_tree val)
5061 int width;
5062 tree t;
5064 /* Tree EXP must have an integral type. */
5065 t = TREE_TYPE (exp);
5066 if (! INTEGRAL_TYPE_P (t))
5067 return NULL_TREE;
5069 /* Tree VAL must be an integer constant. */
5070 if (TREE_CODE (val) != INTEGER_CST
5071 || TREE_OVERFLOW (val))
5072 return NULL_TREE;
5074 width = TYPE_PRECISION (t);
5075 if (wi::only_sign_bit_p (wi::to_wide (val), width))
5076 return exp;
5078 /* Handle extension from a narrower type. */
5079 if (TREE_CODE (exp) == NOP_EXPR
5080 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5081 return sign_bit_p (TREE_OPERAND (exp, 0), val);
5083 return NULL_TREE;
5086 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5087 operand is simple enough to be evaluated unconditionally. */
5089 static bool
5090 simple_operand_p (const_tree exp)
5092 /* Strip any conversions that don't change the machine mode. */
5093 STRIP_NOPS (exp);
5095 return (CONSTANT_CLASS_P (exp)
5096 || TREE_CODE (exp) == SSA_NAME
5097 || (DECL_P (exp)
5098 && ! TREE_ADDRESSABLE (exp)
5099 && ! TREE_THIS_VOLATILE (exp)
5100 && ! DECL_NONLOCAL (exp)
5101 /* Don't regard global variables as simple. They may be
5102 allocated in ways unknown to the compiler (shared memory,
5103 #pragma weak, etc). */
5104 && ! TREE_PUBLIC (exp)
5105 && ! DECL_EXTERNAL (exp)
5106 /* Weakrefs are not safe to be read, since they can be NULL.
5107 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5108 have DECL_WEAK flag set. */
5109 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5110 /* Loading a static variable is unduly expensive, but global
5111 registers aren't expensive. */
5112 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5115 /* Determine if an operand is simple enough to be evaluated unconditionally.
5116 In addition to simple_operand_p, we assume that comparisons, conversions,
5117 and logic-not operations are simple, if their operands are simple, too. */
5119 bool
5120 simple_condition_p (tree exp)
5122 enum tree_code code;
5124 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5125 return false;
5127 while (CONVERT_EXPR_P (exp))
5128 exp = TREE_OPERAND (exp, 0);
5130 code = TREE_CODE (exp);
5132 if (TREE_CODE_CLASS (code) == tcc_comparison)
5133 return (simple_operand_p (TREE_OPERAND (exp, 0))
5134 && simple_operand_p (TREE_OPERAND (exp, 1)));
5136 if (code == TRUTH_NOT_EXPR)
5137 return simple_condition_p (TREE_OPERAND (exp, 0));
5139 return simple_operand_p (exp);
5143 /* The following functions are subroutines to fold_range_test and allow it to
5144 try to change a logical combination of comparisons into a range test.
5146 For example, both
5147 X == 2 || X == 3 || X == 4 || X == 5
5149 X >= 2 && X <= 5
5150 are converted to
5151 (unsigned) (X - 2) <= 3
5153 We describe each set of comparisons as being either inside or outside
5154 a range, using a variable named like IN_P, and then describe the
5155 range with a lower and upper bound. If one of the bounds is omitted,
5156 it represents either the highest or lowest value of the type.
5158 In the comments below, we represent a range by two numbers in brackets
5159 preceded by a "+" to designate being inside that range, or a "-" to
5160 designate being outside that range, so the condition can be inverted by
5161 flipping the prefix. An omitted bound is represented by a "-". For
5162 example, "- [-, 10]" means being outside the range starting at the lowest
5163 possible value and ending at 10, in other words, being greater than 10.
5164 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5165 always false.
5167 We set up things so that the missing bounds are handled in a consistent
5168 manner so neither a missing bound nor "true" and "false" need to be
5169 handled using a special case. */
5171 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5172 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5173 and UPPER1_P are nonzero if the respective argument is an upper bound
5174 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5175 must be specified for a comparison. ARG1 will be converted to ARG0's
5176 type if both are specified. */
5178 static tree
5179 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5180 tree arg1, int upper1_p)
5182 tree tem;
5183 int result;
5184 int sgn0, sgn1;
5186 /* If neither arg represents infinity, do the normal operation.
5187 Else, if not a comparison, return infinity. Else handle the special
5188 comparison rules. Note that most of the cases below won't occur, but
5189 are handled for consistency. */
5191 if (arg0 != 0 && arg1 != 0)
5193 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5194 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5195 STRIP_NOPS (tem);
5196 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5199 if (TREE_CODE_CLASS (code) != tcc_comparison)
5200 return 0;
5202 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5203 for neither. In real maths, we cannot assume open ended ranges are
5204 the same. But, this is computer arithmetic, where numbers are finite.
5205 We can therefore make the transformation of any unbounded range with
5206 the value Z, Z being greater than any representable number. This permits
5207 us to treat unbounded ranges as equal. */
5208 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5209 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5210 switch (code)
5212 case EQ_EXPR:
5213 result = sgn0 == sgn1;
5214 break;
5215 case NE_EXPR:
5216 result = sgn0 != sgn1;
5217 break;
5218 case LT_EXPR:
5219 result = sgn0 < sgn1;
5220 break;
5221 case LE_EXPR:
5222 result = sgn0 <= sgn1;
5223 break;
5224 case GT_EXPR:
5225 result = sgn0 > sgn1;
5226 break;
5227 case GE_EXPR:
5228 result = sgn0 >= sgn1;
5229 break;
5230 default:
5231 gcc_unreachable ();
5234 return constant_boolean_node (result, type);
5237 /* Helper routine for make_range. Perform one step for it, return
5238 new expression if the loop should continue or NULL_TREE if it should
5239 stop. */
5241 tree
5242 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5243 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5244 bool *strict_overflow_p)
5246 tree arg0_type = TREE_TYPE (arg0);
5247 tree n_low, n_high, low = *p_low, high = *p_high;
5248 int in_p = *p_in_p, n_in_p;
5250 switch (code)
5252 case TRUTH_NOT_EXPR:
5253 /* We can only do something if the range is testing for zero. */
5254 if (low == NULL_TREE || high == NULL_TREE
5255 || ! integer_zerop (low) || ! integer_zerop (high))
5256 return NULL_TREE;
5257 *p_in_p = ! in_p;
5258 return arg0;
5260 case EQ_EXPR: case NE_EXPR:
5261 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5262 /* We can only do something if the range is testing for zero
5263 and if the second operand is an integer constant. Note that
5264 saying something is "in" the range we make is done by
5265 complementing IN_P since it will set in the initial case of
5266 being not equal to zero; "out" is leaving it alone. */
5267 if (low == NULL_TREE || high == NULL_TREE
5268 || ! integer_zerop (low) || ! integer_zerop (high)
5269 || TREE_CODE (arg1) != INTEGER_CST)
5270 return NULL_TREE;
5272 switch (code)
5274 case NE_EXPR: /* - [c, c] */
5275 low = high = arg1;
5276 break;
5277 case EQ_EXPR: /* + [c, c] */
5278 in_p = ! in_p, low = high = arg1;
5279 break;
5280 case GT_EXPR: /* - [-, c] */
5281 low = 0, high = arg1;
5282 break;
5283 case GE_EXPR: /* + [c, -] */
5284 in_p = ! in_p, low = arg1, high = 0;
5285 break;
5286 case LT_EXPR: /* - [c, -] */
5287 low = arg1, high = 0;
5288 break;
5289 case LE_EXPR: /* + [-, c] */
5290 in_p = ! in_p, low = 0, high = arg1;
5291 break;
5292 default:
5293 gcc_unreachable ();
5296 /* If this is an unsigned comparison, we also know that EXP is
5297 greater than or equal to zero. We base the range tests we make
5298 on that fact, so we record it here so we can parse existing
5299 range tests. We test arg0_type since often the return type
5300 of, e.g. EQ_EXPR, is boolean. */
5301 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5303 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5304 in_p, low, high, 1,
5305 build_int_cst (arg0_type, 0),
5306 NULL_TREE))
5307 return NULL_TREE;
5309 in_p = n_in_p, low = n_low, high = n_high;
5311 /* If the high bound is missing, but we have a nonzero low
5312 bound, reverse the range so it goes from zero to the low bound
5313 minus 1. */
5314 if (high == 0 && low && ! integer_zerop (low))
5316 in_p = ! in_p;
5317 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5318 build_int_cst (TREE_TYPE (low), 1), 0);
5319 low = build_int_cst (arg0_type, 0);
5323 *p_low = low;
5324 *p_high = high;
5325 *p_in_p = in_p;
5326 return arg0;
5328 case NEGATE_EXPR:
5329 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5330 low and high are non-NULL, then normalize will DTRT. */
5331 if (!TYPE_UNSIGNED (arg0_type)
5332 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5334 if (low == NULL_TREE)
5335 low = TYPE_MIN_VALUE (arg0_type);
5336 if (high == NULL_TREE)
5337 high = TYPE_MAX_VALUE (arg0_type);
5340 /* (-x) IN [a,b] -> x in [-b, -a] */
5341 n_low = range_binop (MINUS_EXPR, exp_type,
5342 build_int_cst (exp_type, 0),
5343 0, high, 1);
5344 n_high = range_binop (MINUS_EXPR, exp_type,
5345 build_int_cst (exp_type, 0),
5346 0, low, 0);
5347 if (n_high != 0 && TREE_OVERFLOW (n_high))
5348 return NULL_TREE;
5349 goto normalize;
5351 case BIT_NOT_EXPR:
5352 /* ~ X -> -X - 1 */
5353 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5354 build_int_cst (exp_type, 1));
5356 case PLUS_EXPR:
5357 case MINUS_EXPR:
5358 if (TREE_CODE (arg1) != INTEGER_CST)
5359 return NULL_TREE;
5361 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5362 move a constant to the other side. */
5363 if (!TYPE_UNSIGNED (arg0_type)
5364 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5365 return NULL_TREE;
5367 /* If EXP is signed, any overflow in the computation is undefined,
5368 so we don't worry about it so long as our computations on
5369 the bounds don't overflow. For unsigned, overflow is defined
5370 and this is exactly the right thing. */
5371 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5372 arg0_type, low, 0, arg1, 0);
5373 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5374 arg0_type, high, 1, arg1, 0);
5375 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5376 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5377 return NULL_TREE;
5379 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5380 *strict_overflow_p = true;
5382 normalize:
5383 /* Check for an unsigned range which has wrapped around the maximum
5384 value thus making n_high < n_low, and normalize it. */
5385 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5387 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5388 build_int_cst (TREE_TYPE (n_high), 1), 0);
5389 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5390 build_int_cst (TREE_TYPE (n_low), 1), 0);
5392 /* If the range is of the form +/- [ x+1, x ], we won't
5393 be able to normalize it. But then, it represents the
5394 whole range or the empty set, so make it
5395 +/- [ -, - ]. */
5396 if (tree_int_cst_equal (n_low, low)
5397 && tree_int_cst_equal (n_high, high))
5398 low = high = 0;
5399 else
5400 in_p = ! in_p;
5402 else
5403 low = n_low, high = n_high;
5405 *p_low = low;
5406 *p_high = high;
5407 *p_in_p = in_p;
5408 return arg0;
5410 CASE_CONVERT:
5411 case NON_LVALUE_EXPR:
5412 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5413 return NULL_TREE;
5415 if (! INTEGRAL_TYPE_P (arg0_type)
5416 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5417 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5418 return NULL_TREE;
5420 n_low = low, n_high = high;
5422 if (n_low != 0)
5423 n_low = fold_convert_loc (loc, arg0_type, n_low);
5425 if (n_high != 0)
5426 n_high = fold_convert_loc (loc, arg0_type, n_high);
5428 /* If we're converting arg0 from an unsigned type, to exp,
5429 a signed type, we will be doing the comparison as unsigned.
5430 The tests above have already verified that LOW and HIGH
5431 are both positive.
5433 So we have to ensure that we will handle large unsigned
5434 values the same way that the current signed bounds treat
5435 negative values. */
5437 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5439 tree high_positive;
5440 tree equiv_type;
5441 /* For fixed-point modes, we need to pass the saturating flag
5442 as the 2nd parameter. */
5443 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5444 equiv_type
5445 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5446 TYPE_SATURATING (arg0_type));
5447 else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5448 equiv_type = arg0_type;
5449 else
5450 equiv_type
5451 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5453 /* A range without an upper bound is, naturally, unbounded.
5454 Since convert would have cropped a very large value, use
5455 the max value for the destination type. */
5456 high_positive
5457 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5458 : TYPE_MAX_VALUE (arg0_type);
5460 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5461 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5462 fold_convert_loc (loc, arg0_type,
5463 high_positive),
5464 build_int_cst (arg0_type, 1));
5466 /* If the low bound is specified, "and" the range with the
5467 range for which the original unsigned value will be
5468 positive. */
5469 if (low != 0)
5471 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5472 1, fold_convert_loc (loc, arg0_type,
5473 integer_zero_node),
5474 high_positive))
5475 return NULL_TREE;
5477 in_p = (n_in_p == in_p);
5479 else
5481 /* Otherwise, "or" the range with the range of the input
5482 that will be interpreted as negative. */
5483 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5484 1, fold_convert_loc (loc, arg0_type,
5485 integer_zero_node),
5486 high_positive))
5487 return NULL_TREE;
5489 in_p = (in_p != n_in_p);
5493 /* Otherwise, if we are converting arg0 from signed type, to exp,
5494 an unsigned type, we will do the comparison as signed. If
5495 high is non-NULL, we punt above if it doesn't fit in the signed
5496 type, so if we get through here, +[-, high] or +[low, high] are
5497 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5498 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5499 high is not, the +[low, -] range is equivalent to union of
5500 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5501 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5502 low being 0, which should be treated as [-, -]. */
5503 else if (TYPE_UNSIGNED (exp_type)
5504 && !TYPE_UNSIGNED (arg0_type)
5505 && low
5506 && !high)
5508 if (integer_zerop (low))
5509 n_low = NULL_TREE;
5510 else
5512 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5513 n_low, build_int_cst (arg0_type, -1));
5514 n_low = build_zero_cst (arg0_type);
5515 in_p = !in_p;
5519 *p_low = n_low;
5520 *p_high = n_high;
5521 *p_in_p = in_p;
5522 return arg0;
5524 default:
5525 return NULL_TREE;
5529 /* Given EXP, a logical expression, set the range it is testing into
5530 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5531 actually being tested. *PLOW and *PHIGH will be made of the same
5532 type as the returned expression. If EXP is not a comparison, we
5533 will most likely not be returning a useful value and range. Set
5534 *STRICT_OVERFLOW_P to true if the return value is only valid
5535 because signed overflow is undefined; otherwise, do not change
5536 *STRICT_OVERFLOW_P. */
5538 tree
5539 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5540 bool *strict_overflow_p)
5542 enum tree_code code;
5543 tree arg0, arg1 = NULL_TREE;
5544 tree exp_type, nexp;
5545 int in_p;
5546 tree low, high;
5547 location_t loc = EXPR_LOCATION (exp);
5549 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5550 and see if we can refine the range. Some of the cases below may not
5551 happen, but it doesn't seem worth worrying about this. We "continue"
5552 the outer loop when we've changed something; otherwise we "break"
5553 the switch, which will "break" the while. */
5555 in_p = 0;
5556 low = high = build_int_cst (TREE_TYPE (exp), 0);
5558 while (1)
5560 code = TREE_CODE (exp);
5561 exp_type = TREE_TYPE (exp);
5562 arg0 = NULL_TREE;
5564 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5566 if (TREE_OPERAND_LENGTH (exp) > 0)
5567 arg0 = TREE_OPERAND (exp, 0);
5568 if (TREE_CODE_CLASS (code) == tcc_binary
5569 || TREE_CODE_CLASS (code) == tcc_comparison
5570 || (TREE_CODE_CLASS (code) == tcc_expression
5571 && TREE_OPERAND_LENGTH (exp) > 1))
5572 arg1 = TREE_OPERAND (exp, 1);
5574 if (arg0 == NULL_TREE)
5575 break;
5577 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5578 &high, &in_p, strict_overflow_p);
5579 if (nexp == NULL_TREE)
5580 break;
5581 exp = nexp;
5584 /* If EXP is a constant, we can evaluate whether this is true or false. */
5585 if (TREE_CODE (exp) == INTEGER_CST)
5587 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5588 exp, 0, low, 0))
5589 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5590 exp, 1, high, 1)));
5591 low = high = 0;
5592 exp = 0;
5595 *pin_p = in_p, *plow = low, *phigh = high;
5596 return exp;
5599 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5600 a bitwise check i.e. when
5601 LOW == 0xXX...X00...0
5602 HIGH == 0xXX...X11...1
5603 Return corresponding mask in MASK and stem in VALUE. */
5605 static bool
5606 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5607 tree *value)
5609 if (TREE_CODE (low) != INTEGER_CST
5610 || TREE_CODE (high) != INTEGER_CST)
5611 return false;
5613 unsigned prec = TYPE_PRECISION (type);
5614 wide_int lo = wi::to_wide (low, prec);
5615 wide_int hi = wi::to_wide (high, prec);
5617 wide_int end_mask = lo ^ hi;
5618 if ((end_mask & (end_mask + 1)) != 0
5619 || (lo & end_mask) != 0)
5620 return false;
5622 wide_int stem_mask = ~end_mask;
5623 wide_int stem = lo & stem_mask;
5624 if (stem != (hi & stem_mask))
5625 return false;
5627 *mask = wide_int_to_tree (type, stem_mask);
5628 *value = wide_int_to_tree (type, stem);
5630 return true;
5633 /* Helper routine for build_range_check and match.pd. Return the type to
5634 perform the check or NULL if it shouldn't be optimized. */
5636 tree
5637 range_check_type (tree etype)
5639 /* First make sure that arithmetics in this type is valid, then make sure
5640 that it wraps around. */
5641 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5642 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5644 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5646 tree utype, minv, maxv;
5648 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5649 for the type in question, as we rely on this here. */
5650 utype = unsigned_type_for (etype);
5651 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5652 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5653 build_int_cst (TREE_TYPE (maxv), 1), 1);
5654 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5656 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5657 minv, 1, maxv, 1)))
5658 etype = utype;
5659 else
5660 return NULL_TREE;
5662 else if (POINTER_TYPE_P (etype)
5663 || TREE_CODE (etype) == OFFSET_TYPE
5664 /* Right now all BITINT_TYPEs satisfy
5665 (unsigned) max + 1 == (unsigned) min, so no need to verify
5666 that like for INTEGER_TYPEs. */
5667 || TREE_CODE (etype) == BITINT_TYPE)
5668 etype = unsigned_type_for (etype);
5669 return etype;
5672 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5673 type, TYPE, return an expression to test if EXP is in (or out of, depending
5674 on IN_P) the range. Return 0 if the test couldn't be created. */
5676 tree
5677 build_range_check (location_t loc, tree type, tree exp, int in_p,
5678 tree low, tree high)
5680 tree etype = TREE_TYPE (exp), mask, value;
5682 /* Disable this optimization for function pointer expressions
5683 on targets that require function pointer canonicalization. */
5684 if (targetm.have_canonicalize_funcptr_for_compare ()
5685 && POINTER_TYPE_P (etype)
5686 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5687 return NULL_TREE;
5689 if (! in_p)
5691 value = build_range_check (loc, type, exp, 1, low, high);
5692 if (value != 0)
5693 return invert_truthvalue_loc (loc, value);
5695 return 0;
5698 if (low == 0 && high == 0)
5699 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5701 if (low == 0)
5702 return fold_build2_loc (loc, LE_EXPR, type, exp,
5703 fold_convert_loc (loc, etype, high));
5705 if (high == 0)
5706 return fold_build2_loc (loc, GE_EXPR, type, exp,
5707 fold_convert_loc (loc, etype, low));
5709 if (operand_equal_p (low, high, 0))
5710 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5711 fold_convert_loc (loc, etype, low));
5713 if (TREE_CODE (exp) == BIT_AND_EXPR
5714 && maskable_range_p (low, high, etype, &mask, &value))
5715 return fold_build2_loc (loc, EQ_EXPR, type,
5716 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5717 exp, mask),
5718 value);
5720 if (integer_zerop (low))
5722 if (! TYPE_UNSIGNED (etype))
5724 etype = unsigned_type_for (etype);
5725 high = fold_convert_loc (loc, etype, high);
5726 exp = fold_convert_loc (loc, etype, exp);
5728 return build_range_check (loc, type, exp, 1, 0, high);
5731 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5732 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5734 int prec = TYPE_PRECISION (etype);
5736 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5738 if (TYPE_UNSIGNED (etype))
5740 tree signed_etype = signed_type_for (etype);
5741 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5742 etype
5743 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5744 else
5745 etype = signed_etype;
5746 exp = fold_convert_loc (loc, etype, exp);
5748 return fold_build2_loc (loc, GT_EXPR, type, exp,
5749 build_int_cst (etype, 0));
5753 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5754 This requires wrap-around arithmetics for the type of the expression. */
5755 etype = range_check_type (etype);
5756 if (etype == NULL_TREE)
5757 return NULL_TREE;
5759 high = fold_convert_loc (loc, etype, high);
5760 low = fold_convert_loc (loc, etype, low);
5761 exp = fold_convert_loc (loc, etype, exp);
5763 value = const_binop (MINUS_EXPR, high, low);
5765 if (value != 0 && !TREE_OVERFLOW (value))
5766 return build_range_check (loc, type,
5767 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5768 1, build_int_cst (etype, 0), value);
5770 return 0;
5773 /* Return the predecessor of VAL in its type, handling the infinite case. */
5775 static tree
5776 range_predecessor (tree val)
5778 tree type = TREE_TYPE (val);
5780 if (INTEGRAL_TYPE_P (type)
5781 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5782 return 0;
5783 else
5784 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5785 build_int_cst (TREE_TYPE (val), 1), 0);
5788 /* Return the successor of VAL in its type, handling the infinite case. */
5790 static tree
5791 range_successor (tree val)
5793 tree type = TREE_TYPE (val);
5795 if (INTEGRAL_TYPE_P (type)
5796 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5797 return 0;
5798 else
5799 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5800 build_int_cst (TREE_TYPE (val), 1), 0);
5803 /* Given two ranges, see if we can merge them into one. Return 1 if we
5804 can, 0 if we can't. Set the output range into the specified parameters. */
5806 bool
5807 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5808 tree high0, int in1_p, tree low1, tree high1)
5810 bool no_overlap;
5811 int subset;
5812 int temp;
5813 tree tem;
5814 int in_p;
5815 tree low, high;
5816 int lowequal = ((low0 == 0 && low1 == 0)
5817 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5818 low0, 0, low1, 0)));
5819 int highequal = ((high0 == 0 && high1 == 0)
5820 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5821 high0, 1, high1, 1)));
5823 /* Make range 0 be the range that starts first, or ends last if they
5824 start at the same value. Swap them if it isn't. */
5825 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5826 low0, 0, low1, 0))
5827 || (lowequal
5828 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5829 high1, 1, high0, 1))))
5831 temp = in0_p, in0_p = in1_p, in1_p = temp;
5832 tem = low0, low0 = low1, low1 = tem;
5833 tem = high0, high0 = high1, high1 = tem;
5836 /* If the second range is != high1 where high1 is the type maximum of
5837 the type, try first merging with < high1 range. */
5838 if (low1
5839 && high1
5840 && TREE_CODE (low1) == INTEGER_CST
5841 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5842 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5843 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5844 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5845 && operand_equal_p (low1, high1, 0))
5847 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5848 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5849 !in1_p, NULL_TREE, range_predecessor (low1)))
5850 return true;
5851 /* Similarly for the second range != low1 where low1 is the type minimum
5852 of the type, try first merging with > low1 range. */
5853 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5854 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5855 !in1_p, range_successor (low1), NULL_TREE))
5856 return true;
5859 /* Now flag two cases, whether the ranges are disjoint or whether the
5860 second range is totally subsumed in the first. Note that the tests
5861 below are simplified by the ones above. */
5862 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5863 high0, 1, low1, 0));
5864 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5865 high1, 1, high0, 1));
5867 /* We now have four cases, depending on whether we are including or
5868 excluding the two ranges. */
5869 if (in0_p && in1_p)
5871 /* If they don't overlap, the result is false. If the second range
5872 is a subset it is the result. Otherwise, the range is from the start
5873 of the second to the end of the first. */
5874 if (no_overlap)
5875 in_p = 0, low = high = 0;
5876 else if (subset)
5877 in_p = 1, low = low1, high = high1;
5878 else
5879 in_p = 1, low = low1, high = high0;
5882 else if (in0_p && ! in1_p)
5884 /* If they don't overlap, the result is the first range. If they are
5885 equal, the result is false. If the second range is a subset of the
5886 first, and the ranges begin at the same place, we go from just after
5887 the end of the second range to the end of the first. If the second
5888 range is not a subset of the first, or if it is a subset and both
5889 ranges end at the same place, the range starts at the start of the
5890 first range and ends just before the second range.
5891 Otherwise, we can't describe this as a single range. */
5892 if (no_overlap)
5893 in_p = 1, low = low0, high = high0;
5894 else if (lowequal && highequal)
5895 in_p = 0, low = high = 0;
5896 else if (subset && lowequal)
5898 low = range_successor (high1);
5899 high = high0;
5900 in_p = 1;
5901 if (low == 0)
5903 /* We are in the weird situation where high0 > high1 but
5904 high1 has no successor. Punt. */
5905 return 0;
5908 else if (! subset || highequal)
5910 low = low0;
5911 high = range_predecessor (low1);
5912 in_p = 1;
5913 if (high == 0)
5915 /* low0 < low1 but low1 has no predecessor. Punt. */
5916 return 0;
5919 else
5920 return 0;
5923 else if (! in0_p && in1_p)
5925 /* If they don't overlap, the result is the second range. If the second
5926 is a subset of the first, the result is false. Otherwise,
5927 the range starts just after the first range and ends at the
5928 end of the second. */
5929 if (no_overlap)
5930 in_p = 1, low = low1, high = high1;
5931 else if (subset || highequal)
5932 in_p = 0, low = high = 0;
5933 else
5935 low = range_successor (high0);
5936 high = high1;
5937 in_p = 1;
5938 if (low == 0)
5940 /* high1 > high0 but high0 has no successor. Punt. */
5941 return 0;
5946 else
5948 /* The case where we are excluding both ranges. Here the complex case
5949 is if they don't overlap. In that case, the only time we have a
5950 range is if they are adjacent. If the second is a subset of the
5951 first, the result is the first. Otherwise, the range to exclude
5952 starts at the beginning of the first range and ends at the end of the
5953 second. */
5954 if (no_overlap)
5956 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5957 range_successor (high0),
5958 1, low1, 0)))
5959 in_p = 0, low = low0, high = high1;
5960 else
5962 /* Canonicalize - [min, x] into - [-, x]. */
5963 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5964 switch (TREE_CODE (TREE_TYPE (low0)))
5966 case ENUMERAL_TYPE:
5967 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5968 GET_MODE_BITSIZE
5969 (TYPE_MODE (TREE_TYPE (low0)))))
5970 break;
5971 /* FALLTHROUGH */
5972 case INTEGER_TYPE:
5973 if (tree_int_cst_equal (low0,
5974 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5975 low0 = 0;
5976 break;
5977 case POINTER_TYPE:
5978 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5979 && integer_zerop (low0))
5980 low0 = 0;
5981 break;
5982 default:
5983 break;
5986 /* Canonicalize - [x, max] into - [x, -]. */
5987 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5988 switch (TREE_CODE (TREE_TYPE (high1)))
5990 case ENUMERAL_TYPE:
5991 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5992 GET_MODE_BITSIZE
5993 (TYPE_MODE (TREE_TYPE (high1)))))
5994 break;
5995 /* FALLTHROUGH */
5996 case INTEGER_TYPE:
5997 if (tree_int_cst_equal (high1,
5998 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5999 high1 = 0;
6000 break;
6001 case POINTER_TYPE:
6002 if (TYPE_UNSIGNED (TREE_TYPE (high1))
6003 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
6004 high1, 1,
6005 build_int_cst (TREE_TYPE (high1), 1),
6006 1)))
6007 high1 = 0;
6008 break;
6009 default:
6010 break;
6013 /* The ranges might be also adjacent between the maximum and
6014 minimum values of the given type. For
6015 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
6016 return + [x + 1, y - 1]. */
6017 if (low0 == 0 && high1 == 0)
6019 low = range_successor (high0);
6020 high = range_predecessor (low1);
6021 if (low == 0 || high == 0)
6022 return 0;
6024 in_p = 1;
6026 else
6027 return 0;
6030 else if (subset)
6031 in_p = 0, low = low0, high = high0;
6032 else
6033 in_p = 0, low = low0, high = high1;
6036 *pin_p = in_p, *plow = low, *phigh = high;
6037 return 1;
6041 /* Subroutine of fold, looking inside expressions of the form
6042 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6043 are the three operands of the COND_EXPR. This function is
6044 being used also to optimize A op B ? C : A, by reversing the
6045 comparison first.
6047 Return a folded expression whose code is not a COND_EXPR
6048 anymore, or NULL_TREE if no folding opportunity is found. */
6050 static tree
6051 fold_cond_expr_with_comparison (location_t loc, tree type,
6052 enum tree_code comp_code,
6053 tree arg00, tree arg01, tree arg1, tree arg2)
6055 tree arg1_type = TREE_TYPE (arg1);
6056 tree tem;
6058 STRIP_NOPS (arg1);
6059 STRIP_NOPS (arg2);
6061 /* If we have A op 0 ? A : -A, consider applying the following
6062 transformations:
6064 A == 0? A : -A same as -A
6065 A != 0? A : -A same as A
6066 A >= 0? A : -A same as abs (A)
6067 A > 0? A : -A same as abs (A)
6068 A <= 0? A : -A same as -abs (A)
6069 A < 0? A : -A same as -abs (A)
6071 None of these transformations work for modes with signed
6072 zeros. If A is +/-0, the first two transformations will
6073 change the sign of the result (from +0 to -0, or vice
6074 versa). The last four will fix the sign of the result,
6075 even though the original expressions could be positive or
6076 negative, depending on the sign of A.
6078 Note that all these transformations are correct if A is
6079 NaN, since the two alternatives (A and -A) are also NaNs. */
6080 if (!HONOR_SIGNED_ZEROS (type)
6081 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6082 ? real_zerop (arg01)
6083 : integer_zerop (arg01))
6084 && ((TREE_CODE (arg2) == NEGATE_EXPR
6085 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6086 /* In the case that A is of the form X-Y, '-A' (arg2) may
6087 have already been folded to Y-X, check for that. */
6088 || (TREE_CODE (arg1) == MINUS_EXPR
6089 && TREE_CODE (arg2) == MINUS_EXPR
6090 && operand_equal_p (TREE_OPERAND (arg1, 0),
6091 TREE_OPERAND (arg2, 1), 0)
6092 && operand_equal_p (TREE_OPERAND (arg1, 1),
6093 TREE_OPERAND (arg2, 0), 0))))
6094 switch (comp_code)
6096 case EQ_EXPR:
6097 case UNEQ_EXPR:
6098 tem = fold_convert_loc (loc, arg1_type, arg1);
6099 return fold_convert_loc (loc, type, negate_expr (tem));
6100 case NE_EXPR:
6101 case LTGT_EXPR:
6102 return fold_convert_loc (loc, type, arg1);
6103 case UNGE_EXPR:
6104 case UNGT_EXPR:
6105 if (flag_trapping_math)
6106 break;
6107 /* Fall through. */
6108 case GE_EXPR:
6109 case GT_EXPR:
6110 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6111 break;
6112 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6113 return fold_convert_loc (loc, type, tem);
6114 case UNLE_EXPR:
6115 case UNLT_EXPR:
6116 if (flag_trapping_math)
6117 break;
6118 /* FALLTHRU */
6119 case LE_EXPR:
6120 case LT_EXPR:
6121 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6122 break;
6123 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6124 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6126 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
6127 is not, invokes UB both in abs and in the negation of it.
6128 So, use ABSU_EXPR instead. */
6129 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6130 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6131 tem = negate_expr (tem);
6132 return fold_convert_loc (loc, type, tem);
6134 else
6136 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6137 return negate_expr (fold_convert_loc (loc, type, tem));
6139 default:
6140 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6141 break;
6144 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6145 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6146 both transformations are correct when A is NaN: A != 0
6147 is then true, and A == 0 is false. */
6149 if (!HONOR_SIGNED_ZEROS (type)
6150 && integer_zerop (arg01) && integer_zerop (arg2))
6152 if (comp_code == NE_EXPR)
6153 return fold_convert_loc (loc, type, arg1);
6154 else if (comp_code == EQ_EXPR)
6155 return build_zero_cst (type);
6158 /* Try some transformations of A op B ? A : B.
6160 A == B? A : B same as B
6161 A != B? A : B same as A
6162 A >= B? A : B same as max (A, B)
6163 A > B? A : B same as max (B, A)
6164 A <= B? A : B same as min (A, B)
6165 A < B? A : B same as min (B, A)
6167 As above, these transformations don't work in the presence
6168 of signed zeros. For example, if A and B are zeros of
6169 opposite sign, the first two transformations will change
6170 the sign of the result. In the last four, the original
6171 expressions give different results for (A=+0, B=-0) and
6172 (A=-0, B=+0), but the transformed expressions do not.
6174 The first two transformations are correct if either A or B
6175 is a NaN. In the first transformation, the condition will
6176 be false, and B will indeed be chosen. In the case of the
6177 second transformation, the condition A != B will be true,
6178 and A will be chosen.
6180 The conversions to max() and min() are not correct if B is
6181 a number and A is not. The conditions in the original
6182 expressions will be false, so all four give B. The min()
6183 and max() versions would give a NaN instead. */
6184 if (!HONOR_SIGNED_ZEROS (type)
6185 && operand_equal_for_comparison_p (arg01, arg2)
6186 /* Avoid these transformations if the COND_EXPR may be used
6187 as an lvalue in the C++ front-end. PR c++/19199. */
6188 && (in_gimple_form
6189 || VECTOR_TYPE_P (type)
6190 || (! lang_GNU_CXX ()
6191 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6192 || ! maybe_lvalue_p (arg1)
6193 || ! maybe_lvalue_p (arg2)))
6195 tree comp_op0 = arg00;
6196 tree comp_op1 = arg01;
6197 tree comp_type = TREE_TYPE (comp_op0);
6199 switch (comp_code)
6201 case EQ_EXPR:
6202 return fold_convert_loc (loc, type, arg2);
6203 case NE_EXPR:
6204 return fold_convert_loc (loc, type, arg1);
6205 case LE_EXPR:
6206 case LT_EXPR:
6207 case UNLE_EXPR:
6208 case UNLT_EXPR:
6209 /* In C++ a ?: expression can be an lvalue, so put the
6210 operand which will be used if they are equal first
6211 so that we can convert this back to the
6212 corresponding COND_EXPR. */
6213 if (!HONOR_NANS (arg1))
6215 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6216 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6217 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6218 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6219 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6220 comp_op1, comp_op0);
6221 return fold_convert_loc (loc, type, tem);
6223 break;
6224 case GE_EXPR:
6225 case GT_EXPR:
6226 case UNGE_EXPR:
6227 case UNGT_EXPR:
6228 if (!HONOR_NANS (arg1))
6230 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6231 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6232 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6233 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6234 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6235 comp_op1, comp_op0);
6236 return fold_convert_loc (loc, type, tem);
6238 break;
6239 case UNEQ_EXPR:
6240 if (!HONOR_NANS (arg1))
6241 return fold_convert_loc (loc, type, arg2);
6242 break;
6243 case LTGT_EXPR:
6244 if (!HONOR_NANS (arg1))
6245 return fold_convert_loc (loc, type, arg1);
6246 break;
6247 default:
6248 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6249 break;
6253 return NULL_TREE;
6258 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6259 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6260 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6261 false) >= 2)
6262 #endif
6264 /* EXP is some logical combination of boolean tests. See if we can
6265 merge it into some range test. Return the new tree if so. */
6267 static tree
6268 fold_range_test (location_t loc, enum tree_code code, tree type,
6269 tree op0, tree op1)
6271 int or_op = (code == TRUTH_ORIF_EXPR
6272 || code == TRUTH_OR_EXPR);
6273 int in0_p, in1_p, in_p;
6274 tree low0, low1, low, high0, high1, high;
6275 bool strict_overflow_p = false;
6276 tree tem, lhs, rhs;
6277 const char * const warnmsg = G_("assuming signed overflow does not occur "
6278 "when simplifying range test");
6280 if (!INTEGRAL_TYPE_P (type))
6281 return 0;
6283 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6284 /* If op0 is known true or false and this is a short-circuiting
6285 operation we must not merge with op1 since that makes side-effects
6286 unconditional. So special-case this. */
6287 if (!lhs
6288 && ((code == TRUTH_ORIF_EXPR && in0_p)
6289 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6290 return op0;
6291 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6293 /* If this is an OR operation, invert both sides; we will invert
6294 again at the end. */
6295 if (or_op)
6296 in0_p = ! in0_p, in1_p = ! in1_p;
6298 /* If both expressions are the same, if we can merge the ranges, and we
6299 can build the range test, return it or it inverted. If one of the
6300 ranges is always true or always false, consider it to be the same
6301 expression as the other. */
6302 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6303 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6304 in1_p, low1, high1)
6305 && (tem = (build_range_check (loc, type,
6306 lhs != 0 ? lhs
6307 : rhs != 0 ? rhs : integer_zero_node,
6308 in_p, low, high))) != 0)
6310 if (strict_overflow_p)
6311 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6312 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6315 /* On machines where the branch cost is expensive, if this is a
6316 short-circuited branch and the underlying object on both sides
6317 is the same, make a non-short-circuit operation. */
6318 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6319 if (param_logical_op_non_short_circuit != -1)
6320 logical_op_non_short_circuit
6321 = param_logical_op_non_short_circuit;
6322 if (logical_op_non_short_circuit
6323 && !sanitize_coverage_p ()
6324 && lhs != 0 && rhs != 0
6325 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6326 && operand_equal_p (lhs, rhs, 0))
6328 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6329 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6330 which cases we can't do this. */
6331 if (simple_operand_p (lhs))
6332 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6333 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6334 type, op0, op1);
6336 else if (!lang_hooks.decls.global_bindings_p ()
6337 && !CONTAINS_PLACEHOLDER_P (lhs))
6339 tree common = save_expr (lhs);
6341 if ((lhs = build_range_check (loc, type, common,
6342 or_op ? ! in0_p : in0_p,
6343 low0, high0)) != 0
6344 && (rhs = build_range_check (loc, type, common,
6345 or_op ? ! in1_p : in1_p,
6346 low1, high1)) != 0)
6348 if (strict_overflow_p)
6349 fold_overflow_warning (warnmsg,
6350 WARN_STRICT_OVERFLOW_COMPARISON);
6351 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6352 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6353 type, lhs, rhs);
6358 return 0;
6361 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6362 bit value. Arrange things so the extra bits will be set to zero if and
6363 only if C is signed-extended to its full width. If MASK is nonzero,
6364 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6366 static tree
6367 unextend (tree c, int p, int unsignedp, tree mask)
6369 tree type = TREE_TYPE (c);
6370 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6371 tree temp;
6373 if (p == modesize || unsignedp)
6374 return c;
6376 /* We work by getting just the sign bit into the low-order bit, then
6377 into the high-order bit, then sign-extend. We then XOR that value
6378 with C. */
6379 temp = build_int_cst (TREE_TYPE (c),
6380 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6382 /* We must use a signed type in order to get an arithmetic right shift.
6383 However, we must also avoid introducing accidental overflows, so that
6384 a subsequent call to integer_zerop will work. Hence we must
6385 do the type conversion here. At this point, the constant is either
6386 zero or one, and the conversion to a signed type can never overflow.
6387 We could get an overflow if this conversion is done anywhere else. */
6388 if (TYPE_UNSIGNED (type))
6389 temp = fold_convert (signed_type_for (type), temp);
6391 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6392 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6393 if (mask != 0)
6394 temp = const_binop (BIT_AND_EXPR, temp,
6395 fold_convert (TREE_TYPE (c), mask));
6396 /* If necessary, convert the type back to match the type of C. */
6397 if (TYPE_UNSIGNED (type))
6398 temp = fold_convert (type, temp);
6400 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6403 /* For an expression that has the form
6404 (A && B) || ~B
6406 (A || B) && ~B,
6407 we can drop one of the inner expressions and simplify to
6408 A || ~B
6410 A && ~B
6411 LOC is the location of the resulting expression. OP is the inner
6412 logical operation; the left-hand side in the examples above, while CMPOP
6413 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6414 removing a condition that guards another, as in
6415 (A != NULL && A->...) || A == NULL
6416 which we must not transform. If RHS_ONLY is true, only eliminate the
6417 right-most operand of the inner logical operation. */
6419 static tree
6420 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6421 bool rhs_only)
6423 enum tree_code code = TREE_CODE (cmpop);
6424 enum tree_code truthop_code = TREE_CODE (op);
6425 tree lhs = TREE_OPERAND (op, 0);
6426 tree rhs = TREE_OPERAND (op, 1);
6427 tree orig_lhs = lhs, orig_rhs = rhs;
6428 enum tree_code rhs_code = TREE_CODE (rhs);
6429 enum tree_code lhs_code = TREE_CODE (lhs);
6430 enum tree_code inv_code;
6432 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6433 return NULL_TREE;
6435 if (TREE_CODE_CLASS (code) != tcc_comparison)
6436 return NULL_TREE;
6438 tree type = TREE_TYPE (TREE_OPERAND (cmpop, 0));
6440 if (rhs_code == truthop_code)
6442 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6443 if (newrhs != NULL_TREE)
6445 rhs = newrhs;
6446 rhs_code = TREE_CODE (rhs);
6449 if (lhs_code == truthop_code && !rhs_only)
6451 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6452 if (newlhs != NULL_TREE)
6454 lhs = newlhs;
6455 lhs_code = TREE_CODE (lhs);
6459 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6460 if (inv_code == rhs_code
6461 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6462 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6463 return lhs;
6464 if (!rhs_only && inv_code == lhs_code
6465 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6466 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6467 return rhs;
6468 if (rhs != orig_rhs || lhs != orig_lhs)
6469 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6470 lhs, rhs);
6471 return NULL_TREE;
6474 /* Find ways of folding logical expressions of LHS and RHS:
6475 Try to merge two comparisons to the same innermost item.
6476 Look for range tests like "ch >= '0' && ch <= '9'".
6477 Look for combinations of simple terms on machines with expensive branches
6478 and evaluate the RHS unconditionally.
6480 For example, if we have p->a == 2 && p->b == 4 and we can make an
6481 object large enough to span both A and B, we can do this with a comparison
6482 against the object ANDed with the a mask.
6484 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6485 operations to do this with one comparison.
6487 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6488 function and the one above.
6490 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6491 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6493 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6494 two operands.
6496 We return the simplified tree or 0 if no optimization is possible. */
6498 static tree
6499 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6500 tree lhs, tree rhs)
6502 /* If this is the "or" of two comparisons, we can do something if
6503 the comparisons are NE_EXPR. If this is the "and", we can do something
6504 if the comparisons are EQ_EXPR. I.e.,
6505 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6507 WANTED_CODE is this operation code. For single bit fields, we can
6508 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6509 comparison for one-bit fields. */
6511 enum tree_code wanted_code;
6512 enum tree_code lcode, rcode;
6513 tree ll_arg, lr_arg, rl_arg, rr_arg;
6514 tree ll_inner, lr_inner, rl_inner, rr_inner;
6515 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6516 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6517 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6518 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6519 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6520 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6521 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6522 scalar_int_mode lnmode, rnmode;
6523 tree ll_mask, lr_mask, rl_mask, rr_mask;
6524 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6525 tree l_const, r_const;
6526 tree lntype, rntype, result;
6527 HOST_WIDE_INT first_bit, end_bit;
6528 int volatilep;
6530 /* Start by getting the comparison codes. Fail if anything is volatile.
6531 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6532 it were surrounded with a NE_EXPR. */
6534 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6535 return 0;
6537 lcode = TREE_CODE (lhs);
6538 rcode = TREE_CODE (rhs);
6540 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6542 lhs = build2 (NE_EXPR, truth_type, lhs,
6543 build_int_cst (TREE_TYPE (lhs), 0));
6544 lcode = NE_EXPR;
6547 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6549 rhs = build2 (NE_EXPR, truth_type, rhs,
6550 build_int_cst (TREE_TYPE (rhs), 0));
6551 rcode = NE_EXPR;
6554 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6555 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6556 return 0;
6558 ll_arg = TREE_OPERAND (lhs, 0);
6559 lr_arg = TREE_OPERAND (lhs, 1);
6560 rl_arg = TREE_OPERAND (rhs, 0);
6561 rr_arg = TREE_OPERAND (rhs, 1);
6563 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6564 if (simple_operand_p (ll_arg)
6565 && simple_operand_p (lr_arg))
6567 if (operand_equal_p (ll_arg, rl_arg, 0)
6568 && operand_equal_p (lr_arg, rr_arg, 0))
6570 result = combine_comparisons (loc, code, lcode, rcode,
6571 truth_type, ll_arg, lr_arg);
6572 if (result)
6573 return result;
6575 else if (operand_equal_p (ll_arg, rr_arg, 0)
6576 && operand_equal_p (lr_arg, rl_arg, 0))
6578 result = combine_comparisons (loc, code, lcode,
6579 swap_tree_comparison (rcode),
6580 truth_type, ll_arg, lr_arg);
6581 if (result)
6582 return result;
6586 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6587 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6589 /* If the RHS can be evaluated unconditionally and its operands are
6590 simple, it wins to evaluate the RHS unconditionally on machines
6591 with expensive branches. In this case, this isn't a comparison
6592 that can be merged. */
6594 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6595 false) >= 2
6596 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6597 && simple_operand_p (rl_arg)
6598 && simple_operand_p (rr_arg))
6600 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6601 if (code == TRUTH_OR_EXPR
6602 && lcode == NE_EXPR && integer_zerop (lr_arg)
6603 && rcode == NE_EXPR && integer_zerop (rr_arg)
6604 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6605 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6606 return build2_loc (loc, NE_EXPR, truth_type,
6607 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6608 ll_arg, rl_arg),
6609 build_int_cst (TREE_TYPE (ll_arg), 0));
6611 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6612 if (code == TRUTH_AND_EXPR
6613 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6614 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6615 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6616 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6617 return build2_loc (loc, EQ_EXPR, truth_type,
6618 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6619 ll_arg, rl_arg),
6620 build_int_cst (TREE_TYPE (ll_arg), 0));
6623 /* See if the comparisons can be merged. Then get all the parameters for
6624 each side. */
6626 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6627 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6628 return 0;
6630 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6631 volatilep = 0;
6632 ll_inner = decode_field_reference (loc, &ll_arg,
6633 &ll_bitsize, &ll_bitpos, &ll_mode,
6634 &ll_unsignedp, &ll_reversep, &volatilep,
6635 &ll_mask, &ll_and_mask);
6636 lr_inner = decode_field_reference (loc, &lr_arg,
6637 &lr_bitsize, &lr_bitpos, &lr_mode,
6638 &lr_unsignedp, &lr_reversep, &volatilep,
6639 &lr_mask, &lr_and_mask);
6640 rl_inner = decode_field_reference (loc, &rl_arg,
6641 &rl_bitsize, &rl_bitpos, &rl_mode,
6642 &rl_unsignedp, &rl_reversep, &volatilep,
6643 &rl_mask, &rl_and_mask);
6644 rr_inner = decode_field_reference (loc, &rr_arg,
6645 &rr_bitsize, &rr_bitpos, &rr_mode,
6646 &rr_unsignedp, &rr_reversep, &volatilep,
6647 &rr_mask, &rr_and_mask);
6649 /* It must be true that the inner operation on the lhs of each
6650 comparison must be the same if we are to be able to do anything.
6651 Then see if we have constants. If not, the same must be true for
6652 the rhs's. */
6653 if (volatilep
6654 || ll_reversep != rl_reversep
6655 || ll_inner == 0 || rl_inner == 0
6656 || ! operand_equal_p (ll_inner, rl_inner, 0))
6657 return 0;
6659 if (TREE_CODE (lr_arg) == INTEGER_CST
6660 && TREE_CODE (rr_arg) == INTEGER_CST)
6662 l_const = lr_arg, r_const = rr_arg;
6663 lr_reversep = ll_reversep;
6665 else if (lr_reversep != rr_reversep
6666 || lr_inner == 0 || rr_inner == 0
6667 || ! operand_equal_p (lr_inner, rr_inner, 0))
6668 return 0;
6669 else
6670 l_const = r_const = 0;
6672 /* If either comparison code is not correct for our logical operation,
6673 fail. However, we can convert a one-bit comparison against zero into
6674 the opposite comparison against that bit being set in the field. */
6676 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6677 if (lcode != wanted_code)
6679 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6681 /* Make the left operand unsigned, since we are only interested
6682 in the value of one bit. Otherwise we are doing the wrong
6683 thing below. */
6684 ll_unsignedp = 1;
6685 l_const = ll_mask;
6687 else
6688 return 0;
6691 /* This is analogous to the code for l_const above. */
6692 if (rcode != wanted_code)
6694 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6696 rl_unsignedp = 1;
6697 r_const = rl_mask;
6699 else
6700 return 0;
6703 /* See if we can find a mode that contains both fields being compared on
6704 the left. If we can't, fail. Otherwise, update all constants and masks
6705 to be relative to a field of that size. */
6706 first_bit = MIN (ll_bitpos, rl_bitpos);
6707 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6708 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6709 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6710 volatilep, &lnmode))
6711 return 0;
6713 lnbitsize = GET_MODE_BITSIZE (lnmode);
6714 lnbitpos = first_bit & ~ (lnbitsize - 1);
6715 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6716 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6718 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6720 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6721 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6724 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6725 size_int (xll_bitpos));
6726 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6727 size_int (xrl_bitpos));
6728 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6729 return 0;
6731 if (l_const)
6733 l_const = fold_convert_loc (loc, lntype, l_const);
6734 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6735 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6736 if (l_const == NULL_TREE)
6737 return 0;
6738 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6739 fold_build1_loc (loc, BIT_NOT_EXPR,
6740 lntype, ll_mask))))
6742 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6744 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6747 if (r_const)
6749 r_const = fold_convert_loc (loc, lntype, r_const);
6750 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6751 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6752 if (r_const == NULL_TREE)
6753 return 0;
6754 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6755 fold_build1_loc (loc, BIT_NOT_EXPR,
6756 lntype, rl_mask))))
6758 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6760 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6764 /* If the right sides are not constant, do the same for it. Also,
6765 disallow this optimization if a size, signedness or storage order
6766 mismatch occurs between the left and right sides. */
6767 if (l_const == 0)
6769 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6770 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6771 || ll_reversep != lr_reversep
6772 /* Make sure the two fields on the right
6773 correspond to the left without being swapped. */
6774 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6775 return 0;
6777 first_bit = MIN (lr_bitpos, rr_bitpos);
6778 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6779 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6780 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6781 volatilep, &rnmode))
6782 return 0;
6784 rnbitsize = GET_MODE_BITSIZE (rnmode);
6785 rnbitpos = first_bit & ~ (rnbitsize - 1);
6786 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6787 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6789 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6791 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6792 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6795 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6796 rntype, lr_mask),
6797 size_int (xlr_bitpos));
6798 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6799 rntype, rr_mask),
6800 size_int (xrr_bitpos));
6801 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6802 return 0;
6804 /* Make a mask that corresponds to both fields being compared.
6805 Do this for both items being compared. If the operands are the
6806 same size and the bits being compared are in the same position
6807 then we can do this by masking both and comparing the masked
6808 results. */
6809 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6810 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6811 if (lnbitsize == rnbitsize
6812 && xll_bitpos == xlr_bitpos
6813 && lnbitpos >= 0
6814 && rnbitpos >= 0)
6816 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6817 lntype, lnbitsize, lnbitpos,
6818 ll_unsignedp || rl_unsignedp, ll_reversep);
6819 if (! all_ones_mask_p (ll_mask, lnbitsize))
6820 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6822 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6823 rntype, rnbitsize, rnbitpos,
6824 lr_unsignedp || rr_unsignedp, lr_reversep);
6825 if (! all_ones_mask_p (lr_mask, rnbitsize))
6826 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6828 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6831 /* There is still another way we can do something: If both pairs of
6832 fields being compared are adjacent, we may be able to make a wider
6833 field containing them both.
6835 Note that we still must mask the lhs/rhs expressions. Furthermore,
6836 the mask must be shifted to account for the shift done by
6837 make_bit_field_ref. */
6838 if (((ll_bitsize + ll_bitpos == rl_bitpos
6839 && lr_bitsize + lr_bitpos == rr_bitpos)
6840 || (ll_bitpos == rl_bitpos + rl_bitsize
6841 && lr_bitpos == rr_bitpos + rr_bitsize))
6842 && ll_bitpos >= 0
6843 && rl_bitpos >= 0
6844 && lr_bitpos >= 0
6845 && rr_bitpos >= 0)
6847 tree type;
6849 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6850 ll_bitsize + rl_bitsize,
6851 MIN (ll_bitpos, rl_bitpos),
6852 ll_unsignedp, ll_reversep);
6853 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6854 lr_bitsize + rr_bitsize,
6855 MIN (lr_bitpos, rr_bitpos),
6856 lr_unsignedp, lr_reversep);
6858 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6859 size_int (MIN (xll_bitpos, xrl_bitpos)));
6860 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6861 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6862 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6863 return 0;
6865 /* Convert to the smaller type before masking out unwanted bits. */
6866 type = lntype;
6867 if (lntype != rntype)
6869 if (lnbitsize > rnbitsize)
6871 lhs = fold_convert_loc (loc, rntype, lhs);
6872 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6873 type = rntype;
6875 else if (lnbitsize < rnbitsize)
6877 rhs = fold_convert_loc (loc, lntype, rhs);
6878 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6879 type = lntype;
6883 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6884 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6886 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6887 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6889 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6892 return 0;
6895 /* Handle the case of comparisons with constants. If there is something in
6896 common between the masks, those bits of the constants must be the same.
6897 If not, the condition is always false. Test for this to avoid generating
6898 incorrect code below. */
6899 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6900 if (! integer_zerop (result)
6901 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6902 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6904 if (wanted_code == NE_EXPR)
6906 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6907 return constant_boolean_node (true, truth_type);
6909 else
6911 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6912 return constant_boolean_node (false, truth_type);
6916 if (lnbitpos < 0)
6917 return 0;
6919 /* Construct the expression we will return. First get the component
6920 reference we will make. Unless the mask is all ones the width of
6921 that field, perform the mask operation. Then compare with the
6922 merged constant. */
6923 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6924 lntype, lnbitsize, lnbitpos,
6925 ll_unsignedp || rl_unsignedp, ll_reversep);
6927 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6928 if (! all_ones_mask_p (ll_mask, lnbitsize))
6929 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6931 return build2_loc (loc, wanted_code, truth_type, result,
6932 const_binop (BIT_IOR_EXPR, l_const, r_const));
6935 /* T is an integer expression that is being multiplied, divided, or taken a
6936 modulus (CODE says which and what kind of divide or modulus) by a
6937 constant C. See if we can eliminate that operation by folding it with
6938 other operations already in T. WIDE_TYPE, if non-null, is a type that
6939 should be used for the computation if wider than our type.
6941 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6942 (X * 2) + (Y * 4). We must, however, be assured that either the original
6943 expression would not overflow or that overflow is undefined for the type
6944 in the language in question.
6946 If we return a non-null expression, it is an equivalent form of the
6947 original computation, but need not be in the original type.
6949 We set *STRICT_OVERFLOW_P to true if the return values depends on
6950 signed overflow being undefined. Otherwise we do not change
6951 *STRICT_OVERFLOW_P. */
6953 static tree
6954 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6955 bool *strict_overflow_p)
6957 /* To avoid exponential search depth, refuse to allow recursion past
6958 three levels. Beyond that (1) it's highly unlikely that we'll find
6959 something interesting and (2) we've probably processed it before
6960 when we built the inner expression. */
6962 static int depth;
6963 tree ret;
6965 if (depth > 3)
6966 return NULL;
6968 depth++;
6969 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6970 depth--;
6972 return ret;
6975 static tree
6976 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6977 bool *strict_overflow_p)
6979 tree type = TREE_TYPE (t);
6980 enum tree_code tcode = TREE_CODE (t);
6981 tree ctype = type;
6982 if (wide_type)
6984 if (TREE_CODE (type) == BITINT_TYPE
6985 || TREE_CODE (wide_type) == BITINT_TYPE)
6987 if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6988 ctype = wide_type;
6990 else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6991 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6992 ctype = wide_type;
6994 tree t1, t2;
6995 bool same_p = tcode == code;
6996 tree op0 = NULL_TREE, op1 = NULL_TREE;
6997 bool sub_strict_overflow_p;
6999 /* Don't deal with constants of zero here; they confuse the code below. */
7000 if (integer_zerop (c))
7001 return NULL_TREE;
7003 if (TREE_CODE_CLASS (tcode) == tcc_unary)
7004 op0 = TREE_OPERAND (t, 0);
7006 if (TREE_CODE_CLASS (tcode) == tcc_binary)
7007 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
7009 /* Note that we need not handle conditional operations here since fold
7010 already handles those cases. So just do arithmetic here. */
7011 switch (tcode)
7013 case INTEGER_CST:
7014 /* For a constant, we can always simplify if we are a multiply
7015 or (for divide and modulus) if it is a multiple of our constant. */
7016 if (code == MULT_EXPR
7017 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
7018 TYPE_SIGN (type)))
7020 tree tem = const_binop (code, fold_convert (ctype, t),
7021 fold_convert (ctype, c));
7022 /* If the multiplication overflowed, we lost information on it.
7023 See PR68142 and PR69845. */
7024 if (TREE_OVERFLOW (tem))
7025 return NULL_TREE;
7026 return tem;
7028 break;
7030 CASE_CONVERT: case NON_LVALUE_EXPR:
7031 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
7032 break;
7033 /* If op0 is an expression ... */
7034 if ((COMPARISON_CLASS_P (op0)
7035 || UNARY_CLASS_P (op0)
7036 || BINARY_CLASS_P (op0)
7037 || VL_EXP_CLASS_P (op0)
7038 || EXPRESSION_CLASS_P (op0))
7039 /* ... and has wrapping overflow, and its type is smaller
7040 than ctype, then we cannot pass through as widening. */
7041 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
7042 && (TYPE_PRECISION (ctype)
7043 > TYPE_PRECISION (TREE_TYPE (op0))))
7044 /* ... or this is a truncation (t is narrower than op0),
7045 then we cannot pass through this narrowing. */
7046 || (TYPE_PRECISION (type)
7047 < TYPE_PRECISION (TREE_TYPE (op0)))
7048 /* ... or signedness changes for division or modulus,
7049 then we cannot pass through this conversion. */
7050 || (code != MULT_EXPR
7051 && (TYPE_UNSIGNED (ctype)
7052 != TYPE_UNSIGNED (TREE_TYPE (op0))))
7053 /* ... or has undefined overflow while the converted to
7054 type has not, we cannot do the operation in the inner type
7055 as that would introduce undefined overflow. */
7056 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
7057 && !TYPE_OVERFLOW_UNDEFINED (type))))
7058 break;
7060 /* Pass the constant down and see if we can make a simplification. If
7061 we can, replace this expression with the inner simplification for
7062 possible later conversion to our or some other type. */
7063 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
7064 && TREE_CODE (t2) == INTEGER_CST
7065 && !TREE_OVERFLOW (t2)
7066 && (t1 = extract_muldiv (op0, t2, code,
7067 code == MULT_EXPR ? ctype : NULL_TREE,
7068 strict_overflow_p)) != 0)
7069 return t1;
7070 break;
7072 case ABS_EXPR:
7073 /* If widening the type changes it from signed to unsigned, then we
7074 must avoid building ABS_EXPR itself as unsigned. */
7075 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
7077 tree cstype = (*signed_type_for) (ctype);
7078 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
7079 != 0)
7081 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
7082 return fold_convert (ctype, t1);
7084 break;
7086 /* If the constant is negative, we cannot simplify this. */
7087 if (tree_int_cst_sgn (c) == -1)
7088 break;
7089 /* FALLTHROUGH */
7090 case NEGATE_EXPR:
7091 /* For division and modulus, type can't be unsigned, as e.g.
7092 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
7093 For signed types, even with wrapping overflow, this is fine. */
7094 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
7095 break;
7096 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
7097 != 0)
7098 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
7099 break;
7101 case MIN_EXPR: case MAX_EXPR:
7102 /* If widening the type changes the signedness, then we can't perform
7103 this optimization as that changes the result. */
7104 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
7105 break;
7107 /* Punt for multiplication altogether.
7108 MAX (1U + INT_MAX, 1U) * 2U is not equivalent to
7109 MAX ((1U + INT_MAX) * 2U, 1U * 2U), the former is
7110 0U, the latter is 2U.
7111 MAX (INT_MIN / 2, 0) * -2 is not equivalent to
7112 MIN (INT_MIN / 2 * -2, 0 * -2), the former is
7113 well defined 0, the latter invokes UB.
7114 MAX (INT_MIN / 2, 5) * 5 is not equivalent to
7115 MAX (INT_MIN / 2 * 5, 5 * 5), the former is
7116 well defined 25, the latter invokes UB. */
7117 if (code == MULT_EXPR)
7118 break;
7119 /* For division/modulo, punt on c being -1 for MAX, as
7120 MAX (INT_MIN, 0) / -1 is not equivalent to
7121 MIN (INT_MIN / -1, 0 / -1), the former is well defined
7122 0, the latter invokes UB (or for -fwrapv is INT_MIN).
7123 MIN (INT_MIN, 0) / -1 already invokes UB, so the
7124 transformation won't make it worse. */
7125 else if (tcode == MAX_EXPR && integer_minus_onep (c))
7126 break;
7128 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
7129 sub_strict_overflow_p = false;
7130 if ((t1 = extract_muldiv (op0, c, code, wide_type,
7131 &sub_strict_overflow_p)) != 0
7132 && (t2 = extract_muldiv (op1, c, code, wide_type,
7133 &sub_strict_overflow_p)) != 0)
7135 if (tree_int_cst_sgn (c) < 0)
7136 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
7137 if (sub_strict_overflow_p)
7138 *strict_overflow_p = true;
7139 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7140 fold_convert (ctype, t2));
7142 break;
7144 case LSHIFT_EXPR: case RSHIFT_EXPR:
7145 /* If the second operand is constant, this is a multiplication
7146 or floor division, by a power of two, so we can treat it that
7147 way unless the multiplier or divisor overflows. Signed
7148 left-shift overflow is implementation-defined rather than
7149 undefined in C90, so do not convert signed left shift into
7150 multiplication. */
7151 if (TREE_CODE (op1) == INTEGER_CST
7152 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7153 /* const_binop may not detect overflow correctly,
7154 so check for it explicitly here. */
7155 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7156 wi::to_wide (op1))
7157 && (t1 = fold_convert (ctype,
7158 const_binop (LSHIFT_EXPR, size_one_node,
7159 op1))) != 0
7160 && !TREE_OVERFLOW (t1))
7161 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7162 ? MULT_EXPR : FLOOR_DIV_EXPR,
7163 ctype,
7164 fold_convert (ctype, op0),
7165 t1),
7166 c, code, wide_type, strict_overflow_p);
7167 break;
7169 case PLUS_EXPR: case MINUS_EXPR:
7170 /* See if we can eliminate the operation on both sides. If we can, we
7171 can return a new PLUS or MINUS. If we can't, the only remaining
7172 cases where we can do anything are if the second operand is a
7173 constant. */
7174 sub_strict_overflow_p = false;
7175 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7176 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7177 if (t1 != 0 && t2 != 0
7178 && TYPE_OVERFLOW_WRAPS (ctype)
7179 && (code == MULT_EXPR
7180 /* If not multiplication, we can only do this if both operands
7181 are divisible by c. */
7182 || (multiple_of_p (ctype, op0, c)
7183 && multiple_of_p (ctype, op1, c))))
7185 if (sub_strict_overflow_p)
7186 *strict_overflow_p = true;
7187 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7188 fold_convert (ctype, t2));
7191 /* If this was a subtraction, negate OP1 and set it to be an addition.
7192 This simplifies the logic below. */
7193 if (tcode == MINUS_EXPR)
7195 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7196 /* If OP1 was not easily negatable, the constant may be OP0. */
7197 if (TREE_CODE (op0) == INTEGER_CST)
7199 std::swap (op0, op1);
7200 std::swap (t1, t2);
7204 if (TREE_CODE (op1) != INTEGER_CST)
7205 break;
7207 /* If either OP1 or C are negative, this optimization is not safe for
7208 some of the division and remainder types while for others we need
7209 to change the code. */
7210 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7212 if (code == CEIL_DIV_EXPR)
7213 code = FLOOR_DIV_EXPR;
7214 else if (code == FLOOR_DIV_EXPR)
7215 code = CEIL_DIV_EXPR;
7216 else if (code != MULT_EXPR
7217 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7218 break;
7221 /* If it's a multiply or a division/modulus operation of a multiple
7222 of our constant, do the operation and verify it doesn't overflow. */
7223 if (code == MULT_EXPR
7224 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7225 TYPE_SIGN (type)))
7227 op1 = const_binop (code, fold_convert (ctype, op1),
7228 fold_convert (ctype, c));
7229 /* We allow the constant to overflow with wrapping semantics. */
7230 if (op1 == 0
7231 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7232 break;
7234 else
7235 break;
7237 /* If we have an unsigned type, we cannot widen the operation since it
7238 will change the result if the original computation overflowed. */
7239 if (TYPE_UNSIGNED (ctype) && ctype != type)
7240 break;
7242 /* The last case is if we are a multiply. In that case, we can
7243 apply the distributive law to commute the multiply and addition
7244 if the multiplication of the constants doesn't overflow
7245 and overflow is defined. With undefined overflow
7246 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7247 But fold_plusminus_mult_expr would factor back any power-of-two
7248 value so do not distribute in the first place in this case. */
7249 if (code == MULT_EXPR
7250 && TYPE_OVERFLOW_WRAPS (ctype)
7251 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7252 return fold_build2 (tcode, ctype,
7253 fold_build2 (code, ctype,
7254 fold_convert (ctype, op0),
7255 fold_convert (ctype, c)),
7256 op1);
7258 break;
7260 case MULT_EXPR:
7261 /* We have a special case here if we are doing something like
7262 (C * 8) % 4 since we know that's zero. */
7263 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7264 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7265 /* If the multiplication can overflow we cannot optimize this. */
7266 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7267 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7268 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7269 TYPE_SIGN (type)))
7271 *strict_overflow_p = true;
7272 return omit_one_operand (type, integer_zero_node, op0);
7275 /* ... fall through ... */
7277 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7278 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7279 /* If we can extract our operation from the LHS, do so and return a
7280 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7281 do something only if the second operand is a constant. */
7282 if (same_p
7283 && TYPE_OVERFLOW_WRAPS (ctype)
7284 && (t1 = extract_muldiv (op0, c, code, wide_type,
7285 strict_overflow_p)) != 0)
7286 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7287 fold_convert (ctype, op1));
7288 else if (tcode == MULT_EXPR && code == MULT_EXPR
7289 && TYPE_OVERFLOW_WRAPS (ctype)
7290 && (t1 = extract_muldiv (op1, c, code, wide_type,
7291 strict_overflow_p)) != 0)
7292 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7293 fold_convert (ctype, t1));
7294 else if (TREE_CODE (op1) != INTEGER_CST)
7295 return 0;
7297 /* If these are the same operation types, we can associate them
7298 assuming no overflow. */
7299 if (tcode == code)
7301 bool overflow_p = false;
7302 wi::overflow_type overflow_mul;
7303 signop sign = TYPE_SIGN (ctype);
7304 unsigned prec = TYPE_PRECISION (ctype);
7305 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7306 wi::to_wide (c, prec),
7307 sign, &overflow_mul);
7308 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7309 if (overflow_mul
7310 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7311 overflow_p = true;
7312 if (!overflow_p)
7313 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7314 wide_int_to_tree (ctype, mul));
7317 /* If these operations "cancel" each other, we have the main
7318 optimizations of this pass, which occur when either constant is a
7319 multiple of the other, in which case we replace this with either an
7320 operation or CODE or TCODE.
7322 If we have an unsigned type, we cannot do this since it will change
7323 the result if the original computation overflowed. */
7324 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7325 && !TYPE_OVERFLOW_SANITIZED (ctype)
7326 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7327 || (tcode == MULT_EXPR
7328 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7329 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7330 && code != MULT_EXPR)))
7332 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7333 TYPE_SIGN (type)))
7335 *strict_overflow_p = true;
7336 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7337 fold_convert (ctype,
7338 const_binop (TRUNC_DIV_EXPR,
7339 op1, c)));
7341 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7342 TYPE_SIGN (type)))
7344 *strict_overflow_p = true;
7345 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7346 fold_convert (ctype,
7347 const_binop (TRUNC_DIV_EXPR,
7348 c, op1)));
7351 break;
7353 default:
7354 break;
7357 return 0;
7360 /* Return a node which has the indicated constant VALUE (either 0 or
7361 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7362 and is of the indicated TYPE. */
7364 tree
7365 constant_boolean_node (bool value, tree type)
7367 if (type == integer_type_node)
7368 return value ? integer_one_node : integer_zero_node;
7369 else if (type == boolean_type_node)
7370 return value ? boolean_true_node : boolean_false_node;
7371 else if (VECTOR_TYPE_P (type))
7372 return build_vector_from_val (type,
7373 build_int_cst (TREE_TYPE (type),
7374 value ? -1 : 0));
7375 else
7376 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7380 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7381 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7382 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7383 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7384 COND is the first argument to CODE; otherwise (as in the example
7385 given here), it is the second argument. TYPE is the type of the
7386 original expression. Return NULL_TREE if no simplification is
7387 possible. */
7389 static tree
7390 fold_binary_op_with_conditional_arg (location_t loc,
7391 enum tree_code code,
7392 tree type, tree op0, tree op1,
7393 tree cond, tree arg, int cond_first_p)
7395 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7396 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7397 tree test, true_value, false_value;
7398 tree lhs = NULL_TREE;
7399 tree rhs = NULL_TREE;
7400 enum tree_code cond_code = COND_EXPR;
7402 /* Do not move possibly trapping operations into the conditional as this
7403 pessimizes code and causes gimplification issues when applied late. */
7404 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7405 ANY_INTEGRAL_TYPE_P (type)
7406 && TYPE_OVERFLOW_TRAPS (type), op1))
7407 return NULL_TREE;
7409 if (TREE_CODE (cond) == COND_EXPR
7410 || TREE_CODE (cond) == VEC_COND_EXPR)
7412 test = TREE_OPERAND (cond, 0);
7413 true_value = TREE_OPERAND (cond, 1);
7414 false_value = TREE_OPERAND (cond, 2);
7415 /* If this operand throws an expression, then it does not make
7416 sense to try to perform a logical or arithmetic operation
7417 involving it. */
7418 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7419 lhs = true_value;
7420 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7421 rhs = false_value;
7423 else if (!(TREE_CODE (type) != VECTOR_TYPE
7424 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7426 tree testtype = TREE_TYPE (cond);
7427 test = cond;
7428 true_value = constant_boolean_node (true, testtype);
7429 false_value = constant_boolean_node (false, testtype);
7431 else
7432 /* Detect the case of mixing vector and scalar types - bail out. */
7433 return NULL_TREE;
7435 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7436 cond_code = VEC_COND_EXPR;
7438 /* This transformation is only worthwhile if we don't have to wrap ARG
7439 in a SAVE_EXPR and the operation can be simplified without recursing
7440 on at least one of the branches once its pushed inside the COND_EXPR. */
7441 if (!TREE_CONSTANT (arg)
7442 && (TREE_SIDE_EFFECTS (arg)
7443 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7444 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7445 return NULL_TREE;
7447 arg = fold_convert_loc (loc, arg_type, arg);
7448 if (lhs == 0)
7450 true_value = fold_convert_loc (loc, cond_type, true_value);
7451 if (cond_first_p)
7452 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7453 else
7454 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7456 if (rhs == 0)
7458 false_value = fold_convert_loc (loc, cond_type, false_value);
7459 if (cond_first_p)
7460 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7461 else
7462 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7465 /* Check that we have simplified at least one of the branches. */
7466 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7467 return NULL_TREE;
7469 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7473 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7475 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7476 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7477 if ARG - ZERO_ARG is the same as X.
7479 If ARG is NULL, check for any value of type TYPE.
7481 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7482 and finite. The problematic cases are when X is zero, and its mode
7483 has signed zeros. In the case of rounding towards -infinity,
7484 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7485 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7487 bool
7488 fold_real_zero_addition_p (const_tree type, const_tree arg,
7489 const_tree zero_arg, int negate)
7491 if (!real_zerop (zero_arg))
7492 return false;
7494 /* Don't allow the fold with -fsignaling-nans. */
7495 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7496 return false;
7498 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7499 if (!HONOR_SIGNED_ZEROS (type))
7500 return true;
7502 /* There is no case that is safe for all rounding modes. */
7503 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7504 return false;
7506 /* In a vector or complex, we would need to check the sign of all zeros. */
7507 if (TREE_CODE (zero_arg) == VECTOR_CST)
7508 zero_arg = uniform_vector_p (zero_arg);
7509 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7510 return false;
7512 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7513 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7514 negate = !negate;
7516 /* The mode has signed zeros, and we have to honor their sign.
7517 In this situation, there are only two cases we can return true for.
7518 (i) X - 0 is the same as X with default rounding.
7519 (ii) X + 0 is X when X can't possibly be -0.0. */
7520 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7523 /* Subroutine of match.pd that optimizes comparisons of a division by
7524 a nonzero integer constant against an integer constant, i.e.
7525 X/C1 op C2.
7527 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7528 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7530 enum tree_code
7531 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7532 tree *hi, bool *neg_overflow)
7534 tree prod, tmp, type = TREE_TYPE (c1);
7535 signop sign = TYPE_SIGN (type);
7536 wi::overflow_type overflow;
7538 /* We have to do this the hard way to detect unsigned overflow.
7539 prod = int_const_binop (MULT_EXPR, c1, c2); */
7540 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7541 prod = force_fit_type (type, val, -1, overflow);
7542 *neg_overflow = false;
7544 if (sign == UNSIGNED)
7546 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7547 *lo = prod;
7549 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7550 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7551 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7553 else if (tree_int_cst_sgn (c1) >= 0)
7555 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7556 switch (tree_int_cst_sgn (c2))
7558 case -1:
7559 *neg_overflow = true;
7560 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7561 *hi = prod;
7562 break;
7564 case 0:
7565 *lo = fold_negate_const (tmp, type);
7566 *hi = tmp;
7567 break;
7569 case 1:
7570 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7571 *lo = prod;
7572 break;
7574 default:
7575 gcc_unreachable ();
7578 else
7580 /* A negative divisor reverses the relational operators. */
7581 code = swap_tree_comparison (code);
7583 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7584 switch (tree_int_cst_sgn (c2))
7586 case -1:
7587 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7588 *lo = prod;
7589 break;
7591 case 0:
7592 *hi = fold_negate_const (tmp, type);
7593 *lo = tmp;
7594 break;
7596 case 1:
7597 *neg_overflow = true;
7598 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7599 *hi = prod;
7600 break;
7602 default:
7603 gcc_unreachable ();
7607 if (code != EQ_EXPR && code != NE_EXPR)
7608 return code;
7610 if (TREE_OVERFLOW (*lo)
7611 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7612 *lo = NULL_TREE;
7613 if (TREE_OVERFLOW (*hi)
7614 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7615 *hi = NULL_TREE;
7617 return code;
7620 /* Test whether it is preferable to swap two operands, ARG0 and
7621 ARG1, for example because ARG0 is an integer constant and ARG1
7622 isn't. */
7624 bool
7625 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7627 if (CONSTANT_CLASS_P (arg1))
7628 return false;
7629 if (CONSTANT_CLASS_P (arg0))
7630 return true;
7632 STRIP_NOPS (arg0);
7633 STRIP_NOPS (arg1);
7635 if (TREE_CONSTANT (arg1))
7636 return false;
7637 if (TREE_CONSTANT (arg0))
7638 return true;
7640 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7641 for commutative and comparison operators. Ensuring a canonical
7642 form allows the optimizers to find additional redundancies without
7643 having to explicitly check for both orderings. */
7644 if (TREE_CODE (arg0) == SSA_NAME
7645 && TREE_CODE (arg1) == SSA_NAME
7646 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7647 return true;
7649 /* Put SSA_NAMEs last. */
7650 if (TREE_CODE (arg1) == SSA_NAME)
7651 return false;
7652 if (TREE_CODE (arg0) == SSA_NAME)
7653 return true;
7655 /* Put variables last. */
7656 if (DECL_P (arg1))
7657 return false;
7658 if (DECL_P (arg0))
7659 return true;
7661 return false;
7665 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7666 means A >= Y && A != MAX, but in this case we know that
7667 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7669 static tree
7670 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7672 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7674 if (TREE_CODE (bound) == LT_EXPR)
7675 a = TREE_OPERAND (bound, 0);
7676 else if (TREE_CODE (bound) == GT_EXPR)
7677 a = TREE_OPERAND (bound, 1);
7678 else
7679 return NULL_TREE;
7681 typea = TREE_TYPE (a);
7682 if (!INTEGRAL_TYPE_P (typea)
7683 && !POINTER_TYPE_P (typea))
7684 return NULL_TREE;
7686 if (TREE_CODE (ineq) == LT_EXPR)
7688 a1 = TREE_OPERAND (ineq, 1);
7689 y = TREE_OPERAND (ineq, 0);
7691 else if (TREE_CODE (ineq) == GT_EXPR)
7693 a1 = TREE_OPERAND (ineq, 0);
7694 y = TREE_OPERAND (ineq, 1);
7696 else
7697 return NULL_TREE;
7699 if (TREE_TYPE (a1) != typea)
7700 return NULL_TREE;
7702 if (POINTER_TYPE_P (typea))
7704 /* Convert the pointer types into integer before taking the difference. */
7705 tree ta = fold_convert_loc (loc, ssizetype, a);
7706 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7707 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7709 else
7710 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7712 if (!diff || !integer_onep (diff))
7713 return NULL_TREE;
7715 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7718 /* Fold a sum or difference of at least one multiplication.
7719 Returns the folded tree or NULL if no simplification could be made. */
7721 static tree
7722 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7723 tree arg0, tree arg1)
7725 tree arg00, arg01, arg10, arg11;
7726 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7728 /* (A * C) +- (B * C) -> (A+-B) * C.
7729 (A * C) +- A -> A * (C+-1).
7730 We are most concerned about the case where C is a constant,
7731 but other combinations show up during loop reduction. Since
7732 it is not difficult, try all four possibilities. */
7734 if (TREE_CODE (arg0) == MULT_EXPR)
7736 arg00 = TREE_OPERAND (arg0, 0);
7737 arg01 = TREE_OPERAND (arg0, 1);
7739 else if (TREE_CODE (arg0) == INTEGER_CST)
7741 arg00 = build_one_cst (type);
7742 arg01 = arg0;
7744 else
7746 /* We cannot generate constant 1 for fract. */
7747 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7748 return NULL_TREE;
7749 arg00 = arg0;
7750 arg01 = build_one_cst (type);
7752 if (TREE_CODE (arg1) == MULT_EXPR)
7754 arg10 = TREE_OPERAND (arg1, 0);
7755 arg11 = TREE_OPERAND (arg1, 1);
7757 else if (TREE_CODE (arg1) == INTEGER_CST)
7759 arg10 = build_one_cst (type);
7760 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7761 the purpose of this canonicalization. */
7762 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7763 && negate_expr_p (arg1)
7764 && code == PLUS_EXPR)
7766 arg11 = negate_expr (arg1);
7767 code = MINUS_EXPR;
7769 else
7770 arg11 = arg1;
7772 else
7774 /* We cannot generate constant 1 for fract. */
7775 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7776 return NULL_TREE;
7777 arg10 = arg1;
7778 arg11 = build_one_cst (type);
7780 same = NULL_TREE;
7782 /* Prefer factoring a common non-constant. */
7783 if (operand_equal_p (arg00, arg10, 0))
7784 same = arg00, alt0 = arg01, alt1 = arg11;
7785 else if (operand_equal_p (arg01, arg11, 0))
7786 same = arg01, alt0 = arg00, alt1 = arg10;
7787 else if (operand_equal_p (arg00, arg11, 0))
7788 same = arg00, alt0 = arg01, alt1 = arg10;
7789 else if (operand_equal_p (arg01, arg10, 0))
7790 same = arg01, alt0 = arg00, alt1 = arg11;
7792 /* No identical multiplicands; see if we can find a common
7793 power-of-two factor in non-power-of-two multiplies. This
7794 can help in multi-dimensional array access. */
7795 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7797 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7798 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7799 HOST_WIDE_INT tmp;
7800 bool swap = false;
7801 tree maybe_same;
7803 /* Move min of absolute values to int11. */
7804 if (absu_hwi (int01) < absu_hwi (int11))
7806 tmp = int01, int01 = int11, int11 = tmp;
7807 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7808 maybe_same = arg01;
7809 swap = true;
7811 else
7812 maybe_same = arg11;
7814 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7815 if (factor > 1
7816 && pow2p_hwi (factor)
7817 && (int01 & (factor - 1)) == 0
7818 /* The remainder should not be a constant, otherwise we
7819 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7820 increased the number of multiplications necessary. */
7821 && TREE_CODE (arg10) != INTEGER_CST)
7823 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7824 build_int_cst (TREE_TYPE (arg00),
7825 int01 / int11));
7826 alt1 = arg10;
7827 same = maybe_same;
7828 if (swap)
7829 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7833 if (!same)
7834 return NULL_TREE;
7836 if (! ANY_INTEGRAL_TYPE_P (type)
7837 || TYPE_OVERFLOW_WRAPS (type)
7838 /* We are neither factoring zero nor minus one. */
7839 || TREE_CODE (same) == INTEGER_CST)
7840 return fold_build2_loc (loc, MULT_EXPR, type,
7841 fold_build2_loc (loc, code, type,
7842 fold_convert_loc (loc, type, alt0),
7843 fold_convert_loc (loc, type, alt1)),
7844 fold_convert_loc (loc, type, same));
7846 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7847 same may be minus one and thus the multiplication may overflow. Perform
7848 the sum operation in an unsigned type. */
7849 tree utype = unsigned_type_for (type);
7850 tree tem = fold_build2_loc (loc, code, utype,
7851 fold_convert_loc (loc, utype, alt0),
7852 fold_convert_loc (loc, utype, alt1));
7853 /* If the sum evaluated to a constant that is not -INF the multiplication
7854 cannot overflow. */
7855 if (TREE_CODE (tem) == INTEGER_CST
7856 && (wi::to_wide (tem)
7857 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7858 return fold_build2_loc (loc, MULT_EXPR, type,
7859 fold_convert (type, tem), same);
7861 /* Do not resort to unsigned multiplication because
7862 we lose the no-overflow property of the expression. */
7863 return NULL_TREE;
7866 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7867 specified by EXPR into the buffer PTR of length LEN bytes.
7868 Return the number of bytes placed in the buffer, or zero
7869 upon failure. */
7871 static int
7872 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7874 tree type = TREE_TYPE (expr);
7875 int total_bytes;
7876 if (TREE_CODE (type) == BITINT_TYPE)
7878 struct bitint_info info;
7879 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7880 gcc_assert (ok);
7881 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7882 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7884 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7885 /* More work is needed when adding _BitInt support to PDP endian
7886 if limb is smaller than word, or if _BitInt limb ordering doesn't
7887 match target endianity here. */
7888 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7889 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7890 || (GET_MODE_SIZE (limb_mode)
7891 >= UNITS_PER_WORD)));
7893 else
7894 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7896 else
7897 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7898 int byte, offset, word, words;
7899 unsigned char value;
7901 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7902 return 0;
7903 if (off == -1)
7904 off = 0;
7906 if (ptr == NULL)
7907 /* Dry run. */
7908 return MIN (len, total_bytes - off);
7910 words = total_bytes / UNITS_PER_WORD;
7912 for (byte = 0; byte < total_bytes; byte++)
7914 int bitpos = byte * BITS_PER_UNIT;
7915 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7916 number of bytes. */
7917 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7919 if (total_bytes > UNITS_PER_WORD)
7921 word = byte / UNITS_PER_WORD;
7922 if (WORDS_BIG_ENDIAN)
7923 word = (words - 1) - word;
7924 offset = word * UNITS_PER_WORD;
7925 if (BYTES_BIG_ENDIAN)
7926 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7927 else
7928 offset += byte % UNITS_PER_WORD;
7930 else
7931 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7932 if (offset >= off && offset - off < len)
7933 ptr[offset - off] = value;
7935 return MIN (len, total_bytes - off);
7939 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7940 specified by EXPR into the buffer PTR of length LEN bytes.
7941 Return the number of bytes placed in the buffer, or zero
7942 upon failure. */
7944 static int
7945 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7947 tree type = TREE_TYPE (expr);
7948 scalar_mode mode = SCALAR_TYPE_MODE (type);
7949 int total_bytes = GET_MODE_SIZE (mode);
7950 FIXED_VALUE_TYPE value;
7951 tree i_value, i_type;
7953 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7954 return 0;
7956 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7958 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7959 return 0;
7961 value = TREE_FIXED_CST (expr);
7962 i_value = double_int_to_tree (i_type, value.data);
7964 return native_encode_int (i_value, ptr, len, off);
7968 /* Subroutine of native_encode_expr. Encode the REAL_CST
7969 specified by EXPR into the buffer PTR of length LEN bytes.
7970 Return the number of bytes placed in the buffer, or zero
7971 upon failure. */
7973 static int
7974 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7976 tree type = TREE_TYPE (expr);
7977 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7978 int byte, offset, word, words, bitpos;
7979 unsigned char value;
7981 /* There are always 32 bits in each long, no matter the size of
7982 the hosts long. We handle floating point representations with
7983 up to 192 bits. */
7984 long tmp[6];
7986 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7987 return 0;
7988 if (off == -1)
7989 off = 0;
7991 if (ptr == NULL)
7992 /* Dry run. */
7993 return MIN (len, total_bytes - off);
7995 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7997 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7999 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8000 bitpos += BITS_PER_UNIT)
8002 byte = (bitpos / BITS_PER_UNIT) & 3;
8003 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
8005 if (UNITS_PER_WORD < 4)
8007 word = byte / UNITS_PER_WORD;
8008 if (WORDS_BIG_ENDIAN)
8009 word = (words - 1) - word;
8010 offset = word * UNITS_PER_WORD;
8011 if (BYTES_BIG_ENDIAN)
8012 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8013 else
8014 offset += byte % UNITS_PER_WORD;
8016 else
8018 offset = byte;
8019 if (BYTES_BIG_ENDIAN)
8021 /* Reverse bytes within each long, or within the entire float
8022 if it's smaller than a long (for HFmode). */
8023 offset = MIN (3, total_bytes - 1) - offset;
8024 gcc_assert (offset >= 0);
8027 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
8028 if (offset >= off
8029 && offset - off < len)
8030 ptr[offset - off] = value;
8032 return MIN (len, total_bytes - off);
8035 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
8036 specified by EXPR into the buffer PTR of length LEN bytes.
8037 Return the number of bytes placed in the buffer, or zero
8038 upon failure. */
8040 static int
8041 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
8043 int rsize, isize;
8044 tree part;
8046 part = TREE_REALPART (expr);
8047 rsize = native_encode_expr (part, ptr, len, off);
8048 if (off == -1 && rsize == 0)
8049 return 0;
8050 part = TREE_IMAGPART (expr);
8051 if (off != -1)
8052 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
8053 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
8054 len - rsize, off);
8055 if (off == -1 && isize != rsize)
8056 return 0;
8057 return rsize + isize;
8060 /* Like native_encode_vector, but only encode the first COUNT elements.
8061 The other arguments are as for native_encode_vector. */
8063 static int
8064 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
8065 int off, unsigned HOST_WIDE_INT count)
8067 tree itype = TREE_TYPE (TREE_TYPE (expr));
8068 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
8069 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
8071 /* This is the only case in which elements can be smaller than a byte.
8072 Element 0 is always in the lsb of the containing byte. */
8073 unsigned int elt_bits = TYPE_PRECISION (itype);
8074 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
8075 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8076 return 0;
8078 if (off == -1)
8079 off = 0;
8081 /* Zero the buffer and then set bits later where necessary. */
8082 int extract_bytes = MIN (len, total_bytes - off);
8083 if (ptr)
8084 memset (ptr, 0, extract_bytes);
8086 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
8087 unsigned int first_elt = off * elts_per_byte;
8088 unsigned int extract_elts = extract_bytes * elts_per_byte;
8089 for (unsigned int i = 0; i < extract_elts; ++i)
8091 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
8092 if (TREE_CODE (elt) != INTEGER_CST)
8093 return 0;
8095 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
8097 unsigned int bit = i * elt_bits;
8098 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
8101 return extract_bytes;
8104 int offset = 0;
8105 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
8106 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
8108 if (off >= size)
8110 off -= size;
8111 continue;
8113 tree elem = VECTOR_CST_ELT (expr, i);
8114 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
8115 len - offset, off);
8116 if ((off == -1 && res != size) || res == 0)
8117 return 0;
8118 offset += res;
8119 if (offset >= len)
8120 return (off == -1 && i < count - 1) ? 0 : offset;
8121 if (off != -1)
8122 off = 0;
8124 return offset;
8127 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
8128 specified by EXPR into the buffer PTR of length LEN bytes.
8129 Return the number of bytes placed in the buffer, or zero
8130 upon failure. */
8132 static int
8133 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8135 unsigned HOST_WIDE_INT count;
8136 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
8137 return 0;
8138 return native_encode_vector_part (expr, ptr, len, off, count);
8142 /* Subroutine of native_encode_expr. Encode the STRING_CST
8143 specified by EXPR into the buffer PTR of length LEN bytes.
8144 Return the number of bytes placed in the buffer, or zero
8145 upon failure. */
8147 static int
8148 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8150 tree type = TREE_TYPE (expr);
8152 /* Wide-char strings are encoded in target byte-order so native
8153 encoding them is trivial. */
8154 if (BITS_PER_UNIT != CHAR_BIT
8155 || TREE_CODE (type) != ARRAY_TYPE
8156 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8157 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8158 return 0;
8160 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8161 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8162 return 0;
8163 if (off == -1)
8164 off = 0;
8165 len = MIN (total_bytes - off, len);
8166 if (ptr == NULL)
8167 /* Dry run. */;
8168 else
8170 int written = 0;
8171 if (off < TREE_STRING_LENGTH (expr))
8173 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8174 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8176 memset (ptr + written, 0, len - written);
8178 return len;
8182 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8183 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8184 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8185 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8186 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8187 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8188 Return the number of bytes placed in the buffer, or zero upon failure. */
8191 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8193 /* We don't support starting at negative offset and -1 is special. */
8194 if (off < -1)
8195 return 0;
8197 switch (TREE_CODE (expr))
8199 case INTEGER_CST:
8200 return native_encode_int (expr, ptr, len, off);
8202 case REAL_CST:
8203 return native_encode_real (expr, ptr, len, off);
8205 case FIXED_CST:
8206 return native_encode_fixed (expr, ptr, len, off);
8208 case COMPLEX_CST:
8209 return native_encode_complex (expr, ptr, len, off);
8211 case VECTOR_CST:
8212 return native_encode_vector (expr, ptr, len, off);
8214 case STRING_CST:
8215 return native_encode_string (expr, ptr, len, off);
8217 default:
8218 return 0;
8222 /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
8223 or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
8224 of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
8225 machine modes, we can't just use build_nonstandard_integer_type. */
8227 tree
8228 find_bitfield_repr_type (int fieldsize, int len)
8230 machine_mode mode;
8231 for (int pass = 0; pass < 2; pass++)
8233 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8234 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8235 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8236 && known_eq (GET_MODE_PRECISION (mode),
8237 GET_MODE_BITSIZE (mode))
8238 && known_le (GET_MODE_SIZE (mode), len))
8240 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8241 if (ret && TYPE_MODE (ret) == mode)
8242 return ret;
8246 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8247 if (int_n_enabled_p[i]
8248 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8249 && int_n_trees[i].unsigned_type)
8251 tree ret = int_n_trees[i].unsigned_type;
8252 mode = TYPE_MODE (ret);
8253 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8254 && known_eq (GET_MODE_PRECISION (mode),
8255 GET_MODE_BITSIZE (mode))
8256 && known_le (GET_MODE_SIZE (mode), len))
8257 return ret;
8260 return NULL_TREE;
8263 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8264 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8265 to be non-NULL and OFF zero), then in addition to filling the
8266 bytes pointed by PTR with the value also clear any bits pointed
8267 by MASK that are known to be initialized, keep them as is for
8268 e.g. uninitialized padding bits or uninitialized fields. */
8271 native_encode_initializer (tree init, unsigned char *ptr, int len,
8272 int off, unsigned char *mask)
8274 int r;
8276 /* We don't support starting at negative offset and -1 is special. */
8277 if (off < -1 || init == NULL_TREE)
8278 return 0;
8280 gcc_assert (mask == NULL || (off == 0 && ptr));
8282 STRIP_NOPS (init);
8283 switch (TREE_CODE (init))
8285 case VIEW_CONVERT_EXPR:
8286 case NON_LVALUE_EXPR:
8287 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8288 mask);
8289 default:
8290 r = native_encode_expr (init, ptr, len, off);
8291 if (mask)
8292 memset (mask, 0, r);
8293 return r;
8294 case CONSTRUCTOR:
8295 tree type = TREE_TYPE (init);
8296 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8297 if (total_bytes < 0)
8298 return 0;
8299 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8300 return 0;
8301 int o = off == -1 ? 0 : off;
8302 if (TREE_CODE (type) == ARRAY_TYPE)
8304 tree min_index;
8305 unsigned HOST_WIDE_INT cnt;
8306 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8307 constructor_elt *ce;
8309 if (!TYPE_DOMAIN (type)
8310 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8311 return 0;
8313 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8314 if (fieldsize <= 0)
8315 return 0;
8317 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8318 if (ptr)
8319 memset (ptr, '\0', MIN (total_bytes - off, len));
8321 for (cnt = 0; ; cnt++)
8323 tree val = NULL_TREE, index = NULL_TREE;
8324 HOST_WIDE_INT pos = curpos, count = 0;
8325 bool full = false;
8326 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8328 val = ce->value;
8329 index = ce->index;
8331 else if (mask == NULL
8332 || CONSTRUCTOR_NO_CLEARING (init)
8333 || curpos >= total_bytes)
8334 break;
8335 else
8336 pos = total_bytes;
8338 if (index && TREE_CODE (index) == RANGE_EXPR)
8340 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8341 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8342 return 0;
8343 offset_int wpos
8344 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8345 - wi::to_offset (min_index),
8346 TYPE_PRECISION (sizetype));
8347 wpos *= fieldsize;
8348 if (!wi::fits_shwi_p (pos))
8349 return 0;
8350 pos = wpos.to_shwi ();
8351 offset_int wcount
8352 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8353 - wi::to_offset (TREE_OPERAND (index, 0)),
8354 TYPE_PRECISION (sizetype));
8355 if (!wi::fits_shwi_p (wcount))
8356 return 0;
8357 count = wcount.to_shwi ();
8359 else if (index)
8361 if (TREE_CODE (index) != INTEGER_CST)
8362 return 0;
8363 offset_int wpos
8364 = wi::sext (wi::to_offset (index)
8365 - wi::to_offset (min_index),
8366 TYPE_PRECISION (sizetype));
8367 wpos *= fieldsize;
8368 if (!wi::fits_shwi_p (wpos))
8369 return 0;
8370 pos = wpos.to_shwi ();
8373 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8375 if (valueinit == -1)
8377 tree zero = build_zero_cst (TREE_TYPE (type));
8378 r = native_encode_initializer (zero, ptr + curpos,
8379 fieldsize, 0,
8380 mask + curpos);
8381 if (TREE_CODE (zero) == CONSTRUCTOR)
8382 ggc_free (zero);
8383 if (!r)
8384 return 0;
8385 valueinit = curpos;
8386 curpos += fieldsize;
8388 while (curpos != pos)
8390 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8391 memcpy (mask + curpos, mask + valueinit, fieldsize);
8392 curpos += fieldsize;
8396 curpos = pos;
8397 if (val)
8400 if (off == -1
8401 || (curpos >= off
8402 && (curpos + fieldsize
8403 <= (HOST_WIDE_INT) off + len)))
8405 if (full)
8407 if (ptr)
8408 memcpy (ptr + (curpos - o), ptr + (pos - o),
8409 fieldsize);
8410 if (mask)
8411 memcpy (mask + curpos, mask + pos, fieldsize);
8413 else if (!native_encode_initializer (val,
8415 ? ptr + curpos - o
8416 : NULL,
8417 fieldsize,
8418 off == -1 ? -1
8419 : 0,
8420 mask
8421 ? mask + curpos
8422 : NULL))
8423 return 0;
8424 else
8426 full = true;
8427 pos = curpos;
8430 else if (curpos + fieldsize > off
8431 && curpos < (HOST_WIDE_INT) off + len)
8433 /* Partial overlap. */
8434 unsigned char *p = NULL;
8435 int no = 0;
8436 int l;
8437 gcc_assert (mask == NULL);
8438 if (curpos >= off)
8440 if (ptr)
8441 p = ptr + curpos - off;
8442 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8443 fieldsize);
8445 else
8447 p = ptr;
8448 no = off - curpos;
8449 l = len;
8451 if (!native_encode_initializer (val, p, l, no, NULL))
8452 return 0;
8454 curpos += fieldsize;
8456 while (count-- != 0);
8458 return MIN (total_bytes - off, len);
8460 else if (TREE_CODE (type) == RECORD_TYPE
8461 || TREE_CODE (type) == UNION_TYPE)
8463 unsigned HOST_WIDE_INT cnt;
8464 constructor_elt *ce;
8465 tree fld_base = TYPE_FIELDS (type);
8466 tree to_free = NULL_TREE;
8468 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8469 if (ptr != NULL)
8470 memset (ptr, '\0', MIN (total_bytes - o, len));
8471 for (cnt = 0; ; cnt++)
8473 tree val = NULL_TREE, field = NULL_TREE;
8474 HOST_WIDE_INT pos = 0, fieldsize;
8475 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8477 if (to_free)
8479 ggc_free (to_free);
8480 to_free = NULL_TREE;
8483 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8485 val = ce->value;
8486 field = ce->index;
8487 if (field == NULL_TREE)
8488 return 0;
8490 pos = int_byte_position (field);
8491 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8492 continue;
8494 else if (mask == NULL
8495 || CONSTRUCTOR_NO_CLEARING (init))
8496 break;
8497 else
8498 pos = total_bytes;
8500 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8502 tree fld;
8503 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8505 if (TREE_CODE (fld) != FIELD_DECL)
8506 continue;
8507 if (fld == field)
8508 break;
8509 if (DECL_PADDING_P (fld))
8510 continue;
8511 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8512 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8513 return 0;
8514 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8515 continue;
8516 break;
8518 if (fld == NULL_TREE)
8520 if (ce == NULL)
8521 break;
8522 return 0;
8524 fld_base = DECL_CHAIN (fld);
8525 if (fld != field)
8527 cnt--;
8528 field = fld;
8529 pos = int_byte_position (field);
8530 val = build_zero_cst (TREE_TYPE (fld));
8531 if (TREE_CODE (val) == CONSTRUCTOR)
8532 to_free = val;
8536 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8537 && TYPE_DOMAIN (TREE_TYPE (field))
8538 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8540 if (mask || off != -1)
8541 return 0;
8542 if (val == NULL_TREE)
8543 continue;
8544 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8545 return 0;
8546 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8547 if (fieldsize < 0
8548 || (int) fieldsize != fieldsize
8549 || (pos + fieldsize) > INT_MAX)
8550 return 0;
8551 if (pos + fieldsize > total_bytes)
8553 if (ptr != NULL && total_bytes < len)
8554 memset (ptr + total_bytes, '\0',
8555 MIN (pos + fieldsize, len) - total_bytes);
8556 total_bytes = pos + fieldsize;
8559 else
8561 if (DECL_SIZE_UNIT (field) == NULL_TREE
8562 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8563 return 0;
8564 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8566 if (fieldsize == 0)
8567 continue;
8569 /* Prepare to deal with integral bit-fields and filter out other
8570 bit-fields that do not start and end on a byte boundary. */
8571 if (DECL_BIT_FIELD (field))
8573 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8574 return 0;
8575 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8576 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8578 bpos %= BITS_PER_UNIT;
8579 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8580 epos = fieldsize % BITS_PER_UNIT;
8581 fieldsize += BITS_PER_UNIT - 1;
8582 fieldsize /= BITS_PER_UNIT;
8584 else if (bpos % BITS_PER_UNIT
8585 || DECL_SIZE (field) == NULL_TREE
8586 || !tree_fits_shwi_p (DECL_SIZE (field))
8587 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8588 return 0;
8591 if (off != -1 && pos + fieldsize <= off)
8592 continue;
8594 if (val == NULL_TREE)
8595 continue;
8597 if (DECL_BIT_FIELD (field)
8598 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8600 /* FIXME: Handle PDP endian. */
8601 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8602 return 0;
8604 if (TREE_CODE (val) == NON_LVALUE_EXPR)
8605 val = TREE_OPERAND (val, 0);
8606 if (TREE_CODE (val) != INTEGER_CST)
8607 return 0;
8609 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8610 tree repr_type = NULL_TREE;
8611 HOST_WIDE_INT rpos = 0;
8612 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8614 rpos = int_byte_position (repr);
8615 repr_type = TREE_TYPE (repr);
8617 else
8619 repr_type = find_bitfield_repr_type (fieldsize, len);
8620 if (repr_type == NULL_TREE)
8621 return 0;
8622 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8623 gcc_assert (repr_size > 0 && repr_size <= len);
8624 if (pos + repr_size <= o + len)
8625 rpos = pos;
8626 else
8628 rpos = o + len - repr_size;
8629 gcc_assert (rpos <= pos);
8633 if (rpos > pos)
8634 return 0;
8635 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8636 int diff = (TYPE_PRECISION (repr_type)
8637 - TYPE_PRECISION (TREE_TYPE (field)));
8638 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8639 if (!BYTES_BIG_ENDIAN)
8640 w = wi::lshift (w, bitoff);
8641 else
8642 w = wi::lshift (w, diff - bitoff);
8643 val = wide_int_to_tree (repr_type, w);
8645 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8646 / BITS_PER_UNIT + 1];
8647 int l = native_encode_int (val, buf, sizeof buf, 0);
8648 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8649 return 0;
8651 if (ptr == NULL)
8652 continue;
8654 /* If the bitfield does not start at byte boundary, handle
8655 the partial byte at the start. */
8656 if (bpos
8657 && (off == -1 || (pos >= off && len >= 1)))
8659 if (!BYTES_BIG_ENDIAN)
8661 int msk = (1 << bpos) - 1;
8662 buf[pos - rpos] &= ~msk;
8663 buf[pos - rpos] |= ptr[pos - o] & msk;
8664 if (mask)
8666 if (fieldsize > 1 || epos == 0)
8667 mask[pos] &= msk;
8668 else
8669 mask[pos] &= (msk | ~((1 << epos) - 1));
8672 else
8674 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8675 buf[pos - rpos] &= msk;
8676 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8677 if (mask)
8679 if (fieldsize > 1 || epos == 0)
8680 mask[pos] &= ~msk;
8681 else
8682 mask[pos] &= (~msk
8683 | ((1 << (BITS_PER_UNIT - epos))
8684 - 1));
8688 /* If the bitfield does not end at byte boundary, handle
8689 the partial byte at the end. */
8690 if (epos
8691 && (off == -1
8692 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8694 if (!BYTES_BIG_ENDIAN)
8696 int msk = (1 << epos) - 1;
8697 buf[pos - rpos + fieldsize - 1] &= msk;
8698 buf[pos - rpos + fieldsize - 1]
8699 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8700 if (mask && (fieldsize > 1 || bpos == 0))
8701 mask[pos + fieldsize - 1] &= ~msk;
8703 else
8705 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8706 buf[pos - rpos + fieldsize - 1] &= ~msk;
8707 buf[pos - rpos + fieldsize - 1]
8708 |= ptr[pos + fieldsize - 1 - o] & msk;
8709 if (mask && (fieldsize > 1 || bpos == 0))
8710 mask[pos + fieldsize - 1] &= msk;
8713 if (off == -1
8714 || (pos >= off
8715 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8717 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8718 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8719 memset (mask + pos + (bpos != 0), 0,
8720 fieldsize - (bpos != 0) - (epos != 0));
8722 else
8724 /* Partial overlap. */
8725 HOST_WIDE_INT fsz = fieldsize;
8726 gcc_assert (mask == NULL);
8727 if (pos < off)
8729 fsz -= (off - pos);
8730 pos = off;
8732 if (pos + fsz > (HOST_WIDE_INT) off + len)
8733 fsz = (HOST_WIDE_INT) off + len - pos;
8734 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8736 continue;
8739 if (off == -1
8740 || (pos >= off
8741 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8743 int fldsize = fieldsize;
8744 if (off == -1)
8746 tree fld = DECL_CHAIN (field);
8747 while (fld)
8749 if (TREE_CODE (fld) == FIELD_DECL)
8750 break;
8751 fld = DECL_CHAIN (fld);
8753 if (fld == NULL_TREE)
8754 fldsize = len - pos;
8756 r = native_encode_initializer (val, ptr ? ptr + pos - o
8757 : NULL,
8758 fldsize,
8759 off == -1 ? -1 : 0,
8760 mask ? mask + pos : NULL);
8761 if (!r)
8762 return 0;
8763 if (off == -1
8764 && fldsize != fieldsize
8765 && r > fieldsize
8766 && pos + r > total_bytes)
8767 total_bytes = pos + r;
8769 else
8771 /* Partial overlap. */
8772 unsigned char *p = NULL;
8773 int no = 0;
8774 int l;
8775 gcc_assert (mask == NULL);
8776 if (pos >= off)
8778 if (ptr)
8779 p = ptr + pos - off;
8780 l = MIN ((HOST_WIDE_INT) off + len - pos,
8781 fieldsize);
8783 else
8785 p = ptr;
8786 no = off - pos;
8787 l = len;
8789 if (!native_encode_initializer (val, p, l, no, NULL))
8790 return 0;
8793 return MIN (total_bytes - off, len);
8795 return 0;
8800 /* Subroutine of native_interpret_expr. Interpret the contents of
8801 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8802 If the buffer cannot be interpreted, return NULL_TREE. */
8804 static tree
8805 native_interpret_int (tree type, const unsigned char *ptr, int len)
8807 int total_bytes;
8808 if (TREE_CODE (type) == BITINT_TYPE)
8810 struct bitint_info info;
8811 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8812 gcc_assert (ok);
8813 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8814 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8816 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8817 /* More work is needed when adding _BitInt support to PDP endian
8818 if limb is smaller than word, or if _BitInt limb ordering doesn't
8819 match target endianity here. */
8820 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8821 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8822 || (GET_MODE_SIZE (limb_mode)
8823 >= UNITS_PER_WORD)));
8825 else
8826 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8828 else
8829 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8831 if (total_bytes > len)
8832 return NULL_TREE;
8834 wide_int result = wi::from_buffer (ptr, total_bytes);
8836 return wide_int_to_tree (type, result);
8840 /* Subroutine of native_interpret_expr. Interpret the contents of
8841 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8842 If the buffer cannot be interpreted, return NULL_TREE. */
8844 static tree
8845 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8847 scalar_mode mode = SCALAR_TYPE_MODE (type);
8848 int total_bytes = GET_MODE_SIZE (mode);
8849 double_int result;
8850 FIXED_VALUE_TYPE fixed_value;
8852 if (total_bytes > len
8853 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8854 return NULL_TREE;
8856 result = double_int::from_buffer (ptr, total_bytes);
8857 fixed_value = fixed_from_double_int (result, mode);
8859 return build_fixed (type, fixed_value);
8863 /* Subroutine of native_interpret_expr. Interpret the contents of
8864 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8865 If the buffer cannot be interpreted, return NULL_TREE. */
8867 tree
8868 native_interpret_real (tree type, const unsigned char *ptr, int len)
8870 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8871 int total_bytes = GET_MODE_SIZE (mode);
8872 unsigned char value;
8873 /* There are always 32 bits in each long, no matter the size of
8874 the hosts long. We handle floating point representations with
8875 up to 192 bits. */
8876 REAL_VALUE_TYPE r;
8877 long tmp[6];
8879 if (total_bytes > len || total_bytes > 24)
8880 return NULL_TREE;
8881 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8883 memset (tmp, 0, sizeof (tmp));
8884 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8885 bitpos += BITS_PER_UNIT)
8887 /* Both OFFSET and BYTE index within a long;
8888 bitpos indexes the whole float. */
8889 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8890 if (UNITS_PER_WORD < 4)
8892 int word = byte / UNITS_PER_WORD;
8893 if (WORDS_BIG_ENDIAN)
8894 word = (words - 1) - word;
8895 offset = word * UNITS_PER_WORD;
8896 if (BYTES_BIG_ENDIAN)
8897 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8898 else
8899 offset += byte % UNITS_PER_WORD;
8901 else
8903 offset = byte;
8904 if (BYTES_BIG_ENDIAN)
8906 /* Reverse bytes within each long, or within the entire float
8907 if it's smaller than a long (for HFmode). */
8908 offset = MIN (3, total_bytes - 1) - offset;
8909 gcc_assert (offset >= 0);
8912 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8914 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8917 real_from_target (&r, tmp, mode);
8918 return build_real (type, r);
8922 /* Subroutine of native_interpret_expr. Interpret the contents of
8923 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8924 If the buffer cannot be interpreted, return NULL_TREE. */
8926 static tree
8927 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8929 tree etype, rpart, ipart;
8930 int size;
8932 etype = TREE_TYPE (type);
8933 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8934 if (size * 2 > len)
8935 return NULL_TREE;
8936 rpart = native_interpret_expr (etype, ptr, size);
8937 if (!rpart)
8938 return NULL_TREE;
8939 ipart = native_interpret_expr (etype, ptr+size, size);
8940 if (!ipart)
8941 return NULL_TREE;
8942 return build_complex (type, rpart, ipart);
8945 /* Read a vector of type TYPE from the target memory image given by BYTES,
8946 which contains LEN bytes. The vector is known to be encodable using
8947 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8949 Return the vector on success, otherwise return null. */
8951 static tree
8952 native_interpret_vector_part (tree type, const unsigned char *bytes,
8953 unsigned int len, unsigned int npatterns,
8954 unsigned int nelts_per_pattern)
8956 tree elt_type = TREE_TYPE (type);
8957 if (VECTOR_BOOLEAN_TYPE_P (type)
8958 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8960 /* This is the only case in which elements can be smaller than a byte.
8961 Element 0 is always in the lsb of the containing byte. */
8962 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8963 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8964 return NULL_TREE;
8966 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8967 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8969 unsigned int bit_index = i * elt_bits;
8970 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8971 unsigned int lsb = bit_index % BITS_PER_UNIT;
8972 builder.quick_push (bytes[byte_index] & (1 << lsb)
8973 ? build_all_ones_cst (elt_type)
8974 : build_zero_cst (elt_type));
8976 return builder.build ();
8979 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8980 if (elt_bytes * npatterns * nelts_per_pattern > len)
8981 return NULL_TREE;
8983 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8984 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8986 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8987 if (!elt)
8988 return NULL_TREE;
8989 builder.quick_push (elt);
8990 bytes += elt_bytes;
8992 return builder.build ();
8995 /* Subroutine of native_interpret_expr. Interpret the contents of
8996 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8997 If the buffer cannot be interpreted, return NULL_TREE. */
8999 static tree
9000 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
9002 unsigned HOST_WIDE_INT size;
9004 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
9005 || size > len)
9006 return NULL_TREE;
9008 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
9009 return native_interpret_vector_part (type, ptr, len, count, 1);
9013 /* Subroutine of fold_view_convert_expr. Interpret the contents of
9014 the buffer PTR of length LEN as a constant of type TYPE. For
9015 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
9016 we return a REAL_CST, etc... If the buffer cannot be interpreted,
9017 return NULL_TREE. */
9019 tree
9020 native_interpret_expr (tree type, const unsigned char *ptr, int len)
9022 switch (TREE_CODE (type))
9024 case INTEGER_TYPE:
9025 case ENUMERAL_TYPE:
9026 case BOOLEAN_TYPE:
9027 case POINTER_TYPE:
9028 case REFERENCE_TYPE:
9029 case OFFSET_TYPE:
9030 case BITINT_TYPE:
9031 return native_interpret_int (type, ptr, len);
9033 case REAL_TYPE:
9034 if (tree ret = native_interpret_real (type, ptr, len))
9036 /* For floating point values in composite modes, punt if this
9037 folding doesn't preserve bit representation. As the mode doesn't
9038 have fixed precision while GCC pretends it does, there could be
9039 valid values that GCC can't really represent accurately.
9040 See PR95450. Even for other modes, e.g. x86 XFmode can have some
9041 bit combinationations which GCC doesn't preserve. */
9042 unsigned char buf[24 * 2];
9043 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
9044 int total_bytes = GET_MODE_SIZE (mode);
9045 memcpy (buf + 24, ptr, total_bytes);
9046 clear_type_padding_in_mask (type, buf + 24);
9047 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
9048 || memcmp (buf + 24, buf, total_bytes) != 0)
9049 return NULL_TREE;
9050 return ret;
9052 return NULL_TREE;
9054 case FIXED_POINT_TYPE:
9055 return native_interpret_fixed (type, ptr, len);
9057 case COMPLEX_TYPE:
9058 return native_interpret_complex (type, ptr, len);
9060 case VECTOR_TYPE:
9061 return native_interpret_vector (type, ptr, len);
9063 default:
9064 return NULL_TREE;
9068 /* Returns true if we can interpret the contents of a native encoding
9069 as TYPE. */
9071 bool
9072 can_native_interpret_type_p (tree type)
9074 switch (TREE_CODE (type))
9076 case INTEGER_TYPE:
9077 case ENUMERAL_TYPE:
9078 case BOOLEAN_TYPE:
9079 case POINTER_TYPE:
9080 case REFERENCE_TYPE:
9081 case FIXED_POINT_TYPE:
9082 case REAL_TYPE:
9083 case COMPLEX_TYPE:
9084 case VECTOR_TYPE:
9085 case OFFSET_TYPE:
9086 return true;
9087 default:
9088 return false;
9092 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
9093 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
9095 tree
9096 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
9097 int len)
9099 vec<constructor_elt, va_gc> *elts = NULL;
9100 if (TREE_CODE (type) == ARRAY_TYPE)
9102 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
9103 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
9104 return NULL_TREE;
9106 HOST_WIDE_INT cnt = 0;
9107 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
9109 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
9110 return NULL_TREE;
9111 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
9113 if (eltsz == 0)
9114 cnt = 0;
9115 HOST_WIDE_INT pos = 0;
9116 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
9118 tree v = NULL_TREE;
9119 if (pos >= len || pos + eltsz > len)
9120 return NULL_TREE;
9121 if (can_native_interpret_type_p (TREE_TYPE (type)))
9123 v = native_interpret_expr (TREE_TYPE (type),
9124 ptr + off + pos, eltsz);
9125 if (v == NULL_TREE)
9126 return NULL_TREE;
9128 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9129 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9130 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
9131 eltsz);
9132 if (v == NULL_TREE)
9133 return NULL_TREE;
9134 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9136 return build_constructor (type, elts);
9138 if (TREE_CODE (type) != RECORD_TYPE)
9139 return NULL_TREE;
9140 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9142 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
9143 || is_empty_type (TREE_TYPE (field)))
9144 continue;
9145 tree fld = field;
9146 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9147 int diff = 0;
9148 tree v = NULL_TREE;
9149 if (DECL_BIT_FIELD (field))
9151 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9152 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9154 poly_int64 bitoffset;
9155 poly_uint64 field_offset, fld_offset;
9156 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9157 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9158 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9159 else
9160 bitoffset = 0;
9161 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9162 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9163 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9164 - TYPE_PRECISION (TREE_TYPE (field)));
9165 if (!bitoffset.is_constant (&bitoff)
9166 || bitoff < 0
9167 || bitoff > diff)
9168 return NULL_TREE;
9170 else
9172 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9173 return NULL_TREE;
9174 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9175 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9176 bpos %= BITS_PER_UNIT;
9177 fieldsize += bpos;
9178 fieldsize += BITS_PER_UNIT - 1;
9179 fieldsize /= BITS_PER_UNIT;
9180 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9181 if (repr_type == NULL_TREE)
9182 return NULL_TREE;
9183 sz = int_size_in_bytes (repr_type);
9184 if (sz < 0 || sz > len)
9185 return NULL_TREE;
9186 pos = int_byte_position (field);
9187 if (pos < 0 || pos > len || pos + fieldsize > len)
9188 return NULL_TREE;
9189 HOST_WIDE_INT rpos;
9190 if (pos + sz <= len)
9191 rpos = pos;
9192 else
9194 rpos = len - sz;
9195 gcc_assert (rpos <= pos);
9197 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9198 pos = rpos;
9199 diff = (TYPE_PRECISION (repr_type)
9200 - TYPE_PRECISION (TREE_TYPE (field)));
9201 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9202 if (v == NULL_TREE)
9203 return NULL_TREE;
9204 fld = NULL_TREE;
9208 if (fld)
9210 sz = int_size_in_bytes (TREE_TYPE (fld));
9211 if (sz < 0 || sz > len)
9212 return NULL_TREE;
9213 tree byte_pos = byte_position (fld);
9214 if (!tree_fits_shwi_p (byte_pos))
9215 return NULL_TREE;
9216 pos = tree_to_shwi (byte_pos);
9217 if (pos < 0 || pos > len || pos + sz > len)
9218 return NULL_TREE;
9220 if (fld == NULL_TREE)
9221 /* Already handled above. */;
9222 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9224 v = native_interpret_expr (TREE_TYPE (fld),
9225 ptr + off + pos, sz);
9226 if (v == NULL_TREE)
9227 return NULL_TREE;
9229 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9230 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9231 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9232 if (v == NULL_TREE)
9233 return NULL_TREE;
9234 if (fld != field)
9236 if (TREE_CODE (v) != INTEGER_CST)
9237 return NULL_TREE;
9239 /* FIXME: Figure out how to handle PDP endian bitfields. */
9240 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9241 return NULL_TREE;
9242 if (!BYTES_BIG_ENDIAN)
9243 v = wide_int_to_tree (TREE_TYPE (field),
9244 wi::lrshift (wi::to_wide (v), bitoff));
9245 else
9246 v = wide_int_to_tree (TREE_TYPE (field),
9247 wi::lrshift (wi::to_wide (v),
9248 diff - bitoff));
9250 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9252 return build_constructor (type, elts);
9255 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9256 or extracted constant positions and/or sizes aren't byte aligned. */
9258 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9259 bits between adjacent elements. AMNT should be within
9260 [0, BITS_PER_UNIT).
9261 Example, AMNT = 2:
9262 00011111|11100000 << 2 = 01111111|10000000
9263 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9265 void
9266 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9267 unsigned int amnt)
9269 if (amnt == 0)
9270 return;
9272 unsigned char carry_over = 0U;
9273 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9274 unsigned char clear_mask = (~0U) << amnt;
9276 for (unsigned int i = 0; i < sz; i++)
9278 unsigned prev_carry_over = carry_over;
9279 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9281 ptr[i] <<= amnt;
9282 if (i != 0)
9284 ptr[i] &= clear_mask;
9285 ptr[i] |= prev_carry_over;
9290 /* Like shift_bytes_in_array_left but for big-endian.
9291 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9292 bits between adjacent elements. AMNT should be within
9293 [0, BITS_PER_UNIT).
9294 Example, AMNT = 2:
9295 00011111|11100000 >> 2 = 00000111|11111000
9296 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9298 void
9299 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9300 unsigned int amnt)
9302 if (amnt == 0)
9303 return;
9305 unsigned char carry_over = 0U;
9306 unsigned char carry_mask = ~(~0U << amnt);
9308 for (unsigned int i = 0; i < sz; i++)
9310 unsigned prev_carry_over = carry_over;
9311 carry_over = ptr[i] & carry_mask;
9313 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9314 ptr[i] >>= amnt;
9315 ptr[i] |= prev_carry_over;
9319 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9320 directly on the VECTOR_CST encoding, in a way that works for variable-
9321 length vectors. Return the resulting VECTOR_CST on success or null
9322 on failure. */
9324 static tree
9325 fold_view_convert_vector_encoding (tree type, tree expr)
9327 tree expr_type = TREE_TYPE (expr);
9328 poly_uint64 type_bits, expr_bits;
9329 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9330 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9331 return NULL_TREE;
9333 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9334 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9335 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9336 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9338 /* We can only preserve the semantics of a stepped pattern if the new
9339 vector element is an integer of the same size. */
9340 if (VECTOR_CST_STEPPED_P (expr)
9341 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9342 return NULL_TREE;
9344 /* The number of bits needed to encode one element from every pattern
9345 of the original vector. */
9346 unsigned int expr_sequence_bits
9347 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9349 /* The number of bits needed to encode one element from every pattern
9350 of the result. */
9351 unsigned int type_sequence_bits
9352 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9354 /* Don't try to read more bytes than are available, which can happen
9355 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9356 The general VIEW_CONVERT handling can cope with that case, so there's
9357 no point complicating things here. */
9358 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9359 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9360 BITS_PER_UNIT);
9361 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9362 if (known_gt (buffer_bits, expr_bits))
9363 return NULL_TREE;
9365 /* Get enough bytes of EXPR to form the new encoding. */
9366 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9367 buffer.quick_grow (buffer_bytes);
9368 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9369 buffer_bits / expr_elt_bits)
9370 != (int) buffer_bytes)
9371 return NULL_TREE;
9373 /* Reencode the bytes as TYPE. */
9374 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9375 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9376 type_npatterns, nelts_per_pattern);
9379 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9380 TYPE at compile-time. If we're unable to perform the conversion
9381 return NULL_TREE. */
9383 static tree
9384 fold_view_convert_expr (tree type, tree expr)
9386 unsigned char buffer[128];
9387 unsigned char *buf;
9388 int len;
9389 HOST_WIDE_INT l;
9391 /* Check that the host and target are sane. */
9392 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9393 return NULL_TREE;
9395 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9396 if (tree res = fold_view_convert_vector_encoding (type, expr))
9397 return res;
9399 l = int_size_in_bytes (type);
9400 if (l > (int) sizeof (buffer)
9401 && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9403 buf = XALLOCAVEC (unsigned char, l);
9404 len = l;
9406 else
9408 buf = buffer;
9409 len = sizeof (buffer);
9411 len = native_encode_expr (expr, buf, len);
9412 if (len == 0)
9413 return NULL_TREE;
9415 return native_interpret_expr (type, buf, len);
9418 /* Build an expression for the address of T. Folds away INDIRECT_REF
9419 to avoid confusing the gimplify process. */
9421 tree
9422 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9424 /* The size of the object is not relevant when talking about its address. */
9425 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9426 t = TREE_OPERAND (t, 0);
9428 if (INDIRECT_REF_P (t))
9430 t = TREE_OPERAND (t, 0);
9432 if (TREE_TYPE (t) != ptrtype)
9433 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9435 else if (TREE_CODE (t) == MEM_REF
9436 && integer_zerop (TREE_OPERAND (t, 1)))
9438 t = TREE_OPERAND (t, 0);
9440 if (TREE_TYPE (t) != ptrtype)
9441 t = fold_convert_loc (loc, ptrtype, t);
9443 else if (TREE_CODE (t) == MEM_REF
9444 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9445 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9446 TREE_OPERAND (t, 0),
9447 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9448 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9450 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9452 if (TREE_TYPE (t) != ptrtype)
9453 t = fold_convert_loc (loc, ptrtype, t);
9455 else
9456 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9458 return t;
9461 /* Build an expression for the address of T. */
9463 tree
9464 build_fold_addr_expr_loc (location_t loc, tree t)
9466 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9468 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9471 /* Fold a unary expression of code CODE and type TYPE with operand
9472 OP0. Return the folded expression if folding is successful.
9473 Otherwise, return NULL_TREE. */
9475 tree
9476 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9478 tree tem;
9479 tree arg0;
9480 enum tree_code_class kind = TREE_CODE_CLASS (code);
9482 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9483 && TREE_CODE_LENGTH (code) == 1);
9485 arg0 = op0;
9486 if (arg0)
9488 if (CONVERT_EXPR_CODE_P (code)
9489 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9491 /* Don't use STRIP_NOPS, because signedness of argument type
9492 matters. */
9493 STRIP_SIGN_NOPS (arg0);
9495 else
9497 /* Strip any conversions that don't change the mode. This
9498 is safe for every expression, except for a comparison
9499 expression because its signedness is derived from its
9500 operands.
9502 Note that this is done as an internal manipulation within
9503 the constant folder, in order to find the simplest
9504 representation of the arguments so that their form can be
9505 studied. In any cases, the appropriate type conversions
9506 should be put back in the tree that will get out of the
9507 constant folder. */
9508 STRIP_NOPS (arg0);
9511 if (CONSTANT_CLASS_P (arg0))
9513 tree tem = const_unop (code, type, arg0);
9514 if (tem)
9516 if (TREE_TYPE (tem) != type)
9517 tem = fold_convert_loc (loc, type, tem);
9518 return tem;
9523 tem = generic_simplify (loc, code, type, op0);
9524 if (tem)
9525 return tem;
9527 if (TREE_CODE_CLASS (code) == tcc_unary)
9529 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9530 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9531 fold_build1_loc (loc, code, type,
9532 fold_convert_loc (loc, TREE_TYPE (op0),
9533 TREE_OPERAND (arg0, 1))));
9534 else if (TREE_CODE (arg0) == COND_EXPR)
9536 tree arg01 = TREE_OPERAND (arg0, 1);
9537 tree arg02 = TREE_OPERAND (arg0, 2);
9538 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9539 arg01 = fold_build1_loc (loc, code, type,
9540 fold_convert_loc (loc,
9541 TREE_TYPE (op0), arg01));
9542 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9543 arg02 = fold_build1_loc (loc, code, type,
9544 fold_convert_loc (loc,
9545 TREE_TYPE (op0), arg02));
9546 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9547 arg01, arg02);
9549 /* If this was a conversion, and all we did was to move into
9550 inside the COND_EXPR, bring it back out. But leave it if
9551 it is a conversion from integer to integer and the
9552 result precision is no wider than a word since such a
9553 conversion is cheap and may be optimized away by combine,
9554 while it couldn't if it were outside the COND_EXPR. Then return
9555 so we don't get into an infinite recursion loop taking the
9556 conversion out and then back in. */
9558 if ((CONVERT_EXPR_CODE_P (code)
9559 || code == NON_LVALUE_EXPR)
9560 && TREE_CODE (tem) == COND_EXPR
9561 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9562 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9563 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9564 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9565 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9566 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9567 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9568 && (INTEGRAL_TYPE_P
9569 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9570 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9571 || flag_syntax_only))
9572 tem = build1_loc (loc, code, type,
9573 build3 (COND_EXPR,
9574 TREE_TYPE (TREE_OPERAND
9575 (TREE_OPERAND (tem, 1), 0)),
9576 TREE_OPERAND (tem, 0),
9577 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9578 TREE_OPERAND (TREE_OPERAND (tem, 2),
9579 0)));
9580 return tem;
9584 switch (code)
9586 case NON_LVALUE_EXPR:
9587 if (!maybe_lvalue_p (op0))
9588 return fold_convert_loc (loc, type, op0);
9589 return NULL_TREE;
9591 CASE_CONVERT:
9592 case FLOAT_EXPR:
9593 case FIX_TRUNC_EXPR:
9594 if (COMPARISON_CLASS_P (op0))
9596 /* If we have (type) (a CMP b) and type is an integral type, return
9597 new expression involving the new type. Canonicalize
9598 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9599 non-integral type.
9600 Do not fold the result as that would not simplify further, also
9601 folding again results in recursions. */
9602 if (TREE_CODE (type) == BOOLEAN_TYPE)
9603 return build2_loc (loc, TREE_CODE (op0), type,
9604 TREE_OPERAND (op0, 0),
9605 TREE_OPERAND (op0, 1));
9606 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9607 && TREE_CODE (type) != VECTOR_TYPE)
9608 return build3_loc (loc, COND_EXPR, type, op0,
9609 constant_boolean_node (true, type),
9610 constant_boolean_node (false, type));
9613 /* Handle (T *)&A.B.C for A being of type T and B and C
9614 living at offset zero. This occurs frequently in
9615 C++ upcasting and then accessing the base. */
9616 if (TREE_CODE (op0) == ADDR_EXPR
9617 && POINTER_TYPE_P (type)
9618 && handled_component_p (TREE_OPERAND (op0, 0)))
9620 poly_int64 bitsize, bitpos;
9621 tree offset;
9622 machine_mode mode;
9623 int unsignedp, reversep, volatilep;
9624 tree base
9625 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9626 &offset, &mode, &unsignedp, &reversep,
9627 &volatilep);
9628 /* If the reference was to a (constant) zero offset, we can use
9629 the address of the base if it has the same base type
9630 as the result type and the pointer type is unqualified. */
9631 if (!offset
9632 && known_eq (bitpos, 0)
9633 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9634 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9635 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9636 return fold_convert_loc (loc, type,
9637 build_fold_addr_expr_loc (loc, base));
9640 if (TREE_CODE (op0) == MODIFY_EXPR
9641 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9642 /* Detect assigning a bitfield. */
9643 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9644 && DECL_BIT_FIELD
9645 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9647 /* Don't leave an assignment inside a conversion
9648 unless assigning a bitfield. */
9649 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9650 /* First do the assignment, then return converted constant. */
9651 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9652 suppress_warning (tem /* What warning? */);
9653 TREE_USED (tem) = 1;
9654 return tem;
9657 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9658 constants (if x has signed type, the sign bit cannot be set
9659 in c). This folds extension into the BIT_AND_EXPR.
9660 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9661 very likely don't have maximal range for their precision and this
9662 transformation effectively doesn't preserve non-maximal ranges. */
9663 if (TREE_CODE (type) == INTEGER_TYPE
9664 && TREE_CODE (op0) == BIT_AND_EXPR
9665 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9667 tree and_expr = op0;
9668 tree and0 = TREE_OPERAND (and_expr, 0);
9669 tree and1 = TREE_OPERAND (and_expr, 1);
9670 int change = 0;
9672 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9673 || (TYPE_PRECISION (type)
9674 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9675 change = 1;
9676 else if (TYPE_PRECISION (TREE_TYPE (and1))
9677 <= HOST_BITS_PER_WIDE_INT
9678 && tree_fits_uhwi_p (and1))
9680 unsigned HOST_WIDE_INT cst;
9682 cst = tree_to_uhwi (and1);
9683 cst &= HOST_WIDE_INT_M1U
9684 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9685 change = (cst == 0);
9686 if (change
9687 && !flag_syntax_only
9688 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9689 == ZERO_EXTEND))
9691 tree uns = unsigned_type_for (TREE_TYPE (and0));
9692 and0 = fold_convert_loc (loc, uns, and0);
9693 and1 = fold_convert_loc (loc, uns, and1);
9696 if (change)
9698 tree and1_type = TREE_TYPE (and1);
9699 unsigned prec = MAX (TYPE_PRECISION (and1_type),
9700 TYPE_PRECISION (type));
9701 tem = force_fit_type (type,
9702 wide_int::from (wi::to_wide (and1), prec,
9703 TYPE_SIGN (and1_type)),
9704 0, TREE_OVERFLOW (and1));
9705 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9706 fold_convert_loc (loc, type, and0), tem);
9710 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9711 cast (T1)X will fold away. We assume that this happens when X itself
9712 is a cast. */
9713 if (POINTER_TYPE_P (type)
9714 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9715 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9717 tree arg00 = TREE_OPERAND (arg0, 0);
9718 tree arg01 = TREE_OPERAND (arg0, 1);
9720 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9721 when the pointed type needs higher alignment than
9722 the p+ first operand's pointed type. */
9723 if (!in_gimple_form
9724 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9725 && (min_align_of_type (TREE_TYPE (type))
9726 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9727 return NULL_TREE;
9729 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9730 when type is a reference type and arg00's type is not,
9731 because arg00 could be validly nullptr and if arg01 doesn't return,
9732 we don't want false positive binding of reference to nullptr. */
9733 if (TREE_CODE (type) == REFERENCE_TYPE
9734 && !in_gimple_form
9735 && sanitize_flags_p (SANITIZE_NULL)
9736 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9737 return NULL_TREE;
9739 arg00 = fold_convert_loc (loc, type, arg00);
9740 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9743 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9744 of the same precision, and X is an integer type not narrower than
9745 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9746 if (INTEGRAL_TYPE_P (type)
9747 && TREE_CODE (op0) == BIT_NOT_EXPR
9748 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9749 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9750 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9752 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9753 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9754 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9755 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9756 fold_convert_loc (loc, type, tem));
9759 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9760 type of X and Y (integer types only). */
9761 if (INTEGRAL_TYPE_P (type)
9762 && TREE_CODE (op0) == MULT_EXPR
9763 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9764 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9765 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9766 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9768 /* Be careful not to introduce new overflows. */
9769 tree mult_type;
9770 if (TYPE_OVERFLOW_WRAPS (type))
9771 mult_type = type;
9772 else
9773 mult_type = unsigned_type_for (type);
9775 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9777 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9778 fold_convert_loc (loc, mult_type,
9779 TREE_OPERAND (op0, 0)),
9780 fold_convert_loc (loc, mult_type,
9781 TREE_OPERAND (op0, 1)));
9782 return fold_convert_loc (loc, type, tem);
9786 return NULL_TREE;
9788 case VIEW_CONVERT_EXPR:
9789 if (TREE_CODE (op0) == MEM_REF)
9791 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9792 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9793 tem = fold_build2_loc (loc, MEM_REF, type,
9794 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9795 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9796 return tem;
9799 return NULL_TREE;
9801 case NEGATE_EXPR:
9802 tem = fold_negate_expr (loc, arg0);
9803 if (tem)
9804 return fold_convert_loc (loc, type, tem);
9805 return NULL_TREE;
9807 case ABS_EXPR:
9808 /* Convert fabs((double)float) into (double)fabsf(float). */
9809 if (TREE_CODE (arg0) == NOP_EXPR
9810 && TREE_CODE (type) == REAL_TYPE)
9812 tree targ0 = strip_float_extensions (arg0);
9813 if (targ0 != arg0)
9814 return fold_convert_loc (loc, type,
9815 fold_build1_loc (loc, ABS_EXPR,
9816 TREE_TYPE (targ0),
9817 targ0));
9819 return NULL_TREE;
9821 case BIT_NOT_EXPR:
9822 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9823 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9824 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9825 fold_convert_loc (loc, type,
9826 TREE_OPERAND (arg0, 0)))))
9827 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9828 fold_convert_loc (loc, type,
9829 TREE_OPERAND (arg0, 1)));
9830 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9831 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9832 fold_convert_loc (loc, type,
9833 TREE_OPERAND (arg0, 1)))))
9834 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9835 fold_convert_loc (loc, type,
9836 TREE_OPERAND (arg0, 0)), tem);
9838 return NULL_TREE;
9840 case TRUTH_NOT_EXPR:
9841 /* Note that the operand of this must be an int
9842 and its values must be 0 or 1.
9843 ("true" is a fixed value perhaps depending on the language,
9844 but we don't handle values other than 1 correctly yet.) */
9845 tem = fold_truth_not_expr (loc, arg0);
9846 if (!tem)
9847 return NULL_TREE;
9848 return fold_convert_loc (loc, type, tem);
9850 case INDIRECT_REF:
9851 /* Fold *&X to X if X is an lvalue. */
9852 if (TREE_CODE (op0) == ADDR_EXPR)
9854 tree op00 = TREE_OPERAND (op0, 0);
9855 if ((VAR_P (op00)
9856 || TREE_CODE (op00) == PARM_DECL
9857 || TREE_CODE (op00) == RESULT_DECL)
9858 && !TREE_READONLY (op00))
9859 return op00;
9861 return NULL_TREE;
9863 default:
9864 return NULL_TREE;
9865 } /* switch (code) */
9869 /* If the operation was a conversion do _not_ mark a resulting constant
9870 with TREE_OVERFLOW if the original constant was not. These conversions
9871 have implementation defined behavior and retaining the TREE_OVERFLOW
9872 flag here would confuse later passes such as VRP. */
9873 tree
9874 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9875 tree type, tree op0)
9877 tree res = fold_unary_loc (loc, code, type, op0);
9878 if (res
9879 && TREE_CODE (res) == INTEGER_CST
9880 && TREE_CODE (op0) == INTEGER_CST
9881 && CONVERT_EXPR_CODE_P (code))
9882 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9884 return res;
9887 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9888 operands OP0 and OP1. LOC is the location of the resulting expression.
9889 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9890 Return the folded expression if folding is successful. Otherwise,
9891 return NULL_TREE. */
9892 static tree
9893 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9894 tree arg0, tree arg1, tree op0, tree op1)
9896 tree tem;
9898 /* We only do these simplifications if we are optimizing. */
9899 if (!optimize)
9900 return NULL_TREE;
9902 /* Check for things like (A || B) && (A || C). We can convert this
9903 to A || (B && C). Note that either operator can be any of the four
9904 truth and/or operations and the transformation will still be
9905 valid. Also note that we only care about order for the
9906 ANDIF and ORIF operators. If B contains side effects, this
9907 might change the truth-value of A. */
9908 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9909 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9910 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9911 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9912 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9913 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9915 tree a00 = TREE_OPERAND (arg0, 0);
9916 tree a01 = TREE_OPERAND (arg0, 1);
9917 tree a10 = TREE_OPERAND (arg1, 0);
9918 tree a11 = TREE_OPERAND (arg1, 1);
9919 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9920 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9921 && (code == TRUTH_AND_EXPR
9922 || code == TRUTH_OR_EXPR));
9924 if (operand_equal_p (a00, a10, 0))
9925 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9926 fold_build2_loc (loc, code, type, a01, a11));
9927 else if (commutative && operand_equal_p (a00, a11, 0))
9928 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9929 fold_build2_loc (loc, code, type, a01, a10));
9930 else if (commutative && operand_equal_p (a01, a10, 0))
9931 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9932 fold_build2_loc (loc, code, type, a00, a11));
9934 /* This case if tricky because we must either have commutative
9935 operators or else A10 must not have side-effects. */
9937 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9938 && operand_equal_p (a01, a11, 0))
9939 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9940 fold_build2_loc (loc, code, type, a00, a10),
9941 a01);
9944 /* See if we can build a range comparison. */
9945 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9946 return tem;
9948 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9949 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9951 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9952 if (tem)
9953 return fold_build2_loc (loc, code, type, tem, arg1);
9956 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9957 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9959 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9960 if (tem)
9961 return fold_build2_loc (loc, code, type, arg0, tem);
9964 /* Check for the possibility of merging component references. If our
9965 lhs is another similar operation, try to merge its rhs with our
9966 rhs. Then try to merge our lhs and rhs. */
9967 if (TREE_CODE (arg0) == code
9968 && (tem = fold_truth_andor_1 (loc, code, type,
9969 TREE_OPERAND (arg0, 1), arg1)) != 0)
9970 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9972 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9973 return tem;
9975 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9976 if (param_logical_op_non_short_circuit != -1)
9977 logical_op_non_short_circuit
9978 = param_logical_op_non_short_circuit;
9979 if (logical_op_non_short_circuit
9980 && !sanitize_coverage_p ()
9981 && (code == TRUTH_AND_EXPR
9982 || code == TRUTH_ANDIF_EXPR
9983 || code == TRUTH_OR_EXPR
9984 || code == TRUTH_ORIF_EXPR))
9986 enum tree_code ncode, icode;
9988 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9989 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9990 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9992 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9993 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9994 We don't want to pack more than two leafs to a non-IF AND/OR
9995 expression.
9996 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9997 equal to IF-CODE, then we don't want to add right-hand operand.
9998 If the inner right-hand side of left-hand operand has
9999 side-effects, or isn't simple, then we can't add to it,
10000 as otherwise we might destroy if-sequence. */
10001 if (TREE_CODE (arg0) == icode
10002 && simple_condition_p (arg1)
10003 /* Needed for sequence points to handle trappings, and
10004 side-effects. */
10005 && simple_condition_p (TREE_OPERAND (arg0, 1)))
10007 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
10008 arg1);
10009 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
10010 tem);
10012 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
10013 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
10014 else if (TREE_CODE (arg1) == icode
10015 && simple_condition_p (arg0)
10016 /* Needed for sequence points to handle trappings, and
10017 side-effects. */
10018 && simple_condition_p (TREE_OPERAND (arg1, 0)))
10020 tem = fold_build2_loc (loc, ncode, type,
10021 arg0, TREE_OPERAND (arg1, 0));
10022 return fold_build2_loc (loc, icode, type, tem,
10023 TREE_OPERAND (arg1, 1));
10025 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
10026 into (A OR B).
10027 For sequence point consistancy, we need to check for trapping,
10028 and side-effects. */
10029 else if (code == icode && simple_condition_p (arg0)
10030 && simple_condition_p (arg1))
10031 return fold_build2_loc (loc, ncode, type, arg0, arg1);
10034 return NULL_TREE;
10037 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
10038 by changing CODE to reduce the magnitude of constants involved in
10039 ARG0 of the comparison.
10040 Returns a canonicalized comparison tree if a simplification was
10041 possible, otherwise returns NULL_TREE.
10042 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
10043 valid if signed overflow is undefined. */
10045 static tree
10046 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
10047 tree arg0, tree arg1,
10048 bool *strict_overflow_p)
10050 enum tree_code code0 = TREE_CODE (arg0);
10051 tree t, cst0 = NULL_TREE;
10052 int sgn0;
10054 /* Match A +- CST code arg1. We can change this only if overflow
10055 is undefined. */
10056 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10057 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
10058 /* In principle pointers also have undefined overflow behavior,
10059 but that causes problems elsewhere. */
10060 && !POINTER_TYPE_P (TREE_TYPE (arg0))
10061 && (code0 == MINUS_EXPR
10062 || code0 == PLUS_EXPR)
10063 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
10064 return NULL_TREE;
10066 /* Identify the constant in arg0 and its sign. */
10067 cst0 = TREE_OPERAND (arg0, 1);
10068 sgn0 = tree_int_cst_sgn (cst0);
10070 /* Overflowed constants and zero will cause problems. */
10071 if (integer_zerop (cst0)
10072 || TREE_OVERFLOW (cst0))
10073 return NULL_TREE;
10075 /* See if we can reduce the magnitude of the constant in
10076 arg0 by changing the comparison code. */
10077 /* A - CST < arg1 -> A - CST-1 <= arg1. */
10078 if (code == LT_EXPR
10079 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10080 code = LE_EXPR;
10081 /* A + CST > arg1 -> A + CST-1 >= arg1. */
10082 else if (code == GT_EXPR
10083 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10084 code = GE_EXPR;
10085 /* A + CST <= arg1 -> A + CST-1 < arg1. */
10086 else if (code == LE_EXPR
10087 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10088 code = LT_EXPR;
10089 /* A - CST >= arg1 -> A - CST-1 > arg1. */
10090 else if (code == GE_EXPR
10091 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10092 code = GT_EXPR;
10093 else
10094 return NULL_TREE;
10095 *strict_overflow_p = true;
10097 /* Now build the constant reduced in magnitude. But not if that
10098 would produce one outside of its types range. */
10099 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
10100 && ((sgn0 == 1
10101 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
10102 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
10103 || (sgn0 == -1
10104 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
10105 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
10106 return NULL_TREE;
10108 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
10109 cst0, build_int_cst (TREE_TYPE (cst0), 1));
10110 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
10111 t = fold_convert (TREE_TYPE (arg1), t);
10113 return fold_build2_loc (loc, code, type, t, arg1);
10116 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
10117 overflow further. Try to decrease the magnitude of constants involved
10118 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
10119 and put sole constants at the second argument position.
10120 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
10122 static tree
10123 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
10124 tree arg0, tree arg1)
10126 tree t;
10127 bool strict_overflow_p;
10128 const char * const warnmsg = G_("assuming signed overflow does not occur "
10129 "when reducing constant in comparison");
10131 /* Try canonicalization by simplifying arg0. */
10132 strict_overflow_p = false;
10133 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
10134 &strict_overflow_p);
10135 if (t)
10137 if (strict_overflow_p)
10138 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10139 return t;
10142 /* Try canonicalization by simplifying arg1 using the swapped
10143 comparison. */
10144 code = swap_tree_comparison (code);
10145 strict_overflow_p = false;
10146 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
10147 &strict_overflow_p);
10148 if (t && strict_overflow_p)
10149 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10150 return t;
10153 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
10154 space. This is used to avoid issuing overflow warnings for
10155 expressions like &p->x which cannot wrap. */
10157 static bool
10158 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10160 if (!POINTER_TYPE_P (TREE_TYPE (base)))
10161 return true;
10163 if (maybe_lt (bitpos, 0))
10164 return true;
10166 poly_wide_int wi_offset;
10167 int precision = TYPE_PRECISION (TREE_TYPE (base));
10168 if (offset == NULL_TREE)
10169 wi_offset = wi::zero (precision);
10170 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10171 return true;
10172 else
10173 wi_offset = wi::to_poly_wide (offset);
10175 wi::overflow_type overflow;
10176 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10177 precision);
10178 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10179 if (overflow)
10180 return true;
10182 poly_uint64 total_hwi, size;
10183 if (!total.to_uhwi (&total_hwi)
10184 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10185 &size)
10186 || known_eq (size, 0U))
10187 return true;
10189 if (known_le (total_hwi, size))
10190 return false;
10192 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10193 array. */
10194 if (TREE_CODE (base) == ADDR_EXPR
10195 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10196 &size)
10197 && maybe_ne (size, 0U)
10198 && known_le (total_hwi, size))
10199 return false;
10201 return true;
10204 /* Return a positive integer when the symbol DECL is known to have
10205 a nonzero address, zero when it's known not to (e.g., it's a weak
10206 symbol), and a negative integer when the symbol is not yet in the
10207 symbol table and so whether or not its address is zero is unknown.
10208 For function local objects always return positive integer. */
10209 static int
10210 maybe_nonzero_address (tree decl)
10212 /* Normally, don't do anything for variables and functions before symtab is
10213 built; it is quite possible that DECL will be declared weak later.
10214 But if folding_initializer, we need a constant answer now, so create
10215 the symtab entry and prevent later weak declaration. */
10216 if (DECL_P (decl) && decl_in_symtab_p (decl))
10217 if (struct symtab_node *symbol
10218 = (folding_initializer
10219 ? symtab_node::get_create (decl)
10220 : symtab_node::get (decl)))
10221 return symbol->nonzero_address ();
10223 /* Function local objects are never NULL. */
10224 if (DECL_P (decl)
10225 && (DECL_CONTEXT (decl)
10226 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10227 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10228 return 1;
10230 return -1;
10233 /* Subroutine of fold_binary. This routine performs all of the
10234 transformations that are common to the equality/inequality
10235 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10236 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10237 fold_binary should call fold_binary. Fold a comparison with
10238 tree code CODE and type TYPE with operands OP0 and OP1. Return
10239 the folded comparison or NULL_TREE. */
10241 static tree
10242 fold_comparison (location_t loc, enum tree_code code, tree type,
10243 tree op0, tree op1)
10245 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10246 tree arg0, arg1, tem;
10248 arg0 = op0;
10249 arg1 = op1;
10251 STRIP_SIGN_NOPS (arg0);
10252 STRIP_SIGN_NOPS (arg1);
10254 /* For comparisons of pointers we can decompose it to a compile time
10255 comparison of the base objects and the offsets into the object.
10256 This requires at least one operand being an ADDR_EXPR or a
10257 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10258 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10259 && (TREE_CODE (arg0) == ADDR_EXPR
10260 || TREE_CODE (arg1) == ADDR_EXPR
10261 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10262 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10264 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10265 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10266 machine_mode mode;
10267 int volatilep, reversep, unsignedp;
10268 bool indirect_base0 = false, indirect_base1 = false;
10270 /* Get base and offset for the access. Strip ADDR_EXPR for
10271 get_inner_reference, but put it back by stripping INDIRECT_REF
10272 off the base object if possible. indirect_baseN will be true
10273 if baseN is not an address but refers to the object itself. */
10274 base0 = arg0;
10275 if (TREE_CODE (arg0) == ADDR_EXPR)
10277 base0
10278 = get_inner_reference (TREE_OPERAND (arg0, 0),
10279 &bitsize, &bitpos0, &offset0, &mode,
10280 &unsignedp, &reversep, &volatilep);
10281 if (INDIRECT_REF_P (base0))
10282 base0 = TREE_OPERAND (base0, 0);
10283 else
10284 indirect_base0 = true;
10286 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10288 base0 = TREE_OPERAND (arg0, 0);
10289 STRIP_SIGN_NOPS (base0);
10290 if (TREE_CODE (base0) == ADDR_EXPR)
10292 base0
10293 = get_inner_reference (TREE_OPERAND (base0, 0),
10294 &bitsize, &bitpos0, &offset0, &mode,
10295 &unsignedp, &reversep, &volatilep);
10296 if (INDIRECT_REF_P (base0))
10297 base0 = TREE_OPERAND (base0, 0);
10298 else
10299 indirect_base0 = true;
10301 if (offset0 == NULL_TREE || integer_zerop (offset0))
10302 offset0 = TREE_OPERAND (arg0, 1);
10303 else
10304 offset0 = size_binop (PLUS_EXPR, offset0,
10305 TREE_OPERAND (arg0, 1));
10306 if (poly_int_tree_p (offset0))
10308 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10309 TYPE_PRECISION (sizetype));
10310 tem <<= LOG2_BITS_PER_UNIT;
10311 tem += bitpos0;
10312 if (tem.to_shwi (&bitpos0))
10313 offset0 = NULL_TREE;
10317 base1 = arg1;
10318 if (TREE_CODE (arg1) == ADDR_EXPR)
10320 base1
10321 = get_inner_reference (TREE_OPERAND (arg1, 0),
10322 &bitsize, &bitpos1, &offset1, &mode,
10323 &unsignedp, &reversep, &volatilep);
10324 if (INDIRECT_REF_P (base1))
10325 base1 = TREE_OPERAND (base1, 0);
10326 else
10327 indirect_base1 = true;
10329 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10331 base1 = TREE_OPERAND (arg1, 0);
10332 STRIP_SIGN_NOPS (base1);
10333 if (TREE_CODE (base1) == ADDR_EXPR)
10335 base1
10336 = get_inner_reference (TREE_OPERAND (base1, 0),
10337 &bitsize, &bitpos1, &offset1, &mode,
10338 &unsignedp, &reversep, &volatilep);
10339 if (INDIRECT_REF_P (base1))
10340 base1 = TREE_OPERAND (base1, 0);
10341 else
10342 indirect_base1 = true;
10344 if (offset1 == NULL_TREE || integer_zerop (offset1))
10345 offset1 = TREE_OPERAND (arg1, 1);
10346 else
10347 offset1 = size_binop (PLUS_EXPR, offset1,
10348 TREE_OPERAND (arg1, 1));
10349 if (poly_int_tree_p (offset1))
10351 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10352 TYPE_PRECISION (sizetype));
10353 tem <<= LOG2_BITS_PER_UNIT;
10354 tem += bitpos1;
10355 if (tem.to_shwi (&bitpos1))
10356 offset1 = NULL_TREE;
10360 /* If we have equivalent bases we might be able to simplify. */
10361 if (indirect_base0 == indirect_base1
10362 && operand_equal_p (base0, base1,
10363 indirect_base0 ? OEP_ADDRESS_OF : 0))
10365 /* We can fold this expression to a constant if the non-constant
10366 offset parts are equal. */
10367 if ((offset0 == offset1
10368 || (offset0 && offset1
10369 && operand_equal_p (offset0, offset1, 0)))
10370 && (equality_code
10371 || (indirect_base0
10372 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10373 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10375 if (!equality_code
10376 && maybe_ne (bitpos0, bitpos1)
10377 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10378 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10379 fold_overflow_warning (("assuming pointer wraparound does not "
10380 "occur when comparing P +- C1 with "
10381 "P +- C2"),
10382 WARN_STRICT_OVERFLOW_CONDITIONAL);
10384 switch (code)
10386 case EQ_EXPR:
10387 if (known_eq (bitpos0, bitpos1))
10388 return constant_boolean_node (true, type);
10389 if (known_ne (bitpos0, bitpos1))
10390 return constant_boolean_node (false, type);
10391 break;
10392 case NE_EXPR:
10393 if (known_ne (bitpos0, bitpos1))
10394 return constant_boolean_node (true, type);
10395 if (known_eq (bitpos0, bitpos1))
10396 return constant_boolean_node (false, type);
10397 break;
10398 case LT_EXPR:
10399 if (known_lt (bitpos0, bitpos1))
10400 return constant_boolean_node (true, type);
10401 if (known_ge (bitpos0, bitpos1))
10402 return constant_boolean_node (false, type);
10403 break;
10404 case LE_EXPR:
10405 if (known_le (bitpos0, bitpos1))
10406 return constant_boolean_node (true, type);
10407 if (known_gt (bitpos0, bitpos1))
10408 return constant_boolean_node (false, type);
10409 break;
10410 case GE_EXPR:
10411 if (known_ge (bitpos0, bitpos1))
10412 return constant_boolean_node (true, type);
10413 if (known_lt (bitpos0, bitpos1))
10414 return constant_boolean_node (false, type);
10415 break;
10416 case GT_EXPR:
10417 if (known_gt (bitpos0, bitpos1))
10418 return constant_boolean_node (true, type);
10419 if (known_le (bitpos0, bitpos1))
10420 return constant_boolean_node (false, type);
10421 break;
10422 default:;
10425 /* We can simplify the comparison to a comparison of the variable
10426 offset parts if the constant offset parts are equal.
10427 Be careful to use signed sizetype here because otherwise we
10428 mess with array offsets in the wrong way. This is possible
10429 because pointer arithmetic is restricted to retain within an
10430 object and overflow on pointer differences is undefined as of
10431 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10432 else if (known_eq (bitpos0, bitpos1)
10433 && (equality_code
10434 || (indirect_base0
10435 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10436 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10438 /* By converting to signed sizetype we cover middle-end pointer
10439 arithmetic which operates on unsigned pointer types of size
10440 type size and ARRAY_REF offsets which are properly sign or
10441 zero extended from their type in case it is narrower than
10442 sizetype. */
10443 if (offset0 == NULL_TREE)
10444 offset0 = build_int_cst (ssizetype, 0);
10445 else
10446 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10447 if (offset1 == NULL_TREE)
10448 offset1 = build_int_cst (ssizetype, 0);
10449 else
10450 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10452 if (!equality_code
10453 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10454 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10455 fold_overflow_warning (("assuming pointer wraparound does not "
10456 "occur when comparing P +- C1 with "
10457 "P +- C2"),
10458 WARN_STRICT_OVERFLOW_COMPARISON);
10460 return fold_build2_loc (loc, code, type, offset0, offset1);
10463 /* For equal offsets we can simplify to a comparison of the
10464 base addresses. */
10465 else if (known_eq (bitpos0, bitpos1)
10466 && (indirect_base0
10467 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10468 && (indirect_base1
10469 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10470 && ((offset0 == offset1)
10471 || (offset0 && offset1
10472 && operand_equal_p (offset0, offset1, 0))))
10474 if (indirect_base0)
10475 base0 = build_fold_addr_expr_loc (loc, base0);
10476 if (indirect_base1)
10477 base1 = build_fold_addr_expr_loc (loc, base1);
10478 return fold_build2_loc (loc, code, type, base0, base1);
10480 /* Comparison between an ordinary (non-weak) symbol and a null
10481 pointer can be eliminated since such symbols must have a non
10482 null address. In C, relational expressions between pointers
10483 to objects and null pointers are undefined. The results
10484 below follow the C++ rules with the additional property that
10485 every object pointer compares greater than a null pointer.
10487 else if (((DECL_P (base0)
10488 && maybe_nonzero_address (base0) > 0
10489 /* Avoid folding references to struct members at offset 0 to
10490 prevent tests like '&ptr->firstmember == 0' from getting
10491 eliminated. When ptr is null, although the -> expression
10492 is strictly speaking invalid, GCC retains it as a matter
10493 of QoI. See PR c/44555. */
10494 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10495 || CONSTANT_CLASS_P (base0))
10496 && indirect_base0
10497 /* The caller guarantees that when one of the arguments is
10498 constant (i.e., null in this case) it is second. */
10499 && integer_zerop (arg1))
10501 switch (code)
10503 case EQ_EXPR:
10504 case LE_EXPR:
10505 case LT_EXPR:
10506 return constant_boolean_node (false, type);
10507 case GE_EXPR:
10508 case GT_EXPR:
10509 case NE_EXPR:
10510 return constant_boolean_node (true, type);
10511 default:
10512 gcc_unreachable ();
10517 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10518 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10519 the resulting offset is smaller in absolute value than the
10520 original one and has the same sign. */
10521 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10522 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10523 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10524 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10525 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10526 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10527 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10528 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10530 tree const1 = TREE_OPERAND (arg0, 1);
10531 tree const2 = TREE_OPERAND (arg1, 1);
10532 tree variable1 = TREE_OPERAND (arg0, 0);
10533 tree variable2 = TREE_OPERAND (arg1, 0);
10534 tree cst;
10535 const char * const warnmsg = G_("assuming signed overflow does not "
10536 "occur when combining constants around "
10537 "a comparison");
10539 /* Put the constant on the side where it doesn't overflow and is
10540 of lower absolute value and of same sign than before. */
10541 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10542 ? MINUS_EXPR : PLUS_EXPR,
10543 const2, const1);
10544 if (!TREE_OVERFLOW (cst)
10545 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10546 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10548 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10549 return fold_build2_loc (loc, code, type,
10550 variable1,
10551 fold_build2_loc (loc, TREE_CODE (arg1),
10552 TREE_TYPE (arg1),
10553 variable2, cst));
10556 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10557 ? MINUS_EXPR : PLUS_EXPR,
10558 const1, const2);
10559 if (!TREE_OVERFLOW (cst)
10560 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10561 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10563 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10564 return fold_build2_loc (loc, code, type,
10565 fold_build2_loc (loc, TREE_CODE (arg0),
10566 TREE_TYPE (arg0),
10567 variable1, cst),
10568 variable2);
10572 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10573 if (tem)
10574 return tem;
10576 /* If we are comparing an expression that just has comparisons
10577 of two integer values, arithmetic expressions of those comparisons,
10578 and constants, we can simplify it. There are only three cases
10579 to check: the two values can either be equal, the first can be
10580 greater, or the second can be greater. Fold the expression for
10581 those three values. Since each value must be 0 or 1, we have
10582 eight possibilities, each of which corresponds to the constant 0
10583 or 1 or one of the six possible comparisons.
10585 This handles common cases like (a > b) == 0 but also handles
10586 expressions like ((x > y) - (y > x)) > 0, which supposedly
10587 occur in macroized code. */
10589 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10591 tree cval1 = 0, cval2 = 0;
10593 if (twoval_comparison_p (arg0, &cval1, &cval2)
10594 /* Don't handle degenerate cases here; they should already
10595 have been handled anyway. */
10596 && cval1 != 0 && cval2 != 0
10597 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10598 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10599 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10600 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10601 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10602 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10603 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10605 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10606 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10608 /* We can't just pass T to eval_subst in case cval1 or cval2
10609 was the same as ARG1. */
10611 tree high_result
10612 = fold_build2_loc (loc, code, type,
10613 eval_subst (loc, arg0, cval1, maxval,
10614 cval2, minval),
10615 arg1);
10616 tree equal_result
10617 = fold_build2_loc (loc, code, type,
10618 eval_subst (loc, arg0, cval1, maxval,
10619 cval2, maxval),
10620 arg1);
10621 tree low_result
10622 = fold_build2_loc (loc, code, type,
10623 eval_subst (loc, arg0, cval1, minval,
10624 cval2, maxval),
10625 arg1);
10627 /* All three of these results should be 0 or 1. Confirm they are.
10628 Then use those values to select the proper code to use. */
10630 if (TREE_CODE (high_result) == INTEGER_CST
10631 && TREE_CODE (equal_result) == INTEGER_CST
10632 && TREE_CODE (low_result) == INTEGER_CST)
10634 /* Make a 3-bit mask with the high-order bit being the
10635 value for `>', the next for '=', and the low for '<'. */
10636 switch ((integer_onep (high_result) * 4)
10637 + (integer_onep (equal_result) * 2)
10638 + integer_onep (low_result))
10640 case 0:
10641 /* Always false. */
10642 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10643 case 1:
10644 code = LT_EXPR;
10645 break;
10646 case 2:
10647 code = EQ_EXPR;
10648 break;
10649 case 3:
10650 code = LE_EXPR;
10651 break;
10652 case 4:
10653 code = GT_EXPR;
10654 break;
10655 case 5:
10656 code = NE_EXPR;
10657 break;
10658 case 6:
10659 code = GE_EXPR;
10660 break;
10661 case 7:
10662 /* Always true. */
10663 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10666 return fold_build2_loc (loc, code, type, cval1, cval2);
10671 return NULL_TREE;
10675 /* Subroutine of fold_binary. Optimize complex multiplications of the
10676 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10677 argument EXPR represents the expression "z" of type TYPE. */
10679 static tree
10680 fold_mult_zconjz (location_t loc, tree type, tree expr)
10682 tree itype = TREE_TYPE (type);
10683 tree rpart, ipart, tem;
10685 if (TREE_CODE (expr) == COMPLEX_EXPR)
10687 rpart = TREE_OPERAND (expr, 0);
10688 ipart = TREE_OPERAND (expr, 1);
10690 else if (TREE_CODE (expr) == COMPLEX_CST)
10692 rpart = TREE_REALPART (expr);
10693 ipart = TREE_IMAGPART (expr);
10695 else
10697 expr = save_expr (expr);
10698 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10699 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10702 rpart = save_expr (rpart);
10703 ipart = save_expr (ipart);
10704 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10705 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10706 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10707 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10708 build_zero_cst (itype));
10712 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10713 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10714 true if successful. */
10716 static bool
10717 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10719 unsigned HOST_WIDE_INT i, nunits;
10721 if (TREE_CODE (arg) == VECTOR_CST
10722 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10724 for (i = 0; i < nunits; ++i)
10725 elts[i] = VECTOR_CST_ELT (arg, i);
10727 else if (TREE_CODE (arg) == CONSTRUCTOR)
10729 constructor_elt *elt;
10731 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10732 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10733 return false;
10734 else
10735 elts[i] = elt->value;
10737 else
10738 return false;
10739 for (; i < nelts; i++)
10740 elts[i]
10741 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10742 return true;
10745 /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10746 mask for VLA vec_perm folding.
10747 REASON if specified, will contain the reason why SEL is not suitable.
10748 Used only for debugging and unit-testing. */
10750 static bool
10751 valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10752 const vec_perm_indices &sel,
10753 const char **reason = NULL)
10755 unsigned sel_npatterns = sel.encoding ().npatterns ();
10756 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10758 if (!(pow2p_hwi (sel_npatterns)
10759 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10760 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10762 if (reason)
10763 *reason = "npatterns is not power of 2";
10764 return false;
10767 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10768 For eg: sel.length = 2 + 2x, and sel npatterns = 4. */
10769 poly_uint64 esel;
10770 if (!multiple_p (sel.length (), sel_npatterns, &esel))
10772 if (reason)
10773 *reason = "sel.length is not multiple of sel_npatterns";
10774 return false;
10777 if (sel_nelts_per_pattern < 3)
10778 return true;
10780 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10782 poly_uint64 a1 = sel[pattern + sel_npatterns];
10783 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10784 HOST_WIDE_INT step;
10785 if (!poly_int64 (a2 - a1).is_constant (&step))
10787 if (reason)
10788 *reason = "step is not constant";
10789 return false;
10791 // FIXME: Punt on step < 0 for now, revisit later.
10792 if (step < 0)
10793 return false;
10794 if (step == 0)
10795 continue;
10797 if (!pow2p_hwi (step))
10799 if (reason)
10800 *reason = "step is not power of 2";
10801 return false;
10804 /* Ensure that stepped sequence of the pattern selects elements
10805 only from the same input vector. */
10806 uint64_t q1, qe;
10807 poly_uint64 r1, re;
10808 poly_uint64 ae = a1 + (esel - 2) * step;
10809 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10811 if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10812 && can_div_trunc_p (ae, arg_len, &qe, &re)
10813 && q1 == qe))
10815 if (reason)
10816 *reason = "crossed input vectors";
10817 return false;
10820 /* Ensure that the stepped sequence always selects from the same
10821 input pattern. */
10822 tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10823 unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10825 if (!multiple_p (step, arg_npatterns))
10827 if (reason)
10828 *reason = "step is not multiple of npatterns";
10829 return false;
10832 /* If a1 chooses base element from arg, ensure that it's a natural
10833 stepped sequence, ie, (arg[2] - arg[1]) == (arg[1] - arg[0])
10834 to preserve arg's encoding. */
10836 if (maybe_lt (r1, arg_npatterns))
10838 unsigned HOST_WIDE_INT index;
10839 if (!r1.is_constant (&index))
10840 return false;
10842 tree arg_elem0 = vector_cst_elt (arg, index);
10843 tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10844 tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10846 tree step1, step2;
10847 if (!(step1 = const_binop (MINUS_EXPR, arg_elem1, arg_elem0))
10848 || !(step2 = const_binop (MINUS_EXPR, arg_elem2, arg_elem1))
10849 || !operand_equal_p (step1, step2, 0))
10851 if (reason)
10852 *reason = "not a natural stepped sequence";
10853 return false;
10858 return true;
10861 /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10862 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10863 REASON has same purpose as described in
10864 valid_mask_for_fold_vec_perm_cst_p. */
10866 static tree
10867 fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10868 const char **reason = NULL)
10870 unsigned res_npatterns, res_nelts_per_pattern;
10871 unsigned HOST_WIDE_INT res_nelts;
10873 /* First try to implement the fold in a VLA-friendly way.
10875 (1) If the selector is simply a duplication of N elements, the
10876 result is likewise a duplication of N elements.
10878 (2) If the selector is N elements followed by a duplication
10879 of N elements, the result is too.
10881 (3) If the selector is N elements followed by an interleaving
10882 of N linear series, the situation is more complex.
10884 valid_mask_for_fold_vec_perm_cst_p detects whether we
10885 can handle this case. If we can, then each of the N linear
10886 series either (a) selects the same element each time or
10887 (b) selects a linear series from one of the input patterns.
10889 If (b) holds for one of the linear series, the result
10890 will contain a linear series, and so the result will have
10891 the same shape as the selector. If (a) holds for all of
10892 the linear series, the result will be the same as (2) above.
10894 (b) can only hold if one of the input patterns has a
10895 stepped encoding. */
10897 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10899 res_npatterns = sel.encoding ().npatterns ();
10900 res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10901 if (res_nelts_per_pattern == 3
10902 && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10903 && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10904 res_nelts_per_pattern = 2;
10905 res_nelts = res_npatterns * res_nelts_per_pattern;
10907 else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10909 res_npatterns = res_nelts;
10910 res_nelts_per_pattern = 1;
10912 else
10913 return NULL_TREE;
10915 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10916 for (unsigned i = 0; i < res_nelts; i++)
10918 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10919 uint64_t q;
10920 poly_uint64 r;
10921 unsigned HOST_WIDE_INT index;
10923 /* Punt if sel[i] /trunc_div len cannot be determined,
10924 because the input vector to be chosen will depend on
10925 runtime vector length.
10926 For example if len == 4 + 4x, and sel[i] == 4,
10927 If len at runtime equals 4, we choose arg1[0].
10928 For any other value of len > 4 at runtime, we choose arg0[4].
10929 which makes the element choice dependent on runtime vector length. */
10930 if (!can_div_trunc_p (sel[i], len, &q, &r))
10932 if (reason)
10933 *reason = "cannot divide selector element by arg len";
10934 return NULL_TREE;
10937 /* sel[i] % len will give the index of element in the chosen input
10938 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10939 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10940 if (!r.is_constant (&index))
10942 if (reason)
10943 *reason = "remainder is not constant";
10944 return NULL_TREE;
10947 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10948 tree elem = vector_cst_elt (arg, index);
10949 out_elts.quick_push (elem);
10952 return out_elts.build ();
10955 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10956 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10957 NULL_TREE otherwise. */
10959 tree
10960 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10962 unsigned int i;
10963 unsigned HOST_WIDE_INT nelts;
10965 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10966 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10967 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10969 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10970 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10971 return NULL_TREE;
10973 if (TREE_CODE (arg0) == VECTOR_CST
10974 && TREE_CODE (arg1) == VECTOR_CST)
10975 return fold_vec_perm_cst (type, arg0, arg1, sel);
10977 /* For fall back case, we want to ensure we have VLS vectors
10978 with equal length. */
10979 if (!sel.length ().is_constant (&nelts))
10980 return NULL_TREE;
10982 gcc_assert (known_eq (sel.length (),
10983 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10984 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10985 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10986 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10987 return NULL_TREE;
10989 vec<constructor_elt, va_gc> *v;
10990 vec_alloc (v, nelts);
10991 for (i = 0; i < nelts; i++)
10993 HOST_WIDE_INT index;
10994 if (!sel[i].is_constant (&index))
10995 return NULL_TREE;
10996 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10998 return build_constructor (type, v);
11001 /* Try to fold a pointer difference of type TYPE two address expressions of
11002 array references AREF0 and AREF1 using location LOC. Return a
11003 simplified expression for the difference or NULL_TREE. */
11005 static tree
11006 fold_addr_of_array_ref_difference (location_t loc, tree type,
11007 tree aref0, tree aref1,
11008 bool use_pointer_diff)
11010 tree base0 = TREE_OPERAND (aref0, 0);
11011 tree base1 = TREE_OPERAND (aref1, 0);
11012 tree base_offset = build_int_cst (type, 0);
11014 /* If the bases are array references as well, recurse. If the bases
11015 are pointer indirections compute the difference of the pointers.
11016 If the bases are equal, we are set. */
11017 if ((TREE_CODE (base0) == ARRAY_REF
11018 && TREE_CODE (base1) == ARRAY_REF
11019 && (base_offset
11020 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
11021 use_pointer_diff)))
11022 || (INDIRECT_REF_P (base0)
11023 && INDIRECT_REF_P (base1)
11024 && (base_offset
11025 = use_pointer_diff
11026 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
11027 TREE_OPERAND (base0, 0),
11028 TREE_OPERAND (base1, 0))
11029 : fold_binary_loc (loc, MINUS_EXPR, type,
11030 fold_convert (type,
11031 TREE_OPERAND (base0, 0)),
11032 fold_convert (type,
11033 TREE_OPERAND (base1, 0)))))
11034 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
11036 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
11037 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
11038 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
11039 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
11040 return fold_build2_loc (loc, PLUS_EXPR, type,
11041 base_offset,
11042 fold_build2_loc (loc, MULT_EXPR, type,
11043 diff, esz));
11045 return NULL_TREE;
11048 /* If the real or vector real constant CST of type TYPE has an exact
11049 inverse, return it, else return NULL. */
11051 tree
11052 exact_inverse (tree type, tree cst)
11054 REAL_VALUE_TYPE r;
11055 tree unit_type;
11056 machine_mode mode;
11058 switch (TREE_CODE (cst))
11060 case REAL_CST:
11061 r = TREE_REAL_CST (cst);
11063 if (exact_real_inverse (TYPE_MODE (type), &r))
11064 return build_real (type, r);
11066 return NULL_TREE;
11068 case VECTOR_CST:
11070 unit_type = TREE_TYPE (type);
11071 mode = TYPE_MODE (unit_type);
11073 tree_vector_builder elts;
11074 if (!elts.new_unary_operation (type, cst, false))
11075 return NULL_TREE;
11076 unsigned int count = elts.encoded_nelts ();
11077 for (unsigned int i = 0; i < count; ++i)
11079 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
11080 if (!exact_real_inverse (mode, &r))
11081 return NULL_TREE;
11082 elts.quick_push (build_real (unit_type, r));
11085 return elts.build ();
11088 default:
11089 return NULL_TREE;
11093 /* Mask out the tz least significant bits of X of type TYPE where
11094 tz is the number of trailing zeroes in Y. */
11095 static wide_int
11096 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
11098 int tz = wi::ctz (y);
11099 if (tz > 0)
11100 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
11101 return x;
11104 /* Return true when T is an address and is known to be nonzero.
11105 For floating point we further ensure that T is not denormal.
11106 Similar logic is present in nonzero_address in rtlanal.h.
11108 If the return value is based on the assumption that signed overflow
11109 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
11110 change *STRICT_OVERFLOW_P. */
11112 static bool
11113 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
11115 tree type = TREE_TYPE (t);
11116 enum tree_code code;
11118 /* Doing something useful for floating point would need more work. */
11119 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11120 return false;
11122 code = TREE_CODE (t);
11123 switch (TREE_CODE_CLASS (code))
11125 case tcc_unary:
11126 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11127 strict_overflow_p);
11128 case tcc_binary:
11129 case tcc_comparison:
11130 return tree_binary_nonzero_warnv_p (code, type,
11131 TREE_OPERAND (t, 0),
11132 TREE_OPERAND (t, 1),
11133 strict_overflow_p);
11134 case tcc_constant:
11135 case tcc_declaration:
11136 case tcc_reference:
11137 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11139 default:
11140 break;
11143 switch (code)
11145 case TRUTH_NOT_EXPR:
11146 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11147 strict_overflow_p);
11149 case TRUTH_AND_EXPR:
11150 case TRUTH_OR_EXPR:
11151 case TRUTH_XOR_EXPR:
11152 return tree_binary_nonzero_warnv_p (code, type,
11153 TREE_OPERAND (t, 0),
11154 TREE_OPERAND (t, 1),
11155 strict_overflow_p);
11157 case COND_EXPR:
11158 case CONSTRUCTOR:
11159 case OBJ_TYPE_REF:
11160 case ADDR_EXPR:
11161 case WITH_SIZE_EXPR:
11162 case SSA_NAME:
11163 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11165 case COMPOUND_EXPR:
11166 case MODIFY_EXPR:
11167 case BIND_EXPR:
11168 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
11169 strict_overflow_p);
11171 case SAVE_EXPR:
11172 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
11173 strict_overflow_p);
11175 case CALL_EXPR:
11177 tree fndecl = get_callee_fndecl (t);
11178 if (!fndecl) return false;
11179 if (flag_delete_null_pointer_checks && !flag_check_new
11180 && DECL_IS_OPERATOR_NEW_P (fndecl)
11181 && !TREE_NOTHROW (fndecl))
11182 return true;
11183 if (flag_delete_null_pointer_checks
11184 && lookup_attribute ("returns_nonnull",
11185 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
11186 return true;
11187 return alloca_call_p (t);
11190 default:
11191 break;
11193 return false;
11196 /* Return true when T is an address and is known to be nonzero.
11197 Handle warnings about undefined signed overflow. */
11199 bool
11200 tree_expr_nonzero_p (tree t)
11202 bool ret, strict_overflow_p;
11204 strict_overflow_p = false;
11205 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
11206 if (strict_overflow_p)
11207 fold_overflow_warning (("assuming signed overflow does not occur when "
11208 "determining that expression is always "
11209 "non-zero"),
11210 WARN_STRICT_OVERFLOW_MISC);
11211 return ret;
11214 /* Return true if T is known not to be equal to an integer W. */
11216 bool
11217 expr_not_equal_to (tree t, const wide_int &w)
11219 int_range_max vr;
11220 switch (TREE_CODE (t))
11222 case INTEGER_CST:
11223 return wi::to_wide (t) != w;
11225 case SSA_NAME:
11226 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11227 return false;
11229 get_range_query (cfun)->range_of_expr (vr, t);
11230 if (!vr.undefined_p () && !vr.contains_p (w))
11231 return true;
11232 /* If T has some known zero bits and W has any of those bits set,
11233 then T is known not to be equal to W. */
11234 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
11235 TYPE_PRECISION (TREE_TYPE (t))), 0))
11236 return true;
11237 return false;
11239 default:
11240 return false;
11244 /* Fold a binary expression of code CODE and type TYPE with operands
11245 OP0 and OP1. LOC is the location of the resulting expression.
11246 Return the folded expression if folding is successful. Otherwise,
11247 return NULL_TREE. */
11249 tree
11250 fold_binary_loc (location_t loc, enum tree_code code, tree type,
11251 tree op0, tree op1)
11253 enum tree_code_class kind = TREE_CODE_CLASS (code);
11254 tree arg0, arg1, tem;
11255 tree t1 = NULL_TREE;
11256 bool strict_overflow_p;
11257 unsigned int prec;
11259 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11260 && TREE_CODE_LENGTH (code) == 2
11261 && op0 != NULL_TREE
11262 && op1 != NULL_TREE);
11264 arg0 = op0;
11265 arg1 = op1;
11267 /* Strip any conversions that don't change the mode. This is
11268 safe for every expression, except for a comparison expression
11269 because its signedness is derived from its operands. So, in
11270 the latter case, only strip conversions that don't change the
11271 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11272 preserved.
11274 Note that this is done as an internal manipulation within the
11275 constant folder, in order to find the simplest representation
11276 of the arguments so that their form can be studied. In any
11277 cases, the appropriate type conversions should be put back in
11278 the tree that will get out of the constant folder. */
11280 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11282 STRIP_SIGN_NOPS (arg0);
11283 STRIP_SIGN_NOPS (arg1);
11285 else
11287 STRIP_NOPS (arg0);
11288 STRIP_NOPS (arg1);
11291 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11292 constant but we can't do arithmetic on them. */
11293 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11295 tem = const_binop (code, type, arg0, arg1);
11296 if (tem != NULL_TREE)
11298 if (TREE_TYPE (tem) != type)
11299 tem = fold_convert_loc (loc, type, tem);
11300 return tem;
11304 /* If this is a commutative operation, and ARG0 is a constant, move it
11305 to ARG1 to reduce the number of tests below. */
11306 if (commutative_tree_code (code)
11307 && tree_swap_operands_p (arg0, arg1))
11308 return fold_build2_loc (loc, code, type, op1, op0);
11310 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11311 to ARG1 to reduce the number of tests below. */
11312 if (kind == tcc_comparison
11313 && tree_swap_operands_p (arg0, arg1))
11314 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11316 tem = generic_simplify (loc, code, type, op0, op1);
11317 if (tem)
11318 return tem;
11320 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11322 First check for cases where an arithmetic operation is applied to a
11323 compound, conditional, or comparison operation. Push the arithmetic
11324 operation inside the compound or conditional to see if any folding
11325 can then be done. Convert comparison to conditional for this purpose.
11326 The also optimizes non-constant cases that used to be done in
11327 expand_expr.
11329 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11330 one of the operands is a comparison and the other is a comparison, a
11331 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11332 code below would make the expression more complex. Change it to a
11333 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11334 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11336 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11337 || code == EQ_EXPR || code == NE_EXPR)
11338 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11339 && ((truth_value_p (TREE_CODE (arg0))
11340 && (truth_value_p (TREE_CODE (arg1))
11341 || (TREE_CODE (arg1) == BIT_AND_EXPR
11342 && integer_onep (TREE_OPERAND (arg1, 1)))))
11343 || (truth_value_p (TREE_CODE (arg1))
11344 && (truth_value_p (TREE_CODE (arg0))
11345 || (TREE_CODE (arg0) == BIT_AND_EXPR
11346 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11348 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11349 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11350 : TRUTH_XOR_EXPR,
11351 boolean_type_node,
11352 fold_convert_loc (loc, boolean_type_node, arg0),
11353 fold_convert_loc (loc, boolean_type_node, arg1));
11355 if (code == EQ_EXPR)
11356 tem = invert_truthvalue_loc (loc, tem);
11358 return fold_convert_loc (loc, type, tem);
11361 if (TREE_CODE_CLASS (code) == tcc_binary
11362 || TREE_CODE_CLASS (code) == tcc_comparison)
11364 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11366 tem = fold_build2_loc (loc, code, type,
11367 fold_convert_loc (loc, TREE_TYPE (op0),
11368 TREE_OPERAND (arg0, 1)), op1);
11369 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11370 tem);
11372 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11374 tem = fold_build2_loc (loc, code, type, op0,
11375 fold_convert_loc (loc, TREE_TYPE (op1),
11376 TREE_OPERAND (arg1, 1)));
11377 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11378 tem);
11381 if (TREE_CODE (arg0) == COND_EXPR
11382 || TREE_CODE (arg0) == VEC_COND_EXPR
11383 || COMPARISON_CLASS_P (arg0))
11385 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11386 arg0, arg1,
11387 /*cond_first_p=*/1);
11388 if (tem != NULL_TREE)
11389 return tem;
11392 if (TREE_CODE (arg1) == COND_EXPR
11393 || TREE_CODE (arg1) == VEC_COND_EXPR
11394 || COMPARISON_CLASS_P (arg1))
11396 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11397 arg1, arg0,
11398 /*cond_first_p=*/0);
11399 if (tem != NULL_TREE)
11400 return tem;
11404 switch (code)
11406 case MEM_REF:
11407 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11408 if (TREE_CODE (arg0) == ADDR_EXPR
11409 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11411 tree iref = TREE_OPERAND (arg0, 0);
11412 return fold_build2 (MEM_REF, type,
11413 TREE_OPERAND (iref, 0),
11414 int_const_binop (PLUS_EXPR, arg1,
11415 TREE_OPERAND (iref, 1)));
11418 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11419 if (TREE_CODE (arg0) == ADDR_EXPR
11420 && handled_component_p (TREE_OPERAND (arg0, 0)))
11422 tree base;
11423 poly_int64 coffset;
11424 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11425 &coffset);
11426 if (!base)
11427 return NULL_TREE;
11428 return fold_build2 (MEM_REF, type,
11429 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11430 int_const_binop (PLUS_EXPR, arg1,
11431 size_int (coffset)));
11434 return NULL_TREE;
11436 case POINTER_PLUS_EXPR:
11437 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11438 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11439 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11440 return fold_convert_loc (loc, type,
11441 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11442 fold_convert_loc (loc, sizetype,
11443 arg1),
11444 fold_convert_loc (loc, sizetype,
11445 arg0)));
11447 return NULL_TREE;
11449 case PLUS_EXPR:
11450 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11452 /* X + (X / CST) * -CST is X % CST. */
11453 if (TREE_CODE (arg1) == MULT_EXPR
11454 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11455 && operand_equal_p (arg0,
11456 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11458 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11459 tree cst1 = TREE_OPERAND (arg1, 1);
11460 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11461 cst1, cst0);
11462 if (sum && integer_zerop (sum))
11463 return fold_convert_loc (loc, type,
11464 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11465 TREE_TYPE (arg0), arg0,
11466 cst0));
11470 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11471 one. Make sure the type is not saturating and has the signedness of
11472 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11473 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11474 if ((TREE_CODE (arg0) == MULT_EXPR
11475 || TREE_CODE (arg1) == MULT_EXPR)
11476 && !TYPE_SATURATING (type)
11477 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11478 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11479 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11481 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11482 if (tem)
11483 return tem;
11486 if (! FLOAT_TYPE_P (type))
11488 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11489 (plus (plus (mult) (mult)) (foo)) so that we can
11490 take advantage of the factoring cases below. */
11491 if (ANY_INTEGRAL_TYPE_P (type)
11492 && TYPE_OVERFLOW_WRAPS (type)
11493 && (((TREE_CODE (arg0) == PLUS_EXPR
11494 || TREE_CODE (arg0) == MINUS_EXPR)
11495 && TREE_CODE (arg1) == MULT_EXPR)
11496 || ((TREE_CODE (arg1) == PLUS_EXPR
11497 || TREE_CODE (arg1) == MINUS_EXPR)
11498 && TREE_CODE (arg0) == MULT_EXPR)))
11500 tree parg0, parg1, parg, marg;
11501 enum tree_code pcode;
11503 if (TREE_CODE (arg1) == MULT_EXPR)
11504 parg = arg0, marg = arg1;
11505 else
11506 parg = arg1, marg = arg0;
11507 pcode = TREE_CODE (parg);
11508 parg0 = TREE_OPERAND (parg, 0);
11509 parg1 = TREE_OPERAND (parg, 1);
11510 STRIP_NOPS (parg0);
11511 STRIP_NOPS (parg1);
11513 if (TREE_CODE (parg0) == MULT_EXPR
11514 && TREE_CODE (parg1) != MULT_EXPR)
11515 return fold_build2_loc (loc, pcode, type,
11516 fold_build2_loc (loc, PLUS_EXPR, type,
11517 fold_convert_loc (loc, type,
11518 parg0),
11519 fold_convert_loc (loc, type,
11520 marg)),
11521 fold_convert_loc (loc, type, parg1));
11522 if (TREE_CODE (parg0) != MULT_EXPR
11523 && TREE_CODE (parg1) == MULT_EXPR)
11524 return
11525 fold_build2_loc (loc, PLUS_EXPR, type,
11526 fold_convert_loc (loc, type, parg0),
11527 fold_build2_loc (loc, pcode, type,
11528 fold_convert_loc (loc, type, marg),
11529 fold_convert_loc (loc, type,
11530 parg1)));
11533 else
11535 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11536 to __complex__ ( x, y ). This is not the same for SNaNs or
11537 if signed zeros are involved. */
11538 if (!HONOR_SNANS (arg0)
11539 && !HONOR_SIGNED_ZEROS (arg0)
11540 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11542 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11543 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11544 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11545 bool arg0rz = false, arg0iz = false;
11546 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11547 || (arg0i && (arg0iz = real_zerop (arg0i))))
11549 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11550 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11551 if (arg0rz && arg1i && real_zerop (arg1i))
11553 tree rp = arg1r ? arg1r
11554 : build1 (REALPART_EXPR, rtype, arg1);
11555 tree ip = arg0i ? arg0i
11556 : build1 (IMAGPART_EXPR, rtype, arg0);
11557 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11559 else if (arg0iz && arg1r && real_zerop (arg1r))
11561 tree rp = arg0r ? arg0r
11562 : build1 (REALPART_EXPR, rtype, arg0);
11563 tree ip = arg1i ? arg1i
11564 : build1 (IMAGPART_EXPR, rtype, arg1);
11565 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11570 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11571 We associate floats only if the user has specified
11572 -fassociative-math. */
11573 if (flag_associative_math
11574 && TREE_CODE (arg1) == PLUS_EXPR
11575 && TREE_CODE (arg0) != MULT_EXPR)
11577 tree tree10 = TREE_OPERAND (arg1, 0);
11578 tree tree11 = TREE_OPERAND (arg1, 1);
11579 if (TREE_CODE (tree11) == MULT_EXPR
11580 && TREE_CODE (tree10) == MULT_EXPR)
11582 tree tree0;
11583 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11584 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11587 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11588 We associate floats only if the user has specified
11589 -fassociative-math. */
11590 if (flag_associative_math
11591 && TREE_CODE (arg0) == PLUS_EXPR
11592 && TREE_CODE (arg1) != MULT_EXPR)
11594 tree tree00 = TREE_OPERAND (arg0, 0);
11595 tree tree01 = TREE_OPERAND (arg0, 1);
11596 if (TREE_CODE (tree01) == MULT_EXPR
11597 && TREE_CODE (tree00) == MULT_EXPR)
11599 tree tree0;
11600 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11601 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11606 bit_rotate:
11607 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11608 is a rotate of A by C1 bits. */
11609 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11610 is a rotate of A by B bits.
11611 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11612 though in this case CODE must be | and not + or ^, otherwise
11613 it doesn't return A when B is 0. */
11615 enum tree_code code0, code1;
11616 tree rtype;
11617 code0 = TREE_CODE (arg0);
11618 code1 = TREE_CODE (arg1);
11619 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11620 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11621 && operand_equal_p (TREE_OPERAND (arg0, 0),
11622 TREE_OPERAND (arg1, 0), 0)
11623 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11624 TYPE_UNSIGNED (rtype))
11625 /* Only create rotates in complete modes. Other cases are not
11626 expanded properly. */
11627 && (element_precision (rtype)
11628 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11630 tree tree01, tree11;
11631 tree orig_tree01, orig_tree11;
11632 enum tree_code code01, code11;
11634 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11635 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11636 STRIP_NOPS (tree01);
11637 STRIP_NOPS (tree11);
11638 code01 = TREE_CODE (tree01);
11639 code11 = TREE_CODE (tree11);
11640 if (code11 != MINUS_EXPR
11641 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11643 std::swap (code0, code1);
11644 std::swap (code01, code11);
11645 std::swap (tree01, tree11);
11646 std::swap (orig_tree01, orig_tree11);
11648 if (code01 == INTEGER_CST
11649 && code11 == INTEGER_CST
11650 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11651 == element_precision (rtype)))
11653 tem = build2_loc (loc, LROTATE_EXPR,
11654 rtype, TREE_OPERAND (arg0, 0),
11655 code0 == LSHIFT_EXPR
11656 ? orig_tree01 : orig_tree11);
11657 return fold_convert_loc (loc, type, tem);
11659 else if (code11 == MINUS_EXPR)
11661 tree tree110, tree111;
11662 tree110 = TREE_OPERAND (tree11, 0);
11663 tree111 = TREE_OPERAND (tree11, 1);
11664 STRIP_NOPS (tree110);
11665 STRIP_NOPS (tree111);
11666 if (TREE_CODE (tree110) == INTEGER_CST
11667 && compare_tree_int (tree110,
11668 element_precision (rtype)) == 0
11669 && operand_equal_p (tree01, tree111, 0))
11671 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11672 ? LROTATE_EXPR : RROTATE_EXPR),
11673 rtype, TREE_OPERAND (arg0, 0),
11674 orig_tree01);
11675 return fold_convert_loc (loc, type, tem);
11678 else if (code == BIT_IOR_EXPR
11679 && code11 == BIT_AND_EXPR
11680 && pow2p_hwi (element_precision (rtype)))
11682 tree tree110, tree111;
11683 tree110 = TREE_OPERAND (tree11, 0);
11684 tree111 = TREE_OPERAND (tree11, 1);
11685 STRIP_NOPS (tree110);
11686 STRIP_NOPS (tree111);
11687 if (TREE_CODE (tree110) == NEGATE_EXPR
11688 && TREE_CODE (tree111) == INTEGER_CST
11689 && compare_tree_int (tree111,
11690 element_precision (rtype) - 1) == 0
11691 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11693 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11694 ? LROTATE_EXPR : RROTATE_EXPR),
11695 rtype, TREE_OPERAND (arg0, 0),
11696 orig_tree01);
11697 return fold_convert_loc (loc, type, tem);
11703 associate:
11704 /* In most languages, can't associate operations on floats through
11705 parentheses. Rather than remember where the parentheses were, we
11706 don't associate floats at all, unless the user has specified
11707 -fassociative-math.
11708 And, we need to make sure type is not saturating. */
11710 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11711 && !TYPE_SATURATING (type)
11712 && !TYPE_OVERFLOW_SANITIZED (type))
11714 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11715 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11716 tree atype = type;
11717 bool ok = true;
11719 /* Split both trees into variables, constants, and literals. Then
11720 associate each group together, the constants with literals,
11721 then the result with variables. This increases the chances of
11722 literals being recombined later and of generating relocatable
11723 expressions for the sum of a constant and literal. */
11724 var0 = split_tree (arg0, type, code,
11725 &minus_var0, &con0, &minus_con0,
11726 &lit0, &minus_lit0, 0);
11727 var1 = split_tree (arg1, type, code,
11728 &minus_var1, &con1, &minus_con1,
11729 &lit1, &minus_lit1, code == MINUS_EXPR);
11731 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11732 if (code == MINUS_EXPR)
11733 code = PLUS_EXPR;
11735 /* With undefined overflow prefer doing association in a type
11736 which wraps on overflow, if that is one of the operand types. */
11737 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11738 && !TYPE_OVERFLOW_WRAPS (type))
11740 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11741 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11742 atype = TREE_TYPE (arg0);
11743 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11744 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11745 atype = TREE_TYPE (arg1);
11746 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11749 /* With undefined overflow we can only associate constants with one
11750 variable, and constants whose association doesn't overflow. */
11751 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11752 && !TYPE_OVERFLOW_WRAPS (atype))
11754 if ((var0 && var1) || (minus_var0 && minus_var1))
11756 /* ??? If split_tree would handle NEGATE_EXPR we could
11757 simply reject these cases and the allowed cases would
11758 be the var0/minus_var1 ones. */
11759 tree tmp0 = var0 ? var0 : minus_var0;
11760 tree tmp1 = var1 ? var1 : minus_var1;
11761 bool one_neg = false;
11763 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11765 tmp0 = TREE_OPERAND (tmp0, 0);
11766 one_neg = !one_neg;
11768 if (CONVERT_EXPR_P (tmp0)
11769 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11770 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11771 <= TYPE_PRECISION (atype)))
11772 tmp0 = TREE_OPERAND (tmp0, 0);
11773 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11775 tmp1 = TREE_OPERAND (tmp1, 0);
11776 one_neg = !one_neg;
11778 if (CONVERT_EXPR_P (tmp1)
11779 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11780 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11781 <= TYPE_PRECISION (atype)))
11782 tmp1 = TREE_OPERAND (tmp1, 0);
11783 /* The only case we can still associate with two variables
11784 is if they cancel out. */
11785 if (!one_neg
11786 || !operand_equal_p (tmp0, tmp1, 0))
11787 ok = false;
11789 else if ((var0 && minus_var1
11790 && ! operand_equal_p (var0, minus_var1, 0))
11791 || (minus_var0 && var1
11792 && ! operand_equal_p (minus_var0, var1, 0)))
11793 ok = false;
11796 /* Only do something if we found more than two objects. Otherwise,
11797 nothing has changed and we risk infinite recursion. */
11798 if (ok
11799 && ((var0 != 0) + (var1 != 0)
11800 + (minus_var0 != 0) + (minus_var1 != 0)
11801 + (con0 != 0) + (con1 != 0)
11802 + (minus_con0 != 0) + (minus_con1 != 0)
11803 + (lit0 != 0) + (lit1 != 0)
11804 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11806 int var0_origin = (var0 != 0) + 2 * (var1 != 0);
11807 int minus_var0_origin
11808 = (minus_var0 != 0) + 2 * (minus_var1 != 0);
11809 int con0_origin = (con0 != 0) + 2 * (con1 != 0);
11810 int minus_con0_origin
11811 = (minus_con0 != 0) + 2 * (minus_con1 != 0);
11812 int lit0_origin = (lit0 != 0) + 2 * (lit1 != 0);
11813 int minus_lit0_origin
11814 = (minus_lit0 != 0) + 2 * (minus_lit1 != 0);
11815 var0 = associate_trees (loc, var0, var1, code, atype);
11816 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11817 code, atype);
11818 con0 = associate_trees (loc, con0, con1, code, atype);
11819 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11820 code, atype);
11821 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11822 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11823 code, atype);
11825 if (minus_var0 && var0)
11827 var0_origin |= minus_var0_origin;
11828 var0 = associate_trees (loc, var0, minus_var0,
11829 MINUS_EXPR, atype);
11830 minus_var0 = 0;
11831 minus_var0_origin = 0;
11833 if (minus_con0 && con0)
11835 con0_origin |= minus_con0_origin;
11836 con0 = associate_trees (loc, con0, minus_con0,
11837 MINUS_EXPR, atype);
11838 minus_con0 = 0;
11839 minus_con0_origin = 0;
11842 /* Preserve the MINUS_EXPR if the negative part of the literal is
11843 greater than the positive part. Otherwise, the multiplicative
11844 folding code (i.e extract_muldiv) may be fooled in case
11845 unsigned constants are subtracted, like in the following
11846 example: ((X*2 + 4) - 8U)/2. */
11847 if (minus_lit0 && lit0)
11849 if (TREE_CODE (lit0) == INTEGER_CST
11850 && TREE_CODE (minus_lit0) == INTEGER_CST
11851 && tree_int_cst_lt (lit0, minus_lit0)
11852 /* But avoid ending up with only negated parts. */
11853 && (var0 || con0))
11855 minus_lit0_origin |= lit0_origin;
11856 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11857 MINUS_EXPR, atype);
11858 lit0 = 0;
11859 lit0_origin = 0;
11861 else
11863 lit0_origin |= minus_lit0_origin;
11864 lit0 = associate_trees (loc, lit0, minus_lit0,
11865 MINUS_EXPR, atype);
11866 minus_lit0 = 0;
11867 minus_lit0_origin = 0;
11871 /* Don't introduce overflows through reassociation. */
11872 if ((lit0 && TREE_OVERFLOW_P (lit0))
11873 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11874 return NULL_TREE;
11876 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11877 con0_origin |= lit0_origin;
11878 con0 = associate_trees (loc, con0, lit0, code, atype);
11879 minus_con0_origin |= minus_lit0_origin;
11880 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11881 code, atype);
11883 /* Eliminate minus_con0. */
11884 if (minus_con0)
11886 if (con0)
11888 con0_origin |= minus_con0_origin;
11889 con0 = associate_trees (loc, con0, minus_con0,
11890 MINUS_EXPR, atype);
11892 else if (var0)
11894 var0_origin |= minus_con0_origin;
11895 var0 = associate_trees (loc, var0, minus_con0,
11896 MINUS_EXPR, atype);
11898 else
11899 gcc_unreachable ();
11902 /* Eliminate minus_var0. */
11903 if (minus_var0)
11905 if (con0)
11907 con0_origin |= minus_var0_origin;
11908 con0 = associate_trees (loc, con0, minus_var0,
11909 MINUS_EXPR, atype);
11911 else
11912 gcc_unreachable ();
11915 /* Reassociate only if there has been any actual association
11916 between subtrees from op0 and subtrees from op1 in at
11917 least one of the operands, otherwise we risk infinite
11918 recursion. See PR114084. */
11919 if (var0_origin != 3 && con0_origin != 3)
11920 return NULL_TREE;
11922 return
11923 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11924 code, atype));
11928 return NULL_TREE;
11930 case POINTER_DIFF_EXPR:
11931 case MINUS_EXPR:
11932 /* Fold &a[i] - &a[j] to i-j. */
11933 if (TREE_CODE (arg0) == ADDR_EXPR
11934 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11935 && TREE_CODE (arg1) == ADDR_EXPR
11936 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11938 tree tem = fold_addr_of_array_ref_difference (loc, type,
11939 TREE_OPERAND (arg0, 0),
11940 TREE_OPERAND (arg1, 0),
11941 code
11942 == POINTER_DIFF_EXPR);
11943 if (tem)
11944 return tem;
11947 /* Further transformations are not for pointers. */
11948 if (code == POINTER_DIFF_EXPR)
11949 return NULL_TREE;
11951 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11952 if (TREE_CODE (arg0) == NEGATE_EXPR
11953 && negate_expr_p (op1)
11954 /* If arg0 is e.g. unsigned int and type is int, then this could
11955 introduce UB, because if A is INT_MIN at runtime, the original
11956 expression can be well defined while the latter is not.
11957 See PR83269. */
11958 && !(ANY_INTEGRAL_TYPE_P (type)
11959 && TYPE_OVERFLOW_UNDEFINED (type)
11960 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11961 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11962 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11963 fold_convert_loc (loc, type,
11964 TREE_OPERAND (arg0, 0)));
11966 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11967 __complex__ ( x, -y ). This is not the same for SNaNs or if
11968 signed zeros are involved. */
11969 if (!HONOR_SNANS (arg0)
11970 && !HONOR_SIGNED_ZEROS (arg0)
11971 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11973 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11974 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11975 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11976 bool arg0rz = false, arg0iz = false;
11977 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11978 || (arg0i && (arg0iz = real_zerop (arg0i))))
11980 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11981 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11982 if (arg0rz && arg1i && real_zerop (arg1i))
11984 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11985 arg1r ? arg1r
11986 : build1 (REALPART_EXPR, rtype, arg1));
11987 tree ip = arg0i ? arg0i
11988 : build1 (IMAGPART_EXPR, rtype, arg0);
11989 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11991 else if (arg0iz && arg1r && real_zerop (arg1r))
11993 tree rp = arg0r ? arg0r
11994 : build1 (REALPART_EXPR, rtype, arg0);
11995 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11996 arg1i ? arg1i
11997 : build1 (IMAGPART_EXPR, rtype, arg1));
11998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
12003 /* A - B -> A + (-B) if B is easily negatable. */
12004 if (negate_expr_p (op1)
12005 && ! TYPE_OVERFLOW_SANITIZED (type)
12006 && ((FLOAT_TYPE_P (type)
12007 /* Avoid this transformation if B is a positive REAL_CST. */
12008 && (TREE_CODE (op1) != REAL_CST
12009 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
12010 || INTEGRAL_TYPE_P (type)))
12011 return fold_build2_loc (loc, PLUS_EXPR, type,
12012 fold_convert_loc (loc, type, arg0),
12013 negate_expr (op1));
12015 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
12016 one. Make sure the type is not saturating and has the signedness of
12017 the stripped operands, as fold_plusminus_mult_expr will re-associate.
12018 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
12019 if ((TREE_CODE (arg0) == MULT_EXPR
12020 || TREE_CODE (arg1) == MULT_EXPR)
12021 && !TYPE_SATURATING (type)
12022 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
12023 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
12024 && (!FLOAT_TYPE_P (type) || flag_associative_math))
12026 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
12027 if (tem)
12028 return tem;
12031 goto associate;
12033 case MULT_EXPR:
12034 if (! FLOAT_TYPE_P (type))
12036 /* Transform x * -C into -x * C if x is easily negatable. */
12037 if (TREE_CODE (op1) == INTEGER_CST
12038 && tree_int_cst_sgn (op1) == -1
12039 && negate_expr_p (op0)
12040 && negate_expr_p (op1)
12041 && (tem = negate_expr (op1)) != op1
12042 && ! TREE_OVERFLOW (tem))
12043 return fold_build2_loc (loc, MULT_EXPR, type,
12044 fold_convert_loc (loc, type,
12045 negate_expr (op0)), tem);
12047 strict_overflow_p = false;
12048 if (TREE_CODE (arg1) == INTEGER_CST
12049 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12050 &strict_overflow_p)) != 0)
12052 if (strict_overflow_p)
12053 fold_overflow_warning (("assuming signed overflow does not "
12054 "occur when simplifying "
12055 "multiplication"),
12056 WARN_STRICT_OVERFLOW_MISC);
12057 return fold_convert_loc (loc, type, tem);
12060 /* Optimize z * conj(z) for integer complex numbers. */
12061 if (TREE_CODE (arg0) == CONJ_EXPR
12062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12063 return fold_mult_zconjz (loc, type, arg1);
12064 if (TREE_CODE (arg1) == CONJ_EXPR
12065 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12066 return fold_mult_zconjz (loc, type, arg0);
12068 else
12070 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
12071 This is not the same for NaNs or if signed zeros are
12072 involved. */
12073 if (!HONOR_NANS (arg0)
12074 && !HONOR_SIGNED_ZEROS (arg0)
12075 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12076 && TREE_CODE (arg1) == COMPLEX_CST
12077 && real_zerop (TREE_REALPART (arg1)))
12079 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
12080 if (real_onep (TREE_IMAGPART (arg1)))
12081 return
12082 fold_build2_loc (loc, COMPLEX_EXPR, type,
12083 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
12084 rtype, arg0)),
12085 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
12086 else if (real_minus_onep (TREE_IMAGPART (arg1)))
12087 return
12088 fold_build2_loc (loc, COMPLEX_EXPR, type,
12089 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
12090 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
12091 rtype, arg0)));
12094 /* Optimize z * conj(z) for floating point complex numbers.
12095 Guarded by flag_unsafe_math_optimizations as non-finite
12096 imaginary components don't produce scalar results. */
12097 if (flag_unsafe_math_optimizations
12098 && TREE_CODE (arg0) == CONJ_EXPR
12099 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12100 return fold_mult_zconjz (loc, type, arg1);
12101 if (flag_unsafe_math_optimizations
12102 && TREE_CODE (arg1) == CONJ_EXPR
12103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12104 return fold_mult_zconjz (loc, type, arg0);
12106 goto associate;
12108 case BIT_IOR_EXPR:
12109 /* Canonicalize (X & C1) | C2. */
12110 if (TREE_CODE (arg0) == BIT_AND_EXPR
12111 && TREE_CODE (arg1) == INTEGER_CST
12112 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12114 int width = TYPE_PRECISION (type), w;
12115 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
12116 wide_int c2 = wi::to_wide (arg1);
12118 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
12119 if ((c1 & c2) == c1)
12120 return omit_one_operand_loc (loc, type, arg1,
12121 TREE_OPERAND (arg0, 0));
12123 wide_int msk = wi::mask (width, false,
12124 TYPE_PRECISION (TREE_TYPE (arg1)));
12126 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
12127 if (wi::bit_and_not (msk, c1 | c2) == 0)
12129 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12130 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12133 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
12134 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
12135 mode which allows further optimizations. */
12136 c1 &= msk;
12137 c2 &= msk;
12138 wide_int c3 = wi::bit_and_not (c1, c2);
12139 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
12141 wide_int mask = wi::mask (w, false,
12142 TYPE_PRECISION (type));
12143 if (((c1 | c2) & mask) == mask
12144 && wi::bit_and_not (c1, mask) == 0)
12146 c3 = mask;
12147 break;
12151 if (c3 != c1)
12153 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12154 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
12155 wide_int_to_tree (type, c3));
12156 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12160 /* See if this can be simplified into a rotate first. If that
12161 is unsuccessful continue in the association code. */
12162 goto bit_rotate;
12164 case BIT_XOR_EXPR:
12165 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
12166 if (TREE_CODE (arg0) == BIT_AND_EXPR
12167 && INTEGRAL_TYPE_P (type)
12168 && integer_onep (TREE_OPERAND (arg0, 1))
12169 && integer_onep (arg1))
12170 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
12171 build_zero_cst (TREE_TYPE (arg0)));
12173 /* See if this can be simplified into a rotate first. If that
12174 is unsuccessful continue in the association code. */
12175 goto bit_rotate;
12177 case BIT_AND_EXPR:
12178 /* Fold !X & 1 as X == 0. */
12179 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12180 && integer_onep (arg1))
12182 tem = TREE_OPERAND (arg0, 0);
12183 return fold_build2_loc (loc, EQ_EXPR, type, tem,
12184 build_zero_cst (TREE_TYPE (tem)));
12187 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
12188 multiple of 1 << CST. */
12189 if (TREE_CODE (arg1) == INTEGER_CST)
12191 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12192 wide_int ncst1 = -cst1;
12193 if ((cst1 & ncst1) == ncst1
12194 && multiple_of_p (type, arg0,
12195 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
12196 return fold_convert_loc (loc, type, arg0);
12199 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12200 bits from CST2. */
12201 if (TREE_CODE (arg1) == INTEGER_CST
12202 && TREE_CODE (arg0) == MULT_EXPR
12203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12205 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
12206 wide_int masked
12207 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
12209 if (masked == 0)
12210 return omit_two_operands_loc (loc, type, build_zero_cst (type),
12211 arg0, arg1);
12212 else if (masked != warg1)
12214 /* Avoid the transform if arg1 is a mask of some
12215 mode which allows further optimizations. */
12216 int pop = wi::popcount (warg1);
12217 if (!(pop >= BITS_PER_UNIT
12218 && pow2p_hwi (pop)
12219 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
12220 return fold_build2_loc (loc, code, type, op0,
12221 wide_int_to_tree (type, masked));
12225 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12226 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12227 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12229 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12231 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
12232 if (mask == -1)
12233 return
12234 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12237 goto associate;
12239 case RDIV_EXPR:
12240 /* Don't touch a floating-point divide by zero unless the mode
12241 of the constant can represent infinity. */
12242 if (TREE_CODE (arg1) == REAL_CST
12243 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12244 && real_zerop (arg1))
12245 return NULL_TREE;
12247 /* (-A) / (-B) -> A / B */
12248 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12249 return fold_build2_loc (loc, RDIV_EXPR, type,
12250 TREE_OPERAND (arg0, 0),
12251 negate_expr (arg1));
12252 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12253 return fold_build2_loc (loc, RDIV_EXPR, type,
12254 negate_expr (arg0),
12255 TREE_OPERAND (arg1, 0));
12256 return NULL_TREE;
12258 case TRUNC_DIV_EXPR:
12259 /* Fall through */
12261 case FLOOR_DIV_EXPR:
12262 /* Simplify A / (B << N) where A and B are positive and B is
12263 a power of 2, to A >> (N + log2(B)). */
12264 strict_overflow_p = false;
12265 if (TREE_CODE (arg1) == LSHIFT_EXPR
12266 && (TYPE_UNSIGNED (type)
12267 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12269 tree sval = TREE_OPERAND (arg1, 0);
12270 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12272 tree sh_cnt = TREE_OPERAND (arg1, 1);
12273 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12274 wi::exact_log2 (wi::to_wide (sval)));
12276 if (strict_overflow_p)
12277 fold_overflow_warning (("assuming signed overflow does not "
12278 "occur when simplifying A / (B << N)"),
12279 WARN_STRICT_OVERFLOW_MISC);
12281 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12282 sh_cnt, pow2);
12283 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12284 fold_convert_loc (loc, type, arg0), sh_cnt);
12288 /* Fall through */
12290 case ROUND_DIV_EXPR:
12291 case CEIL_DIV_EXPR:
12292 case EXACT_DIV_EXPR:
12293 if (integer_zerop (arg1))
12294 return NULL_TREE;
12296 /* Convert -A / -B to A / B when the type is signed and overflow is
12297 undefined. */
12298 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12299 && TREE_CODE (op0) == NEGATE_EXPR
12300 && negate_expr_p (op1))
12302 if (ANY_INTEGRAL_TYPE_P (type))
12303 fold_overflow_warning (("assuming signed overflow does not occur "
12304 "when distributing negation across "
12305 "division"),
12306 WARN_STRICT_OVERFLOW_MISC);
12307 return fold_build2_loc (loc, code, type,
12308 fold_convert_loc (loc, type,
12309 TREE_OPERAND (arg0, 0)),
12310 negate_expr (op1));
12312 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12313 && TREE_CODE (arg1) == NEGATE_EXPR
12314 && negate_expr_p (op0))
12316 if (ANY_INTEGRAL_TYPE_P (type))
12317 fold_overflow_warning (("assuming signed overflow does not occur "
12318 "when distributing negation across "
12319 "division"),
12320 WARN_STRICT_OVERFLOW_MISC);
12321 return fold_build2_loc (loc, code, type,
12322 negate_expr (op0),
12323 fold_convert_loc (loc, type,
12324 TREE_OPERAND (arg1, 0)));
12327 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12328 operation, EXACT_DIV_EXPR.
12330 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12331 At one time others generated faster code, it's not clear if they do
12332 after the last round to changes to the DIV code in expmed.cc. */
12333 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12334 && multiple_of_p (type, arg0, arg1))
12335 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12336 fold_convert (type, arg0),
12337 fold_convert (type, arg1));
12339 strict_overflow_p = false;
12340 if (TREE_CODE (arg1) == INTEGER_CST
12341 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12342 &strict_overflow_p)) != 0)
12344 if (strict_overflow_p)
12345 fold_overflow_warning (("assuming signed overflow does not occur "
12346 "when simplifying division"),
12347 WARN_STRICT_OVERFLOW_MISC);
12348 return fold_convert_loc (loc, type, tem);
12351 return NULL_TREE;
12353 case CEIL_MOD_EXPR:
12354 case FLOOR_MOD_EXPR:
12355 case ROUND_MOD_EXPR:
12356 case TRUNC_MOD_EXPR:
12357 strict_overflow_p = false;
12358 if (TREE_CODE (arg1) == INTEGER_CST
12359 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12360 &strict_overflow_p)) != 0)
12362 if (strict_overflow_p)
12363 fold_overflow_warning (("assuming signed overflow does not occur "
12364 "when simplifying modulus"),
12365 WARN_STRICT_OVERFLOW_MISC);
12366 return fold_convert_loc (loc, type, tem);
12369 return NULL_TREE;
12371 case LROTATE_EXPR:
12372 case RROTATE_EXPR:
12373 case RSHIFT_EXPR:
12374 case LSHIFT_EXPR:
12375 /* Since negative shift count is not well-defined,
12376 don't try to compute it in the compiler. */
12377 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12378 return NULL_TREE;
12380 prec = element_precision (type);
12382 /* If we have a rotate of a bit operation with the rotate count and
12383 the second operand of the bit operation both constant,
12384 permute the two operations. */
12385 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12386 && (TREE_CODE (arg0) == BIT_AND_EXPR
12387 || TREE_CODE (arg0) == BIT_IOR_EXPR
12388 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12391 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12392 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12393 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12394 fold_build2_loc (loc, code, type,
12395 arg00, arg1),
12396 fold_build2_loc (loc, code, type,
12397 arg01, arg1));
12400 /* Two consecutive rotates adding up to the some integer
12401 multiple of the precision of the type can be ignored. */
12402 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12403 && TREE_CODE (arg0) == RROTATE_EXPR
12404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12405 && wi::umod_trunc (wi::to_wide (arg1)
12406 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12407 prec) == 0)
12408 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12410 return NULL_TREE;
12412 case MIN_EXPR:
12413 case MAX_EXPR:
12414 goto associate;
12416 case TRUTH_ANDIF_EXPR:
12417 /* Note that the operands of this must be ints
12418 and their values must be 0 or 1.
12419 ("true" is a fixed value perhaps depending on the language.) */
12420 /* If first arg is constant zero, return it. */
12421 if (integer_zerop (arg0))
12422 return fold_convert_loc (loc, type, arg0);
12423 /* FALLTHRU */
12424 case TRUTH_AND_EXPR:
12425 /* If either arg is constant true, drop it. */
12426 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12427 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12428 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12429 /* Preserve sequence points. */
12430 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12431 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12432 /* If second arg is constant zero, result is zero, but first arg
12433 must be evaluated. */
12434 if (integer_zerop (arg1))
12435 return omit_one_operand_loc (loc, type, arg1, arg0);
12436 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12437 case will be handled here. */
12438 if (integer_zerop (arg0))
12439 return omit_one_operand_loc (loc, type, arg0, arg1);
12441 /* !X && X is always false. */
12442 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12443 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12444 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12445 /* X && !X is always false. */
12446 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12447 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12448 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12450 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12451 means A >= Y && A != MAX, but in this case we know that
12452 A < X <= MAX. */
12454 if (!TREE_SIDE_EFFECTS (arg0)
12455 && !TREE_SIDE_EFFECTS (arg1))
12457 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12458 if (tem && !operand_equal_p (tem, arg0, 0))
12459 return fold_convert (type,
12460 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12461 tem, arg1));
12463 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12464 if (tem && !operand_equal_p (tem, arg1, 0))
12465 return fold_convert (type,
12466 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12467 arg0, tem));
12470 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12471 != NULL_TREE)
12472 return tem;
12474 return NULL_TREE;
12476 case TRUTH_ORIF_EXPR:
12477 /* Note that the operands of this must be ints
12478 and their values must be 0 or true.
12479 ("true" is a fixed value perhaps depending on the language.) */
12480 /* If first arg is constant true, return it. */
12481 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12482 return fold_convert_loc (loc, type, arg0);
12483 /* FALLTHRU */
12484 case TRUTH_OR_EXPR:
12485 /* If either arg is constant zero, drop it. */
12486 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12488 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12489 /* Preserve sequence points. */
12490 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12492 /* If second arg is constant true, result is true, but we must
12493 evaluate first arg. */
12494 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12495 return omit_one_operand_loc (loc, type, arg1, arg0);
12496 /* Likewise for first arg, but note this only occurs here for
12497 TRUTH_OR_EXPR. */
12498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12499 return omit_one_operand_loc (loc, type, arg0, arg1);
12501 /* !X || X is always true. */
12502 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12503 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12504 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12505 /* X || !X is always true. */
12506 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12508 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12510 /* (X && !Y) || (!X && Y) is X ^ Y */
12511 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12512 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12514 tree a0, a1, l0, l1, n0, n1;
12516 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12517 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12519 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12520 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12522 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12523 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12525 if ((operand_equal_p (n0, a0, 0)
12526 && operand_equal_p (n1, a1, 0))
12527 || (operand_equal_p (n0, a1, 0)
12528 && operand_equal_p (n1, a0, 0)))
12529 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12532 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12533 != NULL_TREE)
12534 return tem;
12536 return NULL_TREE;
12538 case TRUTH_XOR_EXPR:
12539 /* If the second arg is constant zero, drop it. */
12540 if (integer_zerop (arg1))
12541 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12542 /* If the second arg is constant true, this is a logical inversion. */
12543 if (integer_onep (arg1))
12545 tem = invert_truthvalue_loc (loc, arg0);
12546 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12548 /* Identical arguments cancel to zero. */
12549 if (operand_equal_p (arg0, arg1, 0))
12550 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12552 /* !X ^ X is always true. */
12553 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12555 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12557 /* X ^ !X is always true. */
12558 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12560 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12562 return NULL_TREE;
12564 case EQ_EXPR:
12565 case NE_EXPR:
12566 STRIP_NOPS (arg0);
12567 STRIP_NOPS (arg1);
12569 tem = fold_comparison (loc, code, type, op0, op1);
12570 if (tem != NULL_TREE)
12571 return tem;
12573 /* bool_var != 1 becomes !bool_var. */
12574 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12575 && code == NE_EXPR)
12576 return fold_convert_loc (loc, type,
12577 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12578 TREE_TYPE (arg0), arg0));
12580 /* bool_var == 0 becomes !bool_var. */
12581 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12582 && code == EQ_EXPR)
12583 return fold_convert_loc (loc, type,
12584 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12585 TREE_TYPE (arg0), arg0));
12587 /* !exp != 0 becomes !exp */
12588 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12589 && code == NE_EXPR)
12590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12592 /* If this is an EQ or NE comparison with zero and ARG0 is
12593 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12594 two operations, but the latter can be done in one less insn
12595 on machines that have only two-operand insns or on which a
12596 constant cannot be the first operand. */
12597 if (TREE_CODE (arg0) == BIT_AND_EXPR
12598 && integer_zerop (arg1))
12600 tree arg00 = TREE_OPERAND (arg0, 0);
12601 tree arg01 = TREE_OPERAND (arg0, 1);
12602 if (TREE_CODE (arg00) == LSHIFT_EXPR
12603 && integer_onep (TREE_OPERAND (arg00, 0)))
12605 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12606 arg01, TREE_OPERAND (arg00, 1));
12607 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12608 build_one_cst (TREE_TYPE (arg0)));
12609 return fold_build2_loc (loc, code, type,
12610 fold_convert_loc (loc, TREE_TYPE (arg1),
12611 tem), arg1);
12613 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12614 && integer_onep (TREE_OPERAND (arg01, 0)))
12616 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12617 arg00, TREE_OPERAND (arg01, 1));
12618 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12619 build_one_cst (TREE_TYPE (arg0)));
12620 return fold_build2_loc (loc, code, type,
12621 fold_convert_loc (loc, TREE_TYPE (arg1),
12622 tem), arg1);
12626 /* If this is a comparison of a field, we may be able to simplify it. */
12627 if ((TREE_CODE (arg0) == COMPONENT_REF
12628 || TREE_CODE (arg0) == BIT_FIELD_REF)
12629 /* Handle the constant case even without -O
12630 to make sure the warnings are given. */
12631 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12633 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12634 if (t1)
12635 return t1;
12638 /* Optimize comparisons of strlen vs zero to a compare of the
12639 first character of the string vs zero. To wit,
12640 strlen(ptr) == 0 => *ptr == 0
12641 strlen(ptr) != 0 => *ptr != 0
12642 Other cases should reduce to one of these two (or a constant)
12643 due to the return value of strlen being unsigned. */
12644 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12646 tree fndecl = get_callee_fndecl (arg0);
12648 if (fndecl
12649 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12650 && call_expr_nargs (arg0) == 1
12651 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12652 == POINTER_TYPE))
12654 tree ptrtype
12655 = build_pointer_type (build_qualified_type (char_type_node,
12656 TYPE_QUAL_CONST));
12657 tree ptr = fold_convert_loc (loc, ptrtype,
12658 CALL_EXPR_ARG (arg0, 0));
12659 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12660 return fold_build2_loc (loc, code, type, iref,
12661 build_int_cst (TREE_TYPE (iref), 0));
12665 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12666 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12667 if (TREE_CODE (arg0) == RSHIFT_EXPR
12668 && integer_zerop (arg1)
12669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12671 tree arg00 = TREE_OPERAND (arg0, 0);
12672 tree arg01 = TREE_OPERAND (arg0, 1);
12673 tree itype = TREE_TYPE (arg00);
12674 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12676 if (TYPE_UNSIGNED (itype))
12678 itype = signed_type_for (itype);
12679 arg00 = fold_convert_loc (loc, itype, arg00);
12681 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12682 type, arg00, build_zero_cst (itype));
12686 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12687 (X & C) == 0 when C is a single bit. */
12688 if (TREE_CODE (arg0) == BIT_AND_EXPR
12689 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12690 && integer_zerop (arg1)
12691 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12693 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12694 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12695 TREE_OPERAND (arg0, 1));
12696 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12697 type, tem,
12698 fold_convert_loc (loc, TREE_TYPE (arg0),
12699 arg1));
12702 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12703 constant C is a power of two, i.e. a single bit. */
12704 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12705 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12706 && integer_zerop (arg1)
12707 && integer_pow2p (TREE_OPERAND (arg0, 1))
12708 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12709 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12711 tree arg00 = TREE_OPERAND (arg0, 0);
12712 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12713 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12716 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12717 when is C is a power of two, i.e. a single bit. */
12718 if (TREE_CODE (arg0) == BIT_AND_EXPR
12719 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12720 && integer_zerop (arg1)
12721 && integer_pow2p (TREE_OPERAND (arg0, 1))
12722 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12723 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12725 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12726 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12727 arg000, TREE_OPERAND (arg0, 1));
12728 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12729 tem, build_int_cst (TREE_TYPE (tem), 0));
12732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12733 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12735 tree arg00 = TREE_OPERAND (arg0, 0);
12736 tree arg01 = TREE_OPERAND (arg0, 1);
12737 tree arg10 = TREE_OPERAND (arg1, 0);
12738 tree arg11 = TREE_OPERAND (arg1, 1);
12739 tree itype = TREE_TYPE (arg0);
12741 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12742 operand_equal_p guarantees no side-effects so we don't need
12743 to use omit_one_operand on Z. */
12744 if (operand_equal_p (arg01, arg11, 0))
12745 return fold_build2_loc (loc, code, type, arg00,
12746 fold_convert_loc (loc, TREE_TYPE (arg00),
12747 arg10));
12748 if (operand_equal_p (arg01, arg10, 0))
12749 return fold_build2_loc (loc, code, type, arg00,
12750 fold_convert_loc (loc, TREE_TYPE (arg00),
12751 arg11));
12752 if (operand_equal_p (arg00, arg11, 0))
12753 return fold_build2_loc (loc, code, type, arg01,
12754 fold_convert_loc (loc, TREE_TYPE (arg01),
12755 arg10));
12756 if (operand_equal_p (arg00, arg10, 0))
12757 return fold_build2_loc (loc, code, type, arg01,
12758 fold_convert_loc (loc, TREE_TYPE (arg01),
12759 arg11));
12761 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12762 if (TREE_CODE (arg01) == INTEGER_CST
12763 && TREE_CODE (arg11) == INTEGER_CST)
12765 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12766 fold_convert_loc (loc, itype, arg11));
12767 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12768 return fold_build2_loc (loc, code, type, tem,
12769 fold_convert_loc (loc, itype, arg10));
12773 /* Attempt to simplify equality/inequality comparisons of complex
12774 values. Only lower the comparison if the result is known or
12775 can be simplified to a single scalar comparison. */
12776 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12777 || TREE_CODE (arg0) == COMPLEX_CST)
12778 && (TREE_CODE (arg1) == COMPLEX_EXPR
12779 || TREE_CODE (arg1) == COMPLEX_CST))
12781 tree real0, imag0, real1, imag1;
12782 tree rcond, icond;
12784 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12786 real0 = TREE_OPERAND (arg0, 0);
12787 imag0 = TREE_OPERAND (arg0, 1);
12789 else
12791 real0 = TREE_REALPART (arg0);
12792 imag0 = TREE_IMAGPART (arg0);
12795 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12797 real1 = TREE_OPERAND (arg1, 0);
12798 imag1 = TREE_OPERAND (arg1, 1);
12800 else
12802 real1 = TREE_REALPART (arg1);
12803 imag1 = TREE_IMAGPART (arg1);
12806 rcond = fold_binary_loc (loc, code, type, real0, real1);
12807 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12809 if (integer_zerop (rcond))
12811 if (code == EQ_EXPR)
12812 return omit_two_operands_loc (loc, type, boolean_false_node,
12813 imag0, imag1);
12814 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12816 else
12818 if (code == NE_EXPR)
12819 return omit_two_operands_loc (loc, type, boolean_true_node,
12820 imag0, imag1);
12821 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12825 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12826 if (icond && TREE_CODE (icond) == INTEGER_CST)
12828 if (integer_zerop (icond))
12830 if (code == EQ_EXPR)
12831 return omit_two_operands_loc (loc, type, boolean_false_node,
12832 real0, real1);
12833 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12835 else
12837 if (code == NE_EXPR)
12838 return omit_two_operands_loc (loc, type, boolean_true_node,
12839 real0, real1);
12840 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12845 return NULL_TREE;
12847 case LT_EXPR:
12848 case GT_EXPR:
12849 case LE_EXPR:
12850 case GE_EXPR:
12851 tem = fold_comparison (loc, code, type, op0, op1);
12852 if (tem != NULL_TREE)
12853 return tem;
12855 /* Transform comparisons of the form X +- C CMP X. */
12856 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12858 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12859 && !HONOR_SNANS (arg0))
12861 tree arg01 = TREE_OPERAND (arg0, 1);
12862 enum tree_code code0 = TREE_CODE (arg0);
12863 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12865 /* (X - c) > X becomes false. */
12866 if (code == GT_EXPR
12867 && ((code0 == MINUS_EXPR && is_positive >= 0)
12868 || (code0 == PLUS_EXPR && is_positive <= 0)))
12869 return constant_boolean_node (0, type);
12871 /* Likewise (X + c) < X becomes false. */
12872 if (code == LT_EXPR
12873 && ((code0 == PLUS_EXPR && is_positive >= 0)
12874 || (code0 == MINUS_EXPR && is_positive <= 0)))
12875 return constant_boolean_node (0, type);
12877 /* Convert (X - c) <= X to true. */
12878 if (!HONOR_NANS (arg1)
12879 && code == LE_EXPR
12880 && ((code0 == MINUS_EXPR && is_positive >= 0)
12881 || (code0 == PLUS_EXPR && is_positive <= 0)))
12882 return constant_boolean_node (1, type);
12884 /* Convert (X + c) >= X to true. */
12885 if (!HONOR_NANS (arg1)
12886 && code == GE_EXPR
12887 && ((code0 == PLUS_EXPR && is_positive >= 0)
12888 || (code0 == MINUS_EXPR && is_positive <= 0)))
12889 return constant_boolean_node (1, type);
12892 /* If we are comparing an ABS_EXPR with a constant, we can
12893 convert all the cases into explicit comparisons, but they may
12894 well not be faster than doing the ABS and one comparison.
12895 But ABS (X) <= C is a range comparison, which becomes a subtraction
12896 and a comparison, and is probably faster. */
12897 if (code == LE_EXPR
12898 && TREE_CODE (arg1) == INTEGER_CST
12899 && TREE_CODE (arg0) == ABS_EXPR
12900 && ! TREE_SIDE_EFFECTS (arg0)
12901 && (tem = negate_expr (arg1)) != 0
12902 && TREE_CODE (tem) == INTEGER_CST
12903 && !TREE_OVERFLOW (tem))
12904 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12905 build2 (GE_EXPR, type,
12906 TREE_OPERAND (arg0, 0), tem),
12907 build2 (LE_EXPR, type,
12908 TREE_OPERAND (arg0, 0), arg1));
12910 /* Convert ABS_EXPR<x> >= 0 to true. */
12911 strict_overflow_p = false;
12912 if (code == GE_EXPR
12913 && (integer_zerop (arg1)
12914 || (! HONOR_NANS (arg0)
12915 && real_zerop (arg1)))
12916 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12918 if (strict_overflow_p)
12919 fold_overflow_warning (("assuming signed overflow does not occur "
12920 "when simplifying comparison of "
12921 "absolute value and zero"),
12922 WARN_STRICT_OVERFLOW_CONDITIONAL);
12923 return omit_one_operand_loc (loc, type,
12924 constant_boolean_node (true, type),
12925 arg0);
12928 /* Convert ABS_EXPR<x> < 0 to false. */
12929 strict_overflow_p = false;
12930 if (code == LT_EXPR
12931 && (integer_zerop (arg1) || real_zerop (arg1))
12932 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12934 if (strict_overflow_p)
12935 fold_overflow_warning (("assuming signed overflow does not occur "
12936 "when simplifying comparison of "
12937 "absolute value and zero"),
12938 WARN_STRICT_OVERFLOW_CONDITIONAL);
12939 return omit_one_operand_loc (loc, type,
12940 constant_boolean_node (false, type),
12941 arg0);
12944 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12945 and similarly for >= into !=. */
12946 if ((code == LT_EXPR || code == GE_EXPR)
12947 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12948 && TREE_CODE (arg1) == LSHIFT_EXPR
12949 && integer_onep (TREE_OPERAND (arg1, 0)))
12950 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12951 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12952 TREE_OPERAND (arg1, 1)),
12953 build_zero_cst (TREE_TYPE (arg0)));
12955 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12956 otherwise Y might be >= # of bits in X's type and thus e.g.
12957 (unsigned char) (1 << Y) for Y 15 might be 0.
12958 If the cast is widening, then 1 << Y should have unsigned type,
12959 otherwise if Y is number of bits in the signed shift type minus 1,
12960 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12961 31 might be 0xffffffff80000000. */
12962 if ((code == LT_EXPR || code == GE_EXPR)
12963 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12964 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12965 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12966 && CONVERT_EXPR_P (arg1)
12967 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12968 && (element_precision (TREE_TYPE (arg1))
12969 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12970 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12971 || (element_precision (TREE_TYPE (arg1))
12972 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12973 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12975 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12976 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12977 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12978 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12979 build_zero_cst (TREE_TYPE (arg0)));
12982 return NULL_TREE;
12984 case UNORDERED_EXPR:
12985 case ORDERED_EXPR:
12986 case UNLT_EXPR:
12987 case UNLE_EXPR:
12988 case UNGT_EXPR:
12989 case UNGE_EXPR:
12990 case UNEQ_EXPR:
12991 case LTGT_EXPR:
12992 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12994 tree targ0 = strip_float_extensions (arg0);
12995 tree targ1 = strip_float_extensions (arg1);
12996 tree newtype = TREE_TYPE (targ0);
12998 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12999 newtype = TREE_TYPE (targ1);
13001 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
13002 && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
13003 return fold_build2_loc (loc, code, type,
13004 fold_convert_loc (loc, newtype, targ0),
13005 fold_convert_loc (loc, newtype, targ1));
13008 return NULL_TREE;
13010 case COMPOUND_EXPR:
13011 /* When pedantic, a compound expression can be neither an lvalue
13012 nor an integer constant expression. */
13013 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13014 return NULL_TREE;
13015 /* Don't let (0, 0) be null pointer constant. */
13016 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
13017 : fold_convert_loc (loc, type, arg1);
13018 return tem;
13020 default:
13021 return NULL_TREE;
13022 } /* switch (code) */
13025 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
13026 ((A & N) + B) & M -> (A + B) & M
13027 Similarly if (N & M) == 0,
13028 ((A | N) + B) & M -> (A + B) & M
13029 and for - instead of + (or unary - instead of +)
13030 and/or ^ instead of |.
13031 If B is constant and (B & M) == 0, fold into A & M.
13033 This function is a helper for match.pd patterns. Return non-NULL
13034 type in which the simplified operation should be performed only
13035 if any optimization is possible.
13037 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
13038 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
13039 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
13040 +/-. */
13041 tree
13042 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
13043 tree arg00, enum tree_code code00, tree arg000, tree arg001,
13044 tree arg01, enum tree_code code01, tree arg010, tree arg011,
13045 tree *pmop)
13047 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
13048 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
13049 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
13050 if (~cst1 == 0
13051 || (cst1 & (cst1 + 1)) != 0
13052 || !INTEGRAL_TYPE_P (type)
13053 || (!TYPE_OVERFLOW_WRAPS (type)
13054 && TREE_CODE (type) != INTEGER_TYPE)
13055 || (wi::max_value (type) & cst1) != cst1)
13056 return NULL_TREE;
13058 enum tree_code codes[2] = { code00, code01 };
13059 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
13060 int which = 0;
13061 wide_int cst0;
13063 /* Now we know that arg0 is (C + D) or (C - D) or -C and
13064 arg1 (M) is == (1LL << cst) - 1.
13065 Store C into PMOP[0] and D into PMOP[1]. */
13066 pmop[0] = arg00;
13067 pmop[1] = arg01;
13068 which = code != NEGATE_EXPR;
13070 for (; which >= 0; which--)
13071 switch (codes[which])
13073 case BIT_AND_EXPR:
13074 case BIT_IOR_EXPR:
13075 case BIT_XOR_EXPR:
13076 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
13077 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
13078 if (codes[which] == BIT_AND_EXPR)
13080 if (cst0 != cst1)
13081 break;
13083 else if (cst0 != 0)
13084 break;
13085 /* If C or D is of the form (A & N) where
13086 (N & M) == M, or of the form (A | N) or
13087 (A ^ N) where (N & M) == 0, replace it with A. */
13088 pmop[which] = arg0xx[2 * which];
13089 break;
13090 case ERROR_MARK:
13091 if (TREE_CODE (pmop[which]) != INTEGER_CST)
13092 break;
13093 /* If C or D is a N where (N & M) == 0, it can be
13094 omitted (replaced with 0). */
13095 if ((code == PLUS_EXPR
13096 || (code == MINUS_EXPR && which == 0))
13097 && (cst1 & wi::to_wide (pmop[which])) == 0)
13098 pmop[which] = build_int_cst (type, 0);
13099 /* Similarly, with C - N where (-N & M) == 0. */
13100 if (code == MINUS_EXPR
13101 && which == 1
13102 && (cst1 & -wi::to_wide (pmop[which])) == 0)
13103 pmop[which] = build_int_cst (type, 0);
13104 break;
13105 default:
13106 gcc_unreachable ();
13109 /* Only build anything new if we optimized one or both arguments above. */
13110 if (pmop[0] == arg00 && pmop[1] == arg01)
13111 return NULL_TREE;
13113 if (TYPE_OVERFLOW_WRAPS (type))
13114 return type;
13115 else
13116 return unsigned_type_for (type);
13119 /* Used by contains_label_[p1]. */
13121 struct contains_label_data
13123 hash_set<tree> *pset;
13124 bool inside_switch_p;
13127 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13128 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
13129 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13131 static tree
13132 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
13134 contains_label_data *d = (contains_label_data *) data;
13135 switch (TREE_CODE (*tp))
13137 case LABEL_EXPR:
13138 return *tp;
13140 case CASE_LABEL_EXPR:
13141 if (!d->inside_switch_p)
13142 return *tp;
13143 return NULL_TREE;
13145 case SWITCH_EXPR:
13146 if (!d->inside_switch_p)
13148 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
13149 return *tp;
13150 d->inside_switch_p = true;
13151 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
13152 return *tp;
13153 d->inside_switch_p = false;
13154 *walk_subtrees = 0;
13156 return NULL_TREE;
13158 case GOTO_EXPR:
13159 *walk_subtrees = 0;
13160 return NULL_TREE;
13162 default:
13163 return NULL_TREE;
13167 /* Return whether the sub-tree ST contains a label which is accessible from
13168 outside the sub-tree. */
13170 static bool
13171 contains_label_p (tree st)
13173 hash_set<tree> pset;
13174 contains_label_data data = { &pset, false };
13175 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
13178 /* Fold a ternary expression of code CODE and type TYPE with operands
13179 OP0, OP1, and OP2. Return the folded expression if folding is
13180 successful. Otherwise, return NULL_TREE. */
13182 tree
13183 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13184 tree op0, tree op1, tree op2)
13186 tree tem;
13187 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13188 enum tree_code_class kind = TREE_CODE_CLASS (code);
13190 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13191 && TREE_CODE_LENGTH (code) == 3);
13193 /* If this is a commutative operation, and OP0 is a constant, move it
13194 to OP1 to reduce the number of tests below. */
13195 if (commutative_ternary_tree_code (code)
13196 && tree_swap_operands_p (op0, op1))
13197 return fold_build3_loc (loc, code, type, op1, op0, op2);
13199 tem = generic_simplify (loc, code, type, op0, op1, op2);
13200 if (tem)
13201 return tem;
13203 /* Strip any conversions that don't change the mode. This is safe
13204 for every expression, except for a comparison expression because
13205 its signedness is derived from its operands. So, in the latter
13206 case, only strip conversions that don't change the signedness.
13208 Note that this is done as an internal manipulation within the
13209 constant folder, in order to find the simplest representation of
13210 the arguments so that their form can be studied. In any cases,
13211 the appropriate type conversions should be put back in the tree
13212 that will get out of the constant folder. */
13213 if (op0)
13215 arg0 = op0;
13216 STRIP_NOPS (arg0);
13219 if (op1)
13221 arg1 = op1;
13222 STRIP_NOPS (arg1);
13225 if (op2)
13227 arg2 = op2;
13228 STRIP_NOPS (arg2);
13231 switch (code)
13233 case COMPONENT_REF:
13234 if (TREE_CODE (arg0) == CONSTRUCTOR
13235 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13237 unsigned HOST_WIDE_INT idx;
13238 tree field, value;
13239 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13240 if (field == arg1)
13241 return value;
13243 return NULL_TREE;
13245 case COND_EXPR:
13246 case VEC_COND_EXPR:
13247 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13248 so all simple results must be passed through pedantic_non_lvalue. */
13249 if (TREE_CODE (arg0) == INTEGER_CST)
13251 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13252 tem = integer_zerop (arg0) ? op2 : op1;
13253 /* Only optimize constant conditions when the selected branch
13254 has the same type as the COND_EXPR. This avoids optimizing
13255 away "c ? x : throw", where the throw has a void type.
13256 Avoid throwing away that operand which contains label. */
13257 if ((!TREE_SIDE_EFFECTS (unused_op)
13258 || !contains_label_p (unused_op))
13259 && (! VOID_TYPE_P (TREE_TYPE (tem))
13260 || VOID_TYPE_P (type)))
13261 return protected_set_expr_location_unshare (tem, loc);
13262 return NULL_TREE;
13264 else if (TREE_CODE (arg0) == VECTOR_CST)
13266 unsigned HOST_WIDE_INT nelts;
13267 if ((TREE_CODE (arg1) == VECTOR_CST
13268 || TREE_CODE (arg1) == CONSTRUCTOR)
13269 && (TREE_CODE (arg2) == VECTOR_CST
13270 || TREE_CODE (arg2) == CONSTRUCTOR)
13271 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13273 vec_perm_builder sel (nelts, nelts, 1);
13274 for (unsigned int i = 0; i < nelts; i++)
13276 tree val = VECTOR_CST_ELT (arg0, i);
13277 if (integer_all_onesp (val))
13278 sel.quick_push (i);
13279 else if (integer_zerop (val))
13280 sel.quick_push (nelts + i);
13281 else /* Currently unreachable. */
13282 return NULL_TREE;
13284 vec_perm_indices indices (sel, 2, nelts);
13285 tree t = fold_vec_perm (type, arg1, arg2, indices);
13286 if (t != NULL_TREE)
13287 return t;
13291 /* If we have A op B ? A : C, we may be able to convert this to a
13292 simpler expression, depending on the operation and the values
13293 of B and C. Signed zeros prevent all of these transformations,
13294 for reasons given above each one.
13296 Also try swapping the arguments and inverting the conditional. */
13297 if (COMPARISON_CLASS_P (arg0)
13298 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13299 && !HONOR_SIGNED_ZEROS (op1))
13301 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13302 TREE_OPERAND (arg0, 0),
13303 TREE_OPERAND (arg0, 1),
13304 op1, op2);
13305 if (tem)
13306 return tem;
13309 if (COMPARISON_CLASS_P (arg0)
13310 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13311 && !HONOR_SIGNED_ZEROS (op2))
13313 enum tree_code comp_code = TREE_CODE (arg0);
13314 tree arg00 = TREE_OPERAND (arg0, 0);
13315 tree arg01 = TREE_OPERAND (arg0, 1);
13316 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13317 if (comp_code != ERROR_MARK)
13318 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13319 arg00,
13320 arg01,
13321 op2, op1);
13322 if (tem)
13323 return tem;
13326 /* If the second operand is simpler than the third, swap them
13327 since that produces better jump optimization results. */
13328 if (truth_value_p (TREE_CODE (arg0))
13329 && tree_swap_operands_p (op1, op2))
13331 location_t loc0 = expr_location_or (arg0, loc);
13332 /* See if this can be inverted. If it can't, possibly because
13333 it was a floating-point inequality comparison, don't do
13334 anything. */
13335 tem = fold_invert_truthvalue (loc0, arg0);
13336 if (tem)
13337 return fold_build3_loc (loc, code, type, tem, op2, op1);
13340 /* Convert A ? 1 : 0 to simply A. */
13341 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13342 : (integer_onep (op1)
13343 && !VECTOR_TYPE_P (type)))
13344 && integer_zerop (op2)
13345 /* If we try to convert OP0 to our type, the
13346 call to fold will try to move the conversion inside
13347 a COND, which will recurse. In that case, the COND_EXPR
13348 is probably the best choice, so leave it alone. */
13349 && type == TREE_TYPE (arg0))
13350 return protected_set_expr_location_unshare (arg0, loc);
13352 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13353 over COND_EXPR in cases such as floating point comparisons. */
13354 if (integer_zerop (op1)
13355 && code == COND_EXPR
13356 && integer_onep (op2)
13357 && !VECTOR_TYPE_P (type)
13358 && truth_value_p (TREE_CODE (arg0)))
13359 return fold_convert_loc (loc, type,
13360 invert_truthvalue_loc (loc, arg0));
13362 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13363 if (TREE_CODE (arg0) == LT_EXPR
13364 && integer_zerop (TREE_OPERAND (arg0, 1))
13365 && integer_zerop (op2)
13366 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13368 /* sign_bit_p looks through both zero and sign extensions,
13369 but for this optimization only sign extensions are
13370 usable. */
13371 tree tem2 = TREE_OPERAND (arg0, 0);
13372 while (tem != tem2)
13374 if (TREE_CODE (tem2) != NOP_EXPR
13375 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13377 tem = NULL_TREE;
13378 break;
13380 tem2 = TREE_OPERAND (tem2, 0);
13382 /* sign_bit_p only checks ARG1 bits within A's precision.
13383 If <sign bit of A> has wider type than A, bits outside
13384 of A's precision in <sign bit of A> need to be checked.
13385 If they are all 0, this optimization needs to be done
13386 in unsigned A's type, if they are all 1 in signed A's type,
13387 otherwise this can't be done. */
13388 if (tem
13389 && TYPE_PRECISION (TREE_TYPE (tem))
13390 < TYPE_PRECISION (TREE_TYPE (arg1))
13391 && TYPE_PRECISION (TREE_TYPE (tem))
13392 < TYPE_PRECISION (type))
13394 int inner_width, outer_width;
13395 tree tem_type;
13397 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13398 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13399 if (outer_width > TYPE_PRECISION (type))
13400 outer_width = TYPE_PRECISION (type);
13402 wide_int mask = wi::shifted_mask
13403 (inner_width, outer_width - inner_width, false,
13404 TYPE_PRECISION (TREE_TYPE (arg1)));
13406 wide_int common = mask & wi::to_wide (arg1);
13407 if (common == mask)
13409 tem_type = signed_type_for (TREE_TYPE (tem));
13410 tem = fold_convert_loc (loc, tem_type, tem);
13412 else if (common == 0)
13414 tem_type = unsigned_type_for (TREE_TYPE (tem));
13415 tem = fold_convert_loc (loc, tem_type, tem);
13417 else
13418 tem = NULL;
13421 if (tem)
13422 return
13423 fold_convert_loc (loc, type,
13424 fold_build2_loc (loc, BIT_AND_EXPR,
13425 TREE_TYPE (tem), tem,
13426 fold_convert_loc (loc,
13427 TREE_TYPE (tem),
13428 arg1)));
13431 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13432 already handled above. */
13433 if (TREE_CODE (arg0) == BIT_AND_EXPR
13434 && integer_onep (TREE_OPERAND (arg0, 1))
13435 && integer_zerop (op2)
13436 && integer_pow2p (arg1))
13438 tree tem = TREE_OPERAND (arg0, 0);
13439 STRIP_NOPS (tem);
13440 if (TREE_CODE (tem) == RSHIFT_EXPR
13441 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13442 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13443 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13444 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13445 fold_convert_loc (loc, type,
13446 TREE_OPERAND (tem, 0)),
13447 op1);
13450 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13451 is probably obsolete because the first operand should be a
13452 truth value (that's why we have the two cases above), but let's
13453 leave it in until we can confirm this for all front-ends. */
13454 if (integer_zerop (op2)
13455 && TREE_CODE (arg0) == NE_EXPR
13456 && integer_zerop (TREE_OPERAND (arg0, 1))
13457 && integer_pow2p (arg1)
13458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13459 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13460 arg1, OEP_ONLY_CONST)
13461 /* operand_equal_p compares just value, not precision, so e.g.
13462 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13463 second operand 32-bit -128, which is not a power of two (or vice
13464 versa. */
13465 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13466 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13468 /* Disable the transformations below for vectors, since
13469 fold_binary_op_with_conditional_arg may undo them immediately,
13470 yielding an infinite loop. */
13471 if (code == VEC_COND_EXPR)
13472 return NULL_TREE;
13474 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13475 if (integer_zerop (op2)
13476 && truth_value_p (TREE_CODE (arg0))
13477 && truth_value_p (TREE_CODE (arg1))
13478 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13479 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13480 : TRUTH_ANDIF_EXPR,
13481 type, fold_convert_loc (loc, type, arg0), op1);
13483 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13484 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13485 && truth_value_p (TREE_CODE (arg0))
13486 && truth_value_p (TREE_CODE (arg1))
13487 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13489 location_t loc0 = expr_location_or (arg0, loc);
13490 /* Only perform transformation if ARG0 is easily inverted. */
13491 tem = fold_invert_truthvalue (loc0, arg0);
13492 if (tem)
13493 return fold_build2_loc (loc, code == VEC_COND_EXPR
13494 ? BIT_IOR_EXPR
13495 : TRUTH_ORIF_EXPR,
13496 type, fold_convert_loc (loc, type, tem),
13497 op1);
13500 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13501 if (integer_zerop (arg1)
13502 && truth_value_p (TREE_CODE (arg0))
13503 && truth_value_p (TREE_CODE (op2))
13504 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13506 location_t loc0 = expr_location_or (arg0, loc);
13507 /* Only perform transformation if ARG0 is easily inverted. */
13508 tem = fold_invert_truthvalue (loc0, arg0);
13509 if (tem)
13510 return fold_build2_loc (loc, code == VEC_COND_EXPR
13511 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13512 type, fold_convert_loc (loc, type, tem),
13513 op2);
13516 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13517 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13518 && truth_value_p (TREE_CODE (arg0))
13519 && truth_value_p (TREE_CODE (op2))
13520 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13521 return fold_build2_loc (loc, code == VEC_COND_EXPR
13522 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13523 type, fold_convert_loc (loc, type, arg0), op2);
13525 return NULL_TREE;
13527 case CALL_EXPR:
13528 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13529 of fold_ternary on them. */
13530 gcc_unreachable ();
13532 case BIT_FIELD_REF:
13533 if (TREE_CODE (arg0) == VECTOR_CST
13534 && (type == TREE_TYPE (TREE_TYPE (arg0))
13535 || (VECTOR_TYPE_P (type)
13536 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13537 && tree_fits_uhwi_p (op1)
13538 && tree_fits_uhwi_p (op2))
13540 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13541 unsigned HOST_WIDE_INT width
13542 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13543 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13544 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13545 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13547 if (n != 0
13548 && (idx % width) == 0
13549 && (n % width) == 0
13550 && known_le ((idx + n) / width,
13551 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13553 idx = idx / width;
13554 n = n / width;
13556 if (TREE_CODE (arg0) == VECTOR_CST)
13558 if (n == 1)
13560 tem = VECTOR_CST_ELT (arg0, idx);
13561 if (VECTOR_TYPE_P (type))
13562 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13563 return tem;
13566 tree_vector_builder vals (type, n, 1);
13567 for (unsigned i = 0; i < n; ++i)
13568 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13569 return vals.build ();
13574 /* On constants we can use native encode/interpret to constant
13575 fold (nearly) all BIT_FIELD_REFs. */
13576 if (CONSTANT_CLASS_P (arg0)
13577 && can_native_interpret_type_p (type)
13578 && BITS_PER_UNIT == 8
13579 && tree_fits_uhwi_p (op1)
13580 && tree_fits_uhwi_p (op2))
13582 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13583 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13584 /* Limit us to a reasonable amount of work. To relax the
13585 other limitations we need bit-shifting of the buffer
13586 and rounding up the size. */
13587 if (bitpos % BITS_PER_UNIT == 0
13588 && bitsize % BITS_PER_UNIT == 0
13589 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13591 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13592 unsigned HOST_WIDE_INT len
13593 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13594 bitpos / BITS_PER_UNIT);
13595 if (len > 0
13596 && len * BITS_PER_UNIT >= bitsize)
13598 tree v = native_interpret_expr (type, b,
13599 bitsize / BITS_PER_UNIT);
13600 if (v)
13601 return v;
13606 return NULL_TREE;
13608 case VEC_PERM_EXPR:
13609 /* Perform constant folding of BIT_INSERT_EXPR. */
13610 if (TREE_CODE (arg2) == VECTOR_CST
13611 && TREE_CODE (op0) == VECTOR_CST
13612 && TREE_CODE (op1) == VECTOR_CST)
13614 /* Build a vector of integers from the tree mask. */
13615 vec_perm_builder builder;
13616 if (!tree_to_vec_perm_builder (&builder, arg2))
13617 return NULL_TREE;
13619 /* Create a vec_perm_indices for the integer vector. */
13620 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13621 bool single_arg = (op0 == op1);
13622 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13623 return fold_vec_perm (type, op0, op1, sel);
13625 return NULL_TREE;
13627 case BIT_INSERT_EXPR:
13628 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13629 if (TREE_CODE (arg0) == INTEGER_CST
13630 && TREE_CODE (arg1) == INTEGER_CST)
13632 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13633 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13634 wide_int tem = (wi::to_wide (arg0)
13635 & wi::shifted_mask (bitpos, bitsize, true,
13636 TYPE_PRECISION (type)));
13637 wide_int tem2
13638 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13639 bitsize), bitpos);
13640 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13642 else if (TREE_CODE (arg0) == VECTOR_CST
13643 && CONSTANT_CLASS_P (arg1)
13644 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13645 TREE_TYPE (arg1)))
13647 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13648 unsigned HOST_WIDE_INT elsize
13649 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13650 if (bitpos % elsize == 0)
13652 unsigned k = bitpos / elsize;
13653 unsigned HOST_WIDE_INT nelts;
13654 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13655 return arg0;
13656 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13658 tree_vector_builder elts (type, nelts, 1);
13659 elts.quick_grow (nelts);
13660 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13661 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13662 return elts.build ();
13666 return NULL_TREE;
13668 default:
13669 return NULL_TREE;
13670 } /* switch (code) */
13673 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13674 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13675 constructor element index of the value returned. If the element is
13676 not found NULL_TREE is returned and *CTOR_IDX is updated to
13677 the index of the element after the ACCESS_INDEX position (which
13678 may be outside of the CTOR array). */
13680 tree
13681 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13682 unsigned *ctor_idx)
13684 tree index_type = NULL_TREE;
13685 signop index_sgn = UNSIGNED;
13686 offset_int low_bound = 0;
13688 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13690 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13691 if (domain_type && TYPE_MIN_VALUE (domain_type))
13693 /* Static constructors for variably sized objects makes no sense. */
13694 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13695 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13696 /* ??? When it is obvious that the range is signed, treat it so. */
13697 if (TYPE_UNSIGNED (index_type)
13698 && TYPE_MAX_VALUE (domain_type)
13699 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13700 TYPE_MIN_VALUE (domain_type)))
13702 index_sgn = SIGNED;
13703 low_bound
13704 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13705 SIGNED);
13707 else
13709 index_sgn = TYPE_SIGN (index_type);
13710 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13715 if (index_type)
13716 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13717 index_sgn);
13719 offset_int index = low_bound;
13720 if (index_type)
13721 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13723 offset_int max_index = index;
13724 unsigned cnt;
13725 tree cfield, cval;
13726 bool first_p = true;
13728 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13730 /* Array constructor might explicitly set index, or specify a range,
13731 or leave index NULL meaning that it is next index after previous
13732 one. */
13733 if (cfield)
13735 if (TREE_CODE (cfield) == INTEGER_CST)
13736 max_index = index
13737 = offset_int::from (wi::to_wide (cfield), index_sgn);
13738 else
13740 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13741 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13742 index_sgn);
13743 max_index
13744 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13745 index_sgn);
13746 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13749 else if (!first_p)
13751 index = max_index + 1;
13752 if (index_type)
13753 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13754 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13755 max_index = index;
13757 else
13758 first_p = false;
13760 /* Do we have match? */
13761 if (wi::cmp (access_index, index, index_sgn) >= 0)
13763 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13765 if (ctor_idx)
13766 *ctor_idx = cnt;
13767 return cval;
13770 else if (in_gimple_form)
13771 /* We're past the element we search for. Note during parsing
13772 the elements might not be sorted.
13773 ??? We should use a binary search and a flag on the
13774 CONSTRUCTOR as to whether elements are sorted in declaration
13775 order. */
13776 break;
13778 if (ctor_idx)
13779 *ctor_idx = cnt;
13780 return NULL_TREE;
13783 /* Perform constant folding and related simplification of EXPR.
13784 The related simplifications include x*1 => x, x*0 => 0, etc.,
13785 and application of the associative law.
13786 NOP_EXPR conversions may be removed freely (as long as we
13787 are careful not to change the type of the overall expression).
13788 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13789 but we can constant-fold them if they have constant operands. */
13791 #ifdef ENABLE_FOLD_CHECKING
13792 # define fold(x) fold_1 (x)
13793 static tree fold_1 (tree);
13794 static
13795 #endif
13796 tree
13797 fold (tree expr)
13799 const tree t = expr;
13800 enum tree_code code = TREE_CODE (t);
13801 enum tree_code_class kind = TREE_CODE_CLASS (code);
13802 tree tem;
13803 location_t loc = EXPR_LOCATION (expr);
13805 /* Return right away if a constant. */
13806 if (kind == tcc_constant)
13807 return t;
13809 /* CALL_EXPR-like objects with variable numbers of operands are
13810 treated specially. */
13811 if (kind == tcc_vl_exp)
13813 if (code == CALL_EXPR)
13815 tem = fold_call_expr (loc, expr, false);
13816 return tem ? tem : expr;
13818 return expr;
13821 if (IS_EXPR_CODE_CLASS (kind))
13823 tree type = TREE_TYPE (t);
13824 tree op0, op1, op2;
13826 switch (TREE_CODE_LENGTH (code))
13828 case 1:
13829 op0 = TREE_OPERAND (t, 0);
13830 tem = fold_unary_loc (loc, code, type, op0);
13831 return tem ? tem : expr;
13832 case 2:
13833 op0 = TREE_OPERAND (t, 0);
13834 op1 = TREE_OPERAND (t, 1);
13835 tem = fold_binary_loc (loc, code, type, op0, op1);
13836 return tem ? tem : expr;
13837 case 3:
13838 op0 = TREE_OPERAND (t, 0);
13839 op1 = TREE_OPERAND (t, 1);
13840 op2 = TREE_OPERAND (t, 2);
13841 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13842 return tem ? tem : expr;
13843 default:
13844 break;
13848 switch (code)
13850 case ARRAY_REF:
13852 tree op0 = TREE_OPERAND (t, 0);
13853 tree op1 = TREE_OPERAND (t, 1);
13855 if (TREE_CODE (op1) == INTEGER_CST
13856 && TREE_CODE (op0) == CONSTRUCTOR
13857 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13859 tree val = get_array_ctor_element_at_index (op0,
13860 wi::to_offset (op1));
13861 if (val)
13862 return val;
13865 return t;
13868 /* Return a VECTOR_CST if possible. */
13869 case CONSTRUCTOR:
13871 tree type = TREE_TYPE (t);
13872 if (TREE_CODE (type) != VECTOR_TYPE)
13873 return t;
13875 unsigned i;
13876 tree val;
13877 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13878 if (! CONSTANT_CLASS_P (val))
13879 return t;
13881 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13884 case CONST_DECL:
13885 return fold (DECL_INITIAL (t));
13887 default:
13888 return t;
13889 } /* switch (code) */
13892 #ifdef ENABLE_FOLD_CHECKING
13893 #undef fold
13895 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13896 hash_table<nofree_ptr_hash<const tree_node> > *);
13897 static void fold_check_failed (const_tree, const_tree);
13898 void print_fold_checksum (const_tree);
13900 /* When --enable-checking=fold, compute a digest of expr before
13901 and after actual fold call to see if fold did not accidentally
13902 change original expr. */
13904 tree
13905 fold (tree expr)
13907 tree ret;
13908 struct md5_ctx ctx;
13909 unsigned char checksum_before[16], checksum_after[16];
13910 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13912 md5_init_ctx (&ctx);
13913 fold_checksum_tree (expr, &ctx, &ht);
13914 md5_finish_ctx (&ctx, checksum_before);
13915 ht.empty ();
13917 ret = fold_1 (expr);
13919 md5_init_ctx (&ctx);
13920 fold_checksum_tree (expr, &ctx, &ht);
13921 md5_finish_ctx (&ctx, checksum_after);
13923 if (memcmp (checksum_before, checksum_after, 16))
13924 fold_check_failed (expr, ret);
13926 return ret;
13929 void
13930 print_fold_checksum (const_tree expr)
13932 struct md5_ctx ctx;
13933 unsigned char checksum[16], cnt;
13934 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13936 md5_init_ctx (&ctx);
13937 fold_checksum_tree (expr, &ctx, &ht);
13938 md5_finish_ctx (&ctx, checksum);
13939 for (cnt = 0; cnt < 16; ++cnt)
13940 fprintf (stderr, "%02x", checksum[cnt]);
13941 putc ('\n', stderr);
13944 static void
13945 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13947 internal_error ("fold check: original tree changed by fold");
13950 static void
13951 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13952 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13954 const tree_node **slot;
13955 enum tree_code code;
13956 union tree_node *buf;
13957 int i, len;
13959 recursive_label:
13960 if (expr == NULL)
13961 return;
13962 slot = ht->find_slot (expr, INSERT);
13963 if (*slot != NULL)
13964 return;
13965 *slot = expr;
13966 code = TREE_CODE (expr);
13967 if (TREE_CODE_CLASS (code) == tcc_declaration
13968 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13970 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13971 size_t sz = tree_size (expr);
13972 buf = XALLOCAVAR (union tree_node, sz);
13973 memcpy ((char *) buf, expr, sz);
13974 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13975 buf->decl_with_vis.symtab_node = NULL;
13976 buf->base.nowarning_flag = 0;
13977 expr = (tree) buf;
13979 else if (TREE_CODE_CLASS (code) == tcc_type
13980 && (TYPE_POINTER_TO (expr)
13981 || TYPE_REFERENCE_TO (expr)
13982 || TYPE_CACHED_VALUES_P (expr)
13983 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13984 || TYPE_NEXT_VARIANT (expr)
13985 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13987 /* Allow these fields to be modified. */
13988 tree tmp;
13989 size_t sz = tree_size (expr);
13990 buf = XALLOCAVAR (union tree_node, sz);
13991 memcpy ((char *) buf, expr, sz);
13992 expr = tmp = (tree) buf;
13993 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13994 TYPE_POINTER_TO (tmp) = NULL;
13995 TYPE_REFERENCE_TO (tmp) = NULL;
13996 TYPE_NEXT_VARIANT (tmp) = NULL;
13997 TYPE_ALIAS_SET (tmp) = -1;
13998 if (TYPE_CACHED_VALUES_P (tmp))
14000 TYPE_CACHED_VALUES_P (tmp) = 0;
14001 TYPE_CACHED_VALUES (tmp) = NULL;
14004 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
14006 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
14007 that and change builtins.cc etc. instead - see PR89543. */
14008 size_t sz = tree_size (expr);
14009 buf = XALLOCAVAR (union tree_node, sz);
14010 memcpy ((char *) buf, expr, sz);
14011 buf->base.nowarning_flag = 0;
14012 expr = (tree) buf;
14014 md5_process_bytes (expr, tree_size (expr), ctx);
14015 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14016 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14017 if (TREE_CODE_CLASS (code) != tcc_type
14018 && TREE_CODE_CLASS (code) != tcc_declaration
14019 && code != TREE_LIST
14020 && code != SSA_NAME
14021 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14022 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14023 switch (TREE_CODE_CLASS (code))
14025 case tcc_constant:
14026 switch (code)
14028 case STRING_CST:
14029 md5_process_bytes (TREE_STRING_POINTER (expr),
14030 TREE_STRING_LENGTH (expr), ctx);
14031 break;
14032 case COMPLEX_CST:
14033 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14034 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14035 break;
14036 case VECTOR_CST:
14037 len = vector_cst_encoded_nelts (expr);
14038 for (i = 0; i < len; ++i)
14039 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
14040 break;
14041 default:
14042 break;
14044 break;
14045 case tcc_exceptional:
14046 switch (code)
14048 case TREE_LIST:
14049 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14050 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14051 expr = TREE_CHAIN (expr);
14052 goto recursive_label;
14053 break;
14054 case TREE_VEC:
14055 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14056 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14057 break;
14058 default:
14059 break;
14061 break;
14062 case tcc_expression:
14063 case tcc_reference:
14064 case tcc_comparison:
14065 case tcc_unary:
14066 case tcc_binary:
14067 case tcc_statement:
14068 case tcc_vl_exp:
14069 len = TREE_OPERAND_LENGTH (expr);
14070 for (i = 0; i < len; ++i)
14071 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14072 break;
14073 case tcc_declaration:
14074 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14075 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14076 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14078 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14079 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14080 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14081 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14082 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14085 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14087 if (TREE_CODE (expr) == FUNCTION_DECL)
14089 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14090 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14092 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14094 break;
14095 case tcc_type:
14096 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14097 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14098 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14099 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14100 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14101 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14102 if (INTEGRAL_TYPE_P (expr)
14103 || SCALAR_FLOAT_TYPE_P (expr))
14105 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14106 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14108 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14109 if (RECORD_OR_UNION_TYPE_P (expr))
14110 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14111 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14112 break;
14113 default:
14114 break;
14118 /* Helper function for outputting the checksum of a tree T. When
14119 debugging with gdb, you can "define mynext" to be "next" followed
14120 by "call debug_fold_checksum (op0)", then just trace down till the
14121 outputs differ. */
14123 DEBUG_FUNCTION void
14124 debug_fold_checksum (const_tree t)
14126 int i;
14127 unsigned char checksum[16];
14128 struct md5_ctx ctx;
14129 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14131 md5_init_ctx (&ctx);
14132 fold_checksum_tree (t, &ctx, &ht);
14133 md5_finish_ctx (&ctx, checksum);
14134 ht.empty ();
14136 for (i = 0; i < 16; i++)
14137 fprintf (stderr, "%d ", checksum[i]);
14139 fprintf (stderr, "\n");
14142 #endif
14144 /* Fold a unary tree expression with code CODE of type TYPE with an
14145 operand OP0. LOC is the location of the resulting expression.
14146 Return a folded expression if successful. Otherwise, return a tree
14147 expression with code CODE of type TYPE with an operand OP0. */
14149 tree
14150 fold_build1_loc (location_t loc,
14151 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14153 tree tem;
14154 #ifdef ENABLE_FOLD_CHECKING
14155 unsigned char checksum_before[16], checksum_after[16];
14156 struct md5_ctx ctx;
14157 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14159 md5_init_ctx (&ctx);
14160 fold_checksum_tree (op0, &ctx, &ht);
14161 md5_finish_ctx (&ctx, checksum_before);
14162 ht.empty ();
14163 #endif
14165 tem = fold_unary_loc (loc, code, type, op0);
14166 if (!tem)
14167 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
14169 #ifdef ENABLE_FOLD_CHECKING
14170 md5_init_ctx (&ctx);
14171 fold_checksum_tree (op0, &ctx, &ht);
14172 md5_finish_ctx (&ctx, checksum_after);
14174 if (memcmp (checksum_before, checksum_after, 16))
14175 fold_check_failed (op0, tem);
14176 #endif
14177 return tem;
14180 /* Fold a binary tree expression with code CODE of type TYPE with
14181 operands OP0 and OP1. LOC is the location of the resulting
14182 expression. Return a folded expression if successful. Otherwise,
14183 return a tree expression with code CODE of type TYPE with operands
14184 OP0 and OP1. */
14186 tree
14187 fold_build2_loc (location_t loc,
14188 enum tree_code code, tree type, tree op0, tree op1
14189 MEM_STAT_DECL)
14191 tree tem;
14192 #ifdef ENABLE_FOLD_CHECKING
14193 unsigned char checksum_before_op0[16],
14194 checksum_before_op1[16],
14195 checksum_after_op0[16],
14196 checksum_after_op1[16];
14197 struct md5_ctx ctx;
14198 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14200 md5_init_ctx (&ctx);
14201 fold_checksum_tree (op0, &ctx, &ht);
14202 md5_finish_ctx (&ctx, checksum_before_op0);
14203 ht.empty ();
14205 md5_init_ctx (&ctx);
14206 fold_checksum_tree (op1, &ctx, &ht);
14207 md5_finish_ctx (&ctx, checksum_before_op1);
14208 ht.empty ();
14209 #endif
14211 tem = fold_binary_loc (loc, code, type, op0, op1);
14212 if (!tem)
14213 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14215 #ifdef ENABLE_FOLD_CHECKING
14216 md5_init_ctx (&ctx);
14217 fold_checksum_tree (op0, &ctx, &ht);
14218 md5_finish_ctx (&ctx, checksum_after_op0);
14219 ht.empty ();
14221 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14222 fold_check_failed (op0, tem);
14224 md5_init_ctx (&ctx);
14225 fold_checksum_tree (op1, &ctx, &ht);
14226 md5_finish_ctx (&ctx, checksum_after_op1);
14228 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14229 fold_check_failed (op1, tem);
14230 #endif
14231 return tem;
14234 /* Fold a ternary tree expression with code CODE of type TYPE with
14235 operands OP0, OP1, and OP2. Return a folded expression if
14236 successful. Otherwise, return a tree expression with code CODE of
14237 type TYPE with operands OP0, OP1, and OP2. */
14239 tree
14240 fold_build3_loc (location_t loc, enum tree_code code, tree type,
14241 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14243 tree tem;
14244 #ifdef ENABLE_FOLD_CHECKING
14245 unsigned char checksum_before_op0[16],
14246 checksum_before_op1[16],
14247 checksum_before_op2[16],
14248 checksum_after_op0[16],
14249 checksum_after_op1[16],
14250 checksum_after_op2[16];
14251 struct md5_ctx ctx;
14252 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14254 md5_init_ctx (&ctx);
14255 fold_checksum_tree (op0, &ctx, &ht);
14256 md5_finish_ctx (&ctx, checksum_before_op0);
14257 ht.empty ();
14259 md5_init_ctx (&ctx);
14260 fold_checksum_tree (op1, &ctx, &ht);
14261 md5_finish_ctx (&ctx, checksum_before_op1);
14262 ht.empty ();
14264 md5_init_ctx (&ctx);
14265 fold_checksum_tree (op2, &ctx, &ht);
14266 md5_finish_ctx (&ctx, checksum_before_op2);
14267 ht.empty ();
14268 #endif
14270 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14271 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14272 if (!tem)
14273 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14275 #ifdef ENABLE_FOLD_CHECKING
14276 md5_init_ctx (&ctx);
14277 fold_checksum_tree (op0, &ctx, &ht);
14278 md5_finish_ctx (&ctx, checksum_after_op0);
14279 ht.empty ();
14281 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14282 fold_check_failed (op0, tem);
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (op1, &ctx, &ht);
14286 md5_finish_ctx (&ctx, checksum_after_op1);
14287 ht.empty ();
14289 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14290 fold_check_failed (op1, tem);
14292 md5_init_ctx (&ctx);
14293 fold_checksum_tree (op2, &ctx, &ht);
14294 md5_finish_ctx (&ctx, checksum_after_op2);
14296 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14297 fold_check_failed (op2, tem);
14298 #endif
14299 return tem;
14302 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14303 arguments in ARGARRAY, and a null static chain.
14304 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14305 of type TYPE from the given operands as constructed by build_call_array. */
14307 tree
14308 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14309 int nargs, tree *argarray)
14311 tree tem;
14312 #ifdef ENABLE_FOLD_CHECKING
14313 unsigned char checksum_before_fn[16],
14314 checksum_before_arglist[16],
14315 checksum_after_fn[16],
14316 checksum_after_arglist[16];
14317 struct md5_ctx ctx;
14318 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14319 int i;
14321 md5_init_ctx (&ctx);
14322 fold_checksum_tree (fn, &ctx, &ht);
14323 md5_finish_ctx (&ctx, checksum_before_fn);
14324 ht.empty ();
14326 md5_init_ctx (&ctx);
14327 for (i = 0; i < nargs; i++)
14328 fold_checksum_tree (argarray[i], &ctx, &ht);
14329 md5_finish_ctx (&ctx, checksum_before_arglist);
14330 ht.empty ();
14331 #endif
14333 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14334 if (!tem)
14335 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14337 #ifdef ENABLE_FOLD_CHECKING
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (fn, &ctx, &ht);
14340 md5_finish_ctx (&ctx, checksum_after_fn);
14341 ht.empty ();
14343 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14344 fold_check_failed (fn, tem);
14346 md5_init_ctx (&ctx);
14347 for (i = 0; i < nargs; i++)
14348 fold_checksum_tree (argarray[i], &ctx, &ht);
14349 md5_finish_ctx (&ctx, checksum_after_arglist);
14351 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14352 fold_check_failed (NULL_TREE, tem);
14353 #endif
14354 return tem;
14357 /* Perform constant folding and related simplification of initializer
14358 expression EXPR. These behave identically to "fold_buildN" but ignore
14359 potential run-time traps and exceptions that fold must preserve. */
14361 #define START_FOLD_INIT \
14362 int saved_signaling_nans = flag_signaling_nans;\
14363 int saved_trapping_math = flag_trapping_math;\
14364 int saved_rounding_math = flag_rounding_math;\
14365 int saved_trapv = flag_trapv;\
14366 int saved_folding_initializer = folding_initializer;\
14367 flag_signaling_nans = 0;\
14368 flag_trapping_math = 0;\
14369 flag_rounding_math = 0;\
14370 flag_trapv = 0;\
14371 folding_initializer = 1;
14373 #define END_FOLD_INIT \
14374 flag_signaling_nans = saved_signaling_nans;\
14375 flag_trapping_math = saved_trapping_math;\
14376 flag_rounding_math = saved_rounding_math;\
14377 flag_trapv = saved_trapv;\
14378 folding_initializer = saved_folding_initializer;
14380 tree
14381 fold_init (tree expr)
14383 tree result;
14384 START_FOLD_INIT;
14386 result = fold (expr);
14388 END_FOLD_INIT;
14389 return result;
14392 tree
14393 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14394 tree type, tree op)
14396 tree result;
14397 START_FOLD_INIT;
14399 result = fold_build1_loc (loc, code, type, op);
14401 END_FOLD_INIT;
14402 return result;
14405 tree
14406 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14407 tree type, tree op0, tree op1)
14409 tree result;
14410 START_FOLD_INIT;
14412 result = fold_build2_loc (loc, code, type, op0, op1);
14414 END_FOLD_INIT;
14415 return result;
14418 tree
14419 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14420 int nargs, tree *argarray)
14422 tree result;
14423 START_FOLD_INIT;
14425 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14427 END_FOLD_INIT;
14428 return result;
14431 tree
14432 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14433 tree lhs, tree rhs)
14435 tree result;
14436 START_FOLD_INIT;
14438 result = fold_binary_loc (loc, code, type, lhs, rhs);
14440 END_FOLD_INIT;
14441 return result;
14444 #undef START_FOLD_INIT
14445 #undef END_FOLD_INIT
14447 /* Determine if first argument is a multiple of second argument. Return
14448 false if it is not, or we cannot easily determined it to be.
14450 An example of the sort of thing we care about (at this point; this routine
14451 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14452 fold cases do now) is discovering that
14454 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14456 is a multiple of
14458 SAVE_EXPR (J * 8)
14460 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14462 This code also handles discovering that
14464 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14466 is a multiple of 8 so we don't have to worry about dealing with a
14467 possible remainder.
14469 Note that we *look* inside a SAVE_EXPR only to determine how it was
14470 calculated; it is not safe for fold to do much of anything else with the
14471 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14472 at run time. For example, the latter example above *cannot* be implemented
14473 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14474 evaluation time of the original SAVE_EXPR is not necessarily the same at
14475 the time the new expression is evaluated. The only optimization of this
14476 sort that would be valid is changing
14478 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14480 divided by 8 to
14482 SAVE_EXPR (I) * SAVE_EXPR (J)
14484 (where the same SAVE_EXPR (J) is used in the original and the
14485 transformed version).
14487 NOWRAP specifies whether all outer operations in TYPE should
14488 be considered not wrapping. Any type conversion within TOP acts
14489 as a barrier and we will fall back to NOWRAP being false.
14490 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14491 as not wrapping even though they are generally using unsigned arithmetic. */
14493 bool
14494 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14496 gimple *stmt;
14497 tree op1, op2;
14499 if (operand_equal_p (top, bottom, 0))
14500 return true;
14502 if (TREE_CODE (type) != INTEGER_TYPE)
14503 return false;
14505 switch (TREE_CODE (top))
14507 case BIT_AND_EXPR:
14508 /* Bitwise and provides a power of two multiple. If the mask is
14509 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14510 if (!integer_pow2p (bottom))
14511 return false;
14512 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14513 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14515 case MULT_EXPR:
14516 /* If the multiplication can wrap we cannot recurse further unless
14517 the bottom is a power of two which is where wrapping does not
14518 matter. */
14519 if (!nowrap
14520 && !TYPE_OVERFLOW_UNDEFINED (type)
14521 && !integer_pow2p (bottom))
14522 return false;
14523 if (TREE_CODE (bottom) == INTEGER_CST)
14525 op1 = TREE_OPERAND (top, 0);
14526 op2 = TREE_OPERAND (top, 1);
14527 if (TREE_CODE (op1) == INTEGER_CST)
14528 std::swap (op1, op2);
14529 if (TREE_CODE (op2) == INTEGER_CST)
14531 if (multiple_of_p (type, op2, bottom, nowrap))
14532 return true;
14533 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14534 if (multiple_of_p (type, bottom, op2, nowrap))
14536 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14537 wi::to_widest (op2));
14538 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14540 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14541 return multiple_of_p (type, op1, op2, nowrap);
14544 return multiple_of_p (type, op1, bottom, nowrap);
14547 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14548 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14550 case LSHIFT_EXPR:
14551 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14552 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14554 op1 = TREE_OPERAND (top, 1);
14555 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14557 wide_int mul_op
14558 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14559 return multiple_of_p (type,
14560 wide_int_to_tree (type, mul_op), bottom,
14561 nowrap);
14564 return false;
14566 case MINUS_EXPR:
14567 case PLUS_EXPR:
14568 /* If the addition or subtraction can wrap we cannot recurse further
14569 unless bottom is a power of two which is where wrapping does not
14570 matter. */
14571 if (!nowrap
14572 && !TYPE_OVERFLOW_UNDEFINED (type)
14573 && !integer_pow2p (bottom))
14574 return false;
14576 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14577 unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14578 but 0xfffffffd is not. */
14579 op1 = TREE_OPERAND (top, 1);
14580 if (TREE_CODE (top) == PLUS_EXPR
14581 && nowrap
14582 && TYPE_UNSIGNED (type)
14583 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14584 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14586 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14587 precisely, so be conservative here checking if both op0 and op1
14588 are multiple of bottom. Note we check the second operand first
14589 since it's usually simpler. */
14590 return (multiple_of_p (type, op1, bottom, nowrap)
14591 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14593 CASE_CONVERT:
14594 /* Can't handle conversions from non-integral or wider integral type. */
14595 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14596 || (TYPE_PRECISION (type)
14597 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14598 return false;
14599 /* NOWRAP only extends to operations in the outermost type so
14600 make sure to strip it off here. */
14601 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14602 TREE_OPERAND (top, 0), bottom, false);
14604 case SAVE_EXPR:
14605 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14607 case COND_EXPR:
14608 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14609 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14611 case INTEGER_CST:
14612 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14613 return false;
14614 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14615 SIGNED);
14617 case SSA_NAME:
14618 if (TREE_CODE (bottom) == INTEGER_CST
14619 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14620 && gimple_code (stmt) == GIMPLE_ASSIGN)
14622 enum tree_code code = gimple_assign_rhs_code (stmt);
14624 /* Check for special cases to see if top is defined as multiple
14625 of bottom:
14627 top = (X & ~(bottom - 1) ; bottom is power of 2
14631 Y = X % bottom
14632 top = X - Y. */
14633 if (code == BIT_AND_EXPR
14634 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14635 && TREE_CODE (op2) == INTEGER_CST
14636 && integer_pow2p (bottom)
14637 && wi::multiple_of_p (wi::to_widest (op2),
14638 wi::to_widest (bottom), SIGNED))
14639 return true;
14641 op1 = gimple_assign_rhs1 (stmt);
14642 if (code == MINUS_EXPR
14643 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14644 && TREE_CODE (op2) == SSA_NAME
14645 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14646 && gimple_code (stmt) == GIMPLE_ASSIGN
14647 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14648 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14649 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14650 return true;
14653 /* fall through */
14655 default:
14656 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14657 return multiple_p (wi::to_poly_widest (top),
14658 wi::to_poly_widest (bottom));
14660 return false;
14664 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14665 This function returns true for integer expressions, and returns
14666 false if uncertain. */
14668 bool
14669 tree_expr_finite_p (const_tree x)
14671 machine_mode mode = element_mode (x);
14672 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14673 return true;
14674 switch (TREE_CODE (x))
14676 case REAL_CST:
14677 return real_isfinite (TREE_REAL_CST_PTR (x));
14678 case COMPLEX_CST:
14679 return tree_expr_finite_p (TREE_REALPART (x))
14680 && tree_expr_finite_p (TREE_IMAGPART (x));
14681 case FLOAT_EXPR:
14682 return true;
14683 case ABS_EXPR:
14684 case CONVERT_EXPR:
14685 case NON_LVALUE_EXPR:
14686 case NEGATE_EXPR:
14687 case SAVE_EXPR:
14688 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14689 case MIN_EXPR:
14690 case MAX_EXPR:
14691 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14692 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14693 case COND_EXPR:
14694 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14695 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14696 case CALL_EXPR:
14697 switch (get_call_combined_fn (x))
14699 CASE_CFN_FABS:
14700 CASE_CFN_FABS_FN:
14701 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14702 CASE_CFN_FMAX:
14703 CASE_CFN_FMAX_FN:
14704 CASE_CFN_FMIN:
14705 CASE_CFN_FMIN_FN:
14706 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14707 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14708 default:
14709 return false;
14712 default:
14713 return false;
14717 /* Return true if expression X evaluates to an infinity.
14718 This function returns false for integer expressions. */
14720 bool
14721 tree_expr_infinite_p (const_tree x)
14723 if (!HONOR_INFINITIES (x))
14724 return false;
14725 switch (TREE_CODE (x))
14727 case REAL_CST:
14728 return real_isinf (TREE_REAL_CST_PTR (x));
14729 case ABS_EXPR:
14730 case NEGATE_EXPR:
14731 case NON_LVALUE_EXPR:
14732 case SAVE_EXPR:
14733 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14734 case COND_EXPR:
14735 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14736 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14737 default:
14738 return false;
14742 /* Return true if expression X could evaluate to an infinity.
14743 This function returns false for integer expressions, and returns
14744 true if uncertain. */
14746 bool
14747 tree_expr_maybe_infinite_p (const_tree x)
14749 if (!HONOR_INFINITIES (x))
14750 return false;
14751 switch (TREE_CODE (x))
14753 case REAL_CST:
14754 return real_isinf (TREE_REAL_CST_PTR (x));
14755 case FLOAT_EXPR:
14756 return false;
14757 case ABS_EXPR:
14758 case NEGATE_EXPR:
14759 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14760 case COND_EXPR:
14761 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14762 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14763 default:
14764 return true;
14768 /* Return true if expression X evaluates to a signaling NaN.
14769 This function returns false for integer expressions. */
14771 bool
14772 tree_expr_signaling_nan_p (const_tree x)
14774 if (!HONOR_SNANS (x))
14775 return false;
14776 switch (TREE_CODE (x))
14778 case REAL_CST:
14779 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14780 case NON_LVALUE_EXPR:
14781 case SAVE_EXPR:
14782 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14783 case COND_EXPR:
14784 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14785 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14786 default:
14787 return false;
14791 /* Return true if expression X could evaluate to a signaling NaN.
14792 This function returns false for integer expressions, and returns
14793 true if uncertain. */
14795 bool
14796 tree_expr_maybe_signaling_nan_p (const_tree x)
14798 if (!HONOR_SNANS (x))
14799 return false;
14800 switch (TREE_CODE (x))
14802 case REAL_CST:
14803 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14804 case FLOAT_EXPR:
14805 return false;
14806 case ABS_EXPR:
14807 case CONVERT_EXPR:
14808 case NEGATE_EXPR:
14809 case NON_LVALUE_EXPR:
14810 case SAVE_EXPR:
14811 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14812 case MIN_EXPR:
14813 case MAX_EXPR:
14814 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14815 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14816 case COND_EXPR:
14817 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14818 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14819 case CALL_EXPR:
14820 switch (get_call_combined_fn (x))
14822 CASE_CFN_FABS:
14823 CASE_CFN_FABS_FN:
14824 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14825 CASE_CFN_FMAX:
14826 CASE_CFN_FMAX_FN:
14827 CASE_CFN_FMIN:
14828 CASE_CFN_FMIN_FN:
14829 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14830 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14831 default:
14832 return true;
14834 default:
14835 return true;
14839 /* Return true if expression X evaluates to a NaN.
14840 This function returns false for integer expressions. */
14842 bool
14843 tree_expr_nan_p (const_tree x)
14845 if (!HONOR_NANS (x))
14846 return false;
14847 switch (TREE_CODE (x))
14849 case REAL_CST:
14850 return real_isnan (TREE_REAL_CST_PTR (x));
14851 case NON_LVALUE_EXPR:
14852 case SAVE_EXPR:
14853 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14854 case COND_EXPR:
14855 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14856 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14857 default:
14858 return false;
14862 /* Return true if expression X could evaluate to a NaN.
14863 This function returns false for integer expressions, and returns
14864 true if uncertain. */
14866 bool
14867 tree_expr_maybe_nan_p (const_tree x)
14869 if (!HONOR_NANS (x))
14870 return false;
14871 switch (TREE_CODE (x))
14873 case REAL_CST:
14874 return real_isnan (TREE_REAL_CST_PTR (x));
14875 case FLOAT_EXPR:
14876 return false;
14877 case PLUS_EXPR:
14878 case MINUS_EXPR:
14879 case MULT_EXPR:
14880 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14881 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14882 case ABS_EXPR:
14883 case CONVERT_EXPR:
14884 case NEGATE_EXPR:
14885 case NON_LVALUE_EXPR:
14886 case SAVE_EXPR:
14887 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14888 case MIN_EXPR:
14889 case MAX_EXPR:
14890 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14891 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14892 case COND_EXPR:
14893 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14894 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14895 case CALL_EXPR:
14896 switch (get_call_combined_fn (x))
14898 CASE_CFN_FABS:
14899 CASE_CFN_FABS_FN:
14900 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14901 CASE_CFN_FMAX:
14902 CASE_CFN_FMAX_FN:
14903 CASE_CFN_FMIN:
14904 CASE_CFN_FMIN_FN:
14905 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14906 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14907 default:
14908 return true;
14910 default:
14911 return true;
14915 /* Return true if expression X could evaluate to -0.0.
14916 This function returns true if uncertain. */
14918 bool
14919 tree_expr_maybe_real_minus_zero_p (const_tree x)
14921 if (!HONOR_SIGNED_ZEROS (x))
14922 return false;
14923 switch (TREE_CODE (x))
14925 case REAL_CST:
14926 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14927 case INTEGER_CST:
14928 case FLOAT_EXPR:
14929 case ABS_EXPR:
14930 return false;
14931 case NON_LVALUE_EXPR:
14932 case SAVE_EXPR:
14933 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14934 case COND_EXPR:
14935 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14936 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14937 case CALL_EXPR:
14938 switch (get_call_combined_fn (x))
14940 CASE_CFN_FABS:
14941 CASE_CFN_FABS_FN:
14942 return false;
14943 default:
14944 break;
14946 default:
14947 break;
14949 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14950 * but currently those predicates require tree and not const_tree. */
14951 return true;
14954 #define tree_expr_nonnegative_warnv_p(X, Y) \
14955 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14957 #define RECURSE(X) \
14958 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14960 /* Return true if CODE or TYPE is known to be non-negative. */
14962 static bool
14963 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14965 if (!VECTOR_TYPE_P (type)
14966 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14967 && truth_value_p (code))
14968 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14969 have a signed:1 type (where the value is -1 and 0). */
14970 return true;
14971 return false;
14974 /* Return true if (CODE OP0) is known to be non-negative. If the return
14975 value is based on the assumption that signed overflow is undefined,
14976 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14977 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14979 bool
14980 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14981 bool *strict_overflow_p, int depth)
14983 if (TYPE_UNSIGNED (type))
14984 return true;
14986 switch (code)
14988 case ABS_EXPR:
14989 /* We can't return 1 if flag_wrapv is set because
14990 ABS_EXPR<INT_MIN> = INT_MIN. */
14991 if (!ANY_INTEGRAL_TYPE_P (type))
14992 return true;
14993 if (TYPE_OVERFLOW_UNDEFINED (type))
14995 *strict_overflow_p = true;
14996 return true;
14998 break;
15000 case NON_LVALUE_EXPR:
15001 case FLOAT_EXPR:
15002 case FIX_TRUNC_EXPR:
15003 return RECURSE (op0);
15005 CASE_CONVERT:
15007 tree inner_type = TREE_TYPE (op0);
15008 tree outer_type = type;
15010 if (SCALAR_FLOAT_TYPE_P (outer_type))
15012 if (SCALAR_FLOAT_TYPE_P (inner_type))
15013 return RECURSE (op0);
15014 if (INTEGRAL_TYPE_P (inner_type))
15016 if (TYPE_UNSIGNED (inner_type))
15017 return true;
15018 return RECURSE (op0);
15021 else if (INTEGRAL_TYPE_P (outer_type))
15023 if (SCALAR_FLOAT_TYPE_P (inner_type))
15024 return RECURSE (op0);
15025 if (INTEGRAL_TYPE_P (inner_type))
15026 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15027 && TYPE_UNSIGNED (inner_type);
15030 break;
15032 default:
15033 return tree_simple_nonnegative_warnv_p (code, type);
15036 /* We don't know sign of `t', so be conservative and return false. */
15037 return false;
15040 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15041 value is based on the assumption that signed overflow is undefined,
15042 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15043 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15045 bool
15046 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15047 tree op1, bool *strict_overflow_p,
15048 int depth)
15050 if (TYPE_UNSIGNED (type))
15051 return true;
15053 switch (code)
15055 case POINTER_PLUS_EXPR:
15056 case PLUS_EXPR:
15057 if (FLOAT_TYPE_P (type))
15058 return RECURSE (op0) && RECURSE (op1);
15060 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15061 both unsigned and at least 2 bits shorter than the result. */
15062 if (TREE_CODE (type) == INTEGER_TYPE
15063 && TREE_CODE (op0) == NOP_EXPR
15064 && TREE_CODE (op1) == NOP_EXPR)
15066 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15067 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15068 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15069 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15071 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15072 TYPE_PRECISION (inner2)) + 1;
15073 return prec < TYPE_PRECISION (type);
15076 break;
15078 case MULT_EXPR:
15079 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15081 /* x * x is always non-negative for floating point x
15082 or without overflow. */
15083 if (operand_equal_p (op0, op1, 0)
15084 || (RECURSE (op0) && RECURSE (op1)))
15086 if (ANY_INTEGRAL_TYPE_P (type)
15087 && TYPE_OVERFLOW_UNDEFINED (type))
15088 *strict_overflow_p = true;
15089 return true;
15093 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15094 both unsigned and their total bits is shorter than the result. */
15095 if (TREE_CODE (type) == INTEGER_TYPE
15096 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15097 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15099 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15100 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15101 : TREE_TYPE (op0);
15102 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15103 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15104 : TREE_TYPE (op1);
15106 bool unsigned0 = TYPE_UNSIGNED (inner0);
15107 bool unsigned1 = TYPE_UNSIGNED (inner1);
15109 if (TREE_CODE (op0) == INTEGER_CST)
15110 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15112 if (TREE_CODE (op1) == INTEGER_CST)
15113 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15115 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15116 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15118 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15119 ? tree_int_cst_min_precision (op0, UNSIGNED)
15120 : TYPE_PRECISION (inner0);
15122 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15123 ? tree_int_cst_min_precision (op1, UNSIGNED)
15124 : TYPE_PRECISION (inner1);
15126 return precision0 + precision1 < TYPE_PRECISION (type);
15129 return false;
15131 case BIT_AND_EXPR:
15132 return RECURSE (op0) || RECURSE (op1);
15134 case MAX_EXPR:
15135 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
15136 things. */
15137 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
15138 return RECURSE (op0) && RECURSE (op1);
15139 return RECURSE (op0) || RECURSE (op1);
15141 case BIT_IOR_EXPR:
15142 case BIT_XOR_EXPR:
15143 case MIN_EXPR:
15144 case RDIV_EXPR:
15145 case TRUNC_DIV_EXPR:
15146 case CEIL_DIV_EXPR:
15147 case FLOOR_DIV_EXPR:
15148 case ROUND_DIV_EXPR:
15149 return RECURSE (op0) && RECURSE (op1);
15151 case TRUNC_MOD_EXPR:
15152 return RECURSE (op0);
15154 case FLOOR_MOD_EXPR:
15155 return RECURSE (op1);
15157 case CEIL_MOD_EXPR:
15158 case ROUND_MOD_EXPR:
15159 default:
15160 return tree_simple_nonnegative_warnv_p (code, type);
15163 /* We don't know sign of `t', so be conservative and return false. */
15164 return false;
15167 /* Return true if T is known to be non-negative. If the return
15168 value is based on the assumption that signed overflow is undefined,
15169 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15170 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15172 bool
15173 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15175 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15176 return true;
15178 switch (TREE_CODE (t))
15180 case INTEGER_CST:
15181 return tree_int_cst_sgn (t) >= 0;
15183 case REAL_CST:
15184 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15186 case FIXED_CST:
15187 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15189 case COND_EXPR:
15190 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15192 case SSA_NAME:
15193 /* Limit the depth of recursion to avoid quadratic behavior.
15194 This is expected to catch almost all occurrences in practice.
15195 If this code misses important cases that unbounded recursion
15196 would not, passes that need this information could be revised
15197 to provide it through dataflow propagation. */
15198 return (!name_registered_for_update_p (t)
15199 && depth < param_max_ssa_name_query_depth
15200 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15201 strict_overflow_p, depth));
15203 default:
15204 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15208 /* Return true if T is known to be non-negative. If the return
15209 value is based on the assumption that signed overflow is undefined,
15210 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15211 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15213 bool
15214 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15215 bool *strict_overflow_p, int depth)
15217 switch (fn)
15219 CASE_CFN_ACOS:
15220 CASE_CFN_ACOS_FN:
15221 CASE_CFN_ACOSH:
15222 CASE_CFN_ACOSH_FN:
15223 CASE_CFN_CABS:
15224 CASE_CFN_CABS_FN:
15225 CASE_CFN_COSH:
15226 CASE_CFN_COSH_FN:
15227 CASE_CFN_ERFC:
15228 CASE_CFN_ERFC_FN:
15229 CASE_CFN_EXP:
15230 CASE_CFN_EXP_FN:
15231 CASE_CFN_EXP10:
15232 CASE_CFN_EXP2:
15233 CASE_CFN_EXP2_FN:
15234 CASE_CFN_FABS:
15235 CASE_CFN_FABS_FN:
15236 CASE_CFN_FDIM:
15237 CASE_CFN_FDIM_FN:
15238 CASE_CFN_HYPOT:
15239 CASE_CFN_HYPOT_FN:
15240 CASE_CFN_POW10:
15241 CASE_CFN_FFS:
15242 CASE_CFN_PARITY:
15243 CASE_CFN_POPCOUNT:
15244 CASE_CFN_CLZ:
15245 CASE_CFN_CLRSB:
15246 case CFN_BUILT_IN_BSWAP16:
15247 case CFN_BUILT_IN_BSWAP32:
15248 case CFN_BUILT_IN_BSWAP64:
15249 case CFN_BUILT_IN_BSWAP128:
15250 /* Always true. */
15251 return true;
15253 CASE_CFN_SQRT:
15254 CASE_CFN_SQRT_FN:
15255 /* sqrt(-0.0) is -0.0. */
15256 if (!HONOR_SIGNED_ZEROS (type))
15257 return true;
15258 return RECURSE (arg0);
15260 CASE_CFN_ASINH:
15261 CASE_CFN_ASINH_FN:
15262 CASE_CFN_ATAN:
15263 CASE_CFN_ATAN_FN:
15264 CASE_CFN_ATANH:
15265 CASE_CFN_ATANH_FN:
15266 CASE_CFN_CBRT:
15267 CASE_CFN_CBRT_FN:
15268 CASE_CFN_CEIL:
15269 CASE_CFN_CEIL_FN:
15270 CASE_CFN_ERF:
15271 CASE_CFN_ERF_FN:
15272 CASE_CFN_EXPM1:
15273 CASE_CFN_EXPM1_FN:
15274 CASE_CFN_FLOOR:
15275 CASE_CFN_FLOOR_FN:
15276 CASE_CFN_FMOD:
15277 CASE_CFN_FMOD_FN:
15278 CASE_CFN_FREXP:
15279 CASE_CFN_FREXP_FN:
15280 CASE_CFN_ICEIL:
15281 CASE_CFN_IFLOOR:
15282 CASE_CFN_IRINT:
15283 CASE_CFN_IROUND:
15284 CASE_CFN_LCEIL:
15285 CASE_CFN_LDEXP:
15286 CASE_CFN_LFLOOR:
15287 CASE_CFN_LLCEIL:
15288 CASE_CFN_LLFLOOR:
15289 CASE_CFN_LLRINT:
15290 CASE_CFN_LLRINT_FN:
15291 CASE_CFN_LLROUND:
15292 CASE_CFN_LLROUND_FN:
15293 CASE_CFN_LRINT:
15294 CASE_CFN_LRINT_FN:
15295 CASE_CFN_LROUND:
15296 CASE_CFN_LROUND_FN:
15297 CASE_CFN_MODF:
15298 CASE_CFN_MODF_FN:
15299 CASE_CFN_NEARBYINT:
15300 CASE_CFN_NEARBYINT_FN:
15301 CASE_CFN_RINT:
15302 CASE_CFN_RINT_FN:
15303 CASE_CFN_ROUND:
15304 CASE_CFN_ROUND_FN:
15305 CASE_CFN_ROUNDEVEN:
15306 CASE_CFN_ROUNDEVEN_FN:
15307 CASE_CFN_SCALB:
15308 CASE_CFN_SCALBLN:
15309 CASE_CFN_SCALBLN_FN:
15310 CASE_CFN_SCALBN:
15311 CASE_CFN_SCALBN_FN:
15312 CASE_CFN_SIGNBIT:
15313 CASE_CFN_SIGNIFICAND:
15314 CASE_CFN_SINH:
15315 CASE_CFN_SINH_FN:
15316 CASE_CFN_TANH:
15317 CASE_CFN_TANH_FN:
15318 CASE_CFN_TRUNC:
15319 CASE_CFN_TRUNC_FN:
15320 /* True if the 1st argument is nonnegative. */
15321 return RECURSE (arg0);
15323 CASE_CFN_FMAX:
15324 CASE_CFN_FMAX_FN:
15325 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15326 things. In the presence of sNaNs, we're only guaranteed to be
15327 non-negative if both operands are non-negative. In the presence
15328 of qNaNs, we're non-negative if either operand is non-negative
15329 and can't be a qNaN, or if both operands are non-negative. */
15330 if (tree_expr_maybe_signaling_nan_p (arg0) ||
15331 tree_expr_maybe_signaling_nan_p (arg1))
15332 return RECURSE (arg0) && RECURSE (arg1);
15333 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15334 || RECURSE (arg1))
15335 : (RECURSE (arg1)
15336 && !tree_expr_maybe_nan_p (arg1));
15338 CASE_CFN_FMIN:
15339 CASE_CFN_FMIN_FN:
15340 /* True if the 1st AND 2nd arguments are nonnegative. */
15341 return RECURSE (arg0) && RECURSE (arg1);
15343 CASE_CFN_COPYSIGN:
15344 CASE_CFN_COPYSIGN_FN:
15345 /* True if the 2nd argument is nonnegative. */
15346 return RECURSE (arg1);
15348 CASE_CFN_POWI:
15349 /* True if the 1st argument is nonnegative or the second
15350 argument is an even integer. */
15351 if (TREE_CODE (arg1) == INTEGER_CST
15352 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15353 return true;
15354 return RECURSE (arg0);
15356 CASE_CFN_POW:
15357 CASE_CFN_POW_FN:
15358 /* True if the 1st argument is nonnegative or the second
15359 argument is an even integer valued real. */
15360 if (TREE_CODE (arg1) == REAL_CST)
15362 REAL_VALUE_TYPE c;
15363 HOST_WIDE_INT n;
15365 c = TREE_REAL_CST (arg1);
15366 n = real_to_integer (&c);
15367 if ((n & 1) == 0)
15369 REAL_VALUE_TYPE cint;
15370 real_from_integer (&cint, VOIDmode, n, SIGNED);
15371 if (real_identical (&c, &cint))
15372 return true;
15375 return RECURSE (arg0);
15377 default:
15378 break;
15380 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
15383 /* Return true if T is known to be non-negative. If the return
15384 value is based on the assumption that signed overflow is undefined,
15385 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15386 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15388 static bool
15389 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15391 enum tree_code code = TREE_CODE (t);
15392 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15393 return true;
15395 switch (code)
15397 case TARGET_EXPR:
15399 tree temp = TARGET_EXPR_SLOT (t);
15400 t = TARGET_EXPR_INITIAL (t);
15402 /* If the initializer is non-void, then it's a normal expression
15403 that will be assigned to the slot. */
15404 if (!VOID_TYPE_P (TREE_TYPE (t)))
15405 return RECURSE (t);
15407 /* Otherwise, the initializer sets the slot in some way. One common
15408 way is an assignment statement at the end of the initializer. */
15409 while (1)
15411 if (TREE_CODE (t) == BIND_EXPR)
15412 t = expr_last (BIND_EXPR_BODY (t));
15413 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15414 || TREE_CODE (t) == TRY_CATCH_EXPR)
15415 t = expr_last (TREE_OPERAND (t, 0));
15416 else if (TREE_CODE (t) == STATEMENT_LIST)
15417 t = expr_last (t);
15418 else
15419 break;
15421 if (TREE_CODE (t) == MODIFY_EXPR
15422 && TREE_OPERAND (t, 0) == temp)
15423 return RECURSE (TREE_OPERAND (t, 1));
15425 return false;
15428 case CALL_EXPR:
15430 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15431 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15433 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15434 get_call_combined_fn (t),
15435 arg0,
15436 arg1,
15437 strict_overflow_p, depth);
15439 case COMPOUND_EXPR:
15440 case MODIFY_EXPR:
15441 return RECURSE (TREE_OPERAND (t, 1));
15443 case BIND_EXPR:
15444 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15446 case SAVE_EXPR:
15447 return RECURSE (TREE_OPERAND (t, 0));
15449 default:
15450 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15454 #undef RECURSE
15455 #undef tree_expr_nonnegative_warnv_p
15457 /* Return true if T is known to be non-negative. If the return
15458 value is based on the assumption that signed overflow is undefined,
15459 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15460 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15462 bool
15463 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15465 enum tree_code code;
15466 if (t == error_mark_node)
15467 return false;
15469 code = TREE_CODE (t);
15470 switch (TREE_CODE_CLASS (code))
15472 case tcc_binary:
15473 case tcc_comparison:
15474 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15475 TREE_TYPE (t),
15476 TREE_OPERAND (t, 0),
15477 TREE_OPERAND (t, 1),
15478 strict_overflow_p, depth);
15480 case tcc_unary:
15481 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15482 TREE_TYPE (t),
15483 TREE_OPERAND (t, 0),
15484 strict_overflow_p, depth);
15486 case tcc_constant:
15487 case tcc_declaration:
15488 case tcc_reference:
15489 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15491 default:
15492 break;
15495 switch (code)
15497 case TRUTH_AND_EXPR:
15498 case TRUTH_OR_EXPR:
15499 case TRUTH_XOR_EXPR:
15500 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15501 TREE_TYPE (t),
15502 TREE_OPERAND (t, 0),
15503 TREE_OPERAND (t, 1),
15504 strict_overflow_p, depth);
15505 case TRUTH_NOT_EXPR:
15506 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15507 TREE_TYPE (t),
15508 TREE_OPERAND (t, 0),
15509 strict_overflow_p, depth);
15511 case COND_EXPR:
15512 case CONSTRUCTOR:
15513 case OBJ_TYPE_REF:
15514 case ADDR_EXPR:
15515 case WITH_SIZE_EXPR:
15516 case SSA_NAME:
15517 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15519 default:
15520 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15524 /* Return true if `t' is known to be non-negative. Handle warnings
15525 about undefined signed overflow. */
15527 bool
15528 tree_expr_nonnegative_p (tree t)
15530 bool ret, strict_overflow_p;
15532 strict_overflow_p = false;
15533 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15534 if (strict_overflow_p)
15535 fold_overflow_warning (("assuming signed overflow does not occur when "
15536 "determining that expression is always "
15537 "non-negative"),
15538 WARN_STRICT_OVERFLOW_MISC);
15539 return ret;
15543 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15544 For floating point we further ensure that T is not denormal.
15545 Similar logic is present in nonzero_address in rtlanal.h.
15547 If the return value is based on the assumption that signed overflow
15548 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15549 change *STRICT_OVERFLOW_P. */
15551 bool
15552 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15553 bool *strict_overflow_p)
15555 switch (code)
15557 case ABS_EXPR:
15558 return tree_expr_nonzero_warnv_p (op0,
15559 strict_overflow_p);
15561 case NOP_EXPR:
15563 tree inner_type = TREE_TYPE (op0);
15564 tree outer_type = type;
15566 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15567 && tree_expr_nonzero_warnv_p (op0,
15568 strict_overflow_p));
15570 break;
15572 case NON_LVALUE_EXPR:
15573 return tree_expr_nonzero_warnv_p (op0,
15574 strict_overflow_p);
15576 default:
15577 break;
15580 return false;
15583 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15584 For floating point we further ensure that T is not denormal.
15585 Similar logic is present in nonzero_address in rtlanal.h.
15587 If the return value is based on the assumption that signed overflow
15588 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15589 change *STRICT_OVERFLOW_P. */
15591 bool
15592 tree_binary_nonzero_warnv_p (enum tree_code code,
15593 tree type,
15594 tree op0,
15595 tree op1, bool *strict_overflow_p)
15597 bool sub_strict_overflow_p;
15598 switch (code)
15600 case POINTER_PLUS_EXPR:
15601 case PLUS_EXPR:
15602 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15604 /* With the presence of negative values it is hard
15605 to say something. */
15606 sub_strict_overflow_p = false;
15607 if (!tree_expr_nonnegative_warnv_p (op0,
15608 &sub_strict_overflow_p)
15609 || !tree_expr_nonnegative_warnv_p (op1,
15610 &sub_strict_overflow_p))
15611 return false;
15612 /* One of operands must be positive and the other non-negative. */
15613 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15614 overflows, on a twos-complement machine the sum of two
15615 nonnegative numbers can never be zero. */
15616 return (tree_expr_nonzero_warnv_p (op0,
15617 strict_overflow_p)
15618 || tree_expr_nonzero_warnv_p (op1,
15619 strict_overflow_p));
15621 break;
15623 case MULT_EXPR:
15624 if (TYPE_OVERFLOW_UNDEFINED (type))
15626 if (tree_expr_nonzero_warnv_p (op0,
15627 strict_overflow_p)
15628 && tree_expr_nonzero_warnv_p (op1,
15629 strict_overflow_p))
15631 *strict_overflow_p = true;
15632 return true;
15635 break;
15637 case MIN_EXPR:
15638 sub_strict_overflow_p = false;
15639 if (tree_expr_nonzero_warnv_p (op0,
15640 &sub_strict_overflow_p)
15641 && tree_expr_nonzero_warnv_p (op1,
15642 &sub_strict_overflow_p))
15644 if (sub_strict_overflow_p)
15645 *strict_overflow_p = true;
15647 break;
15649 case MAX_EXPR:
15650 sub_strict_overflow_p = false;
15651 if (tree_expr_nonzero_warnv_p (op0,
15652 &sub_strict_overflow_p))
15654 if (sub_strict_overflow_p)
15655 *strict_overflow_p = true;
15657 /* When both operands are nonzero, then MAX must be too. */
15658 if (tree_expr_nonzero_warnv_p (op1,
15659 strict_overflow_p))
15660 return true;
15662 /* MAX where operand 0 is positive is positive. */
15663 return tree_expr_nonnegative_warnv_p (op0,
15664 strict_overflow_p);
15666 /* MAX where operand 1 is positive is positive. */
15667 else if (tree_expr_nonzero_warnv_p (op1,
15668 &sub_strict_overflow_p)
15669 && tree_expr_nonnegative_warnv_p (op1,
15670 &sub_strict_overflow_p))
15672 if (sub_strict_overflow_p)
15673 *strict_overflow_p = true;
15674 return true;
15676 break;
15678 case BIT_IOR_EXPR:
15679 return (tree_expr_nonzero_warnv_p (op1,
15680 strict_overflow_p)
15681 || tree_expr_nonzero_warnv_p (op0,
15682 strict_overflow_p));
15684 default:
15685 break;
15688 return false;
15691 /* Return true when T is an address and is known to be nonzero.
15692 For floating point we further ensure that T is not denormal.
15693 Similar logic is present in nonzero_address in rtlanal.h.
15695 If the return value is based on the assumption that signed overflow
15696 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15697 change *STRICT_OVERFLOW_P. */
15699 bool
15700 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15702 bool sub_strict_overflow_p;
15703 switch (TREE_CODE (t))
15705 case INTEGER_CST:
15706 return !integer_zerop (t);
15708 case ADDR_EXPR:
15710 tree base = TREE_OPERAND (t, 0);
15712 if (!DECL_P (base))
15713 base = get_base_address (base);
15715 if (base && TREE_CODE (base) == TARGET_EXPR)
15716 base = TARGET_EXPR_SLOT (base);
15718 if (!base)
15719 return false;
15721 /* For objects in symbol table check if we know they are non-zero.
15722 Don't do anything for variables and functions before symtab is built;
15723 it is quite possible that they will be declared weak later. */
15724 int nonzero_addr = maybe_nonzero_address (base);
15725 if (nonzero_addr >= 0)
15726 return nonzero_addr;
15728 /* Constants are never weak. */
15729 if (CONSTANT_CLASS_P (base))
15730 return true;
15732 return false;
15735 case COND_EXPR:
15736 sub_strict_overflow_p = false;
15737 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15738 &sub_strict_overflow_p)
15739 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15740 &sub_strict_overflow_p))
15742 if (sub_strict_overflow_p)
15743 *strict_overflow_p = true;
15744 return true;
15746 break;
15748 case SSA_NAME:
15749 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15750 break;
15751 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15753 default:
15754 break;
15756 return false;
15759 #define integer_valued_real_p(X) \
15760 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15762 #define RECURSE(X) \
15763 ((integer_valued_real_p) (X, depth + 1))
15765 /* Return true if the floating point result of (CODE OP0) has an
15766 integer value. We also allow +Inf, -Inf and NaN to be considered
15767 integer values. Return false for signaling NaN.
15769 DEPTH is the current nesting depth of the query. */
15771 bool
15772 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15774 switch (code)
15776 case FLOAT_EXPR:
15777 return true;
15779 case ABS_EXPR:
15780 return RECURSE (op0);
15782 CASE_CONVERT:
15784 tree type = TREE_TYPE (op0);
15785 if (TREE_CODE (type) == INTEGER_TYPE)
15786 return true;
15787 if (SCALAR_FLOAT_TYPE_P (type))
15788 return RECURSE (op0);
15789 break;
15792 default:
15793 break;
15795 return false;
15798 /* Return true if the floating point result of (CODE OP0 OP1) has an
15799 integer value. We also allow +Inf, -Inf and NaN to be considered
15800 integer values. Return false for signaling NaN.
15802 DEPTH is the current nesting depth of the query. */
15804 bool
15805 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15807 switch (code)
15809 case PLUS_EXPR:
15810 case MINUS_EXPR:
15811 case MULT_EXPR:
15812 case MIN_EXPR:
15813 case MAX_EXPR:
15814 return RECURSE (op0) && RECURSE (op1);
15816 default:
15817 break;
15819 return false;
15822 /* Return true if the floating point result of calling FNDECL with arguments
15823 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15824 considered integer values. Return false for signaling NaN. If FNDECL
15825 takes fewer than 2 arguments, the remaining ARGn are null.
15827 DEPTH is the current nesting depth of the query. */
15829 bool
15830 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15832 switch (fn)
15834 CASE_CFN_CEIL:
15835 CASE_CFN_CEIL_FN:
15836 CASE_CFN_FLOOR:
15837 CASE_CFN_FLOOR_FN:
15838 CASE_CFN_NEARBYINT:
15839 CASE_CFN_NEARBYINT_FN:
15840 CASE_CFN_RINT:
15841 CASE_CFN_RINT_FN:
15842 CASE_CFN_ROUND:
15843 CASE_CFN_ROUND_FN:
15844 CASE_CFN_ROUNDEVEN:
15845 CASE_CFN_ROUNDEVEN_FN:
15846 CASE_CFN_TRUNC:
15847 CASE_CFN_TRUNC_FN:
15848 return true;
15850 CASE_CFN_FMIN:
15851 CASE_CFN_FMIN_FN:
15852 CASE_CFN_FMAX:
15853 CASE_CFN_FMAX_FN:
15854 return RECURSE (arg0) && RECURSE (arg1);
15856 default:
15857 break;
15859 return false;
15862 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15863 has an integer value. We also allow +Inf, -Inf and NaN to be
15864 considered integer values. Return false for signaling NaN.
15866 DEPTH is the current nesting depth of the query. */
15868 bool
15869 integer_valued_real_single_p (tree t, int depth)
15871 switch (TREE_CODE (t))
15873 case REAL_CST:
15874 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15876 case COND_EXPR:
15877 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15879 case SSA_NAME:
15880 /* Limit the depth of recursion to avoid quadratic behavior.
15881 This is expected to catch almost all occurrences in practice.
15882 If this code misses important cases that unbounded recursion
15883 would not, passes that need this information could be revised
15884 to provide it through dataflow propagation. */
15885 return (!name_registered_for_update_p (t)
15886 && depth < param_max_ssa_name_query_depth
15887 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15888 depth));
15890 default:
15891 break;
15893 return false;
15896 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15897 has an integer value. We also allow +Inf, -Inf and NaN to be
15898 considered integer values. Return false for signaling NaN.
15900 DEPTH is the current nesting depth of the query. */
15902 static bool
15903 integer_valued_real_invalid_p (tree t, int depth)
15905 switch (TREE_CODE (t))
15907 case COMPOUND_EXPR:
15908 case MODIFY_EXPR:
15909 case BIND_EXPR:
15910 return RECURSE (TREE_OPERAND (t, 1));
15912 case SAVE_EXPR:
15913 return RECURSE (TREE_OPERAND (t, 0));
15915 default:
15916 break;
15918 return false;
15921 #undef RECURSE
15922 #undef integer_valued_real_p
15924 /* Return true if the floating point expression T has an integer value.
15925 We also allow +Inf, -Inf and NaN to be considered integer values.
15926 Return false for signaling NaN.
15928 DEPTH is the current nesting depth of the query. */
15930 bool
15931 integer_valued_real_p (tree t, int depth)
15933 if (t == error_mark_node)
15934 return false;
15936 STRIP_ANY_LOCATION_WRAPPER (t);
15938 tree_code code = TREE_CODE (t);
15939 switch (TREE_CODE_CLASS (code))
15941 case tcc_binary:
15942 case tcc_comparison:
15943 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15944 TREE_OPERAND (t, 1), depth);
15946 case tcc_unary:
15947 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15949 case tcc_constant:
15950 case tcc_declaration:
15951 case tcc_reference:
15952 return integer_valued_real_single_p (t, depth);
15954 default:
15955 break;
15958 switch (code)
15960 case COND_EXPR:
15961 case SSA_NAME:
15962 return integer_valued_real_single_p (t, depth);
15964 case CALL_EXPR:
15966 tree arg0 = (call_expr_nargs (t) > 0
15967 ? CALL_EXPR_ARG (t, 0)
15968 : NULL_TREE);
15969 tree arg1 = (call_expr_nargs (t) > 1
15970 ? CALL_EXPR_ARG (t, 1)
15971 : NULL_TREE);
15972 return integer_valued_real_call_p (get_call_combined_fn (t),
15973 arg0, arg1, depth);
15976 default:
15977 return integer_valued_real_invalid_p (t, depth);
15981 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15982 attempt to fold the expression to a constant without modifying TYPE,
15983 OP0 or OP1.
15985 If the expression could be simplified to a constant, then return
15986 the constant. If the expression would not be simplified to a
15987 constant, then return NULL_TREE. */
15989 tree
15990 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15992 tree tem = fold_binary (code, type, op0, op1);
15993 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15996 /* Given the components of a unary expression CODE, TYPE and OP0,
15997 attempt to fold the expression to a constant without modifying
15998 TYPE or OP0.
16000 If the expression could be simplified to a constant, then return
16001 the constant. If the expression would not be simplified to a
16002 constant, then return NULL_TREE. */
16004 tree
16005 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16007 tree tem = fold_unary (code, type, op0);
16008 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16011 /* If EXP represents referencing an element in a constant string
16012 (either via pointer arithmetic or array indexing), return the
16013 tree representing the value accessed, otherwise return NULL. */
16015 tree
16016 fold_read_from_constant_string (tree exp)
16018 if ((INDIRECT_REF_P (exp)
16019 || TREE_CODE (exp) == ARRAY_REF)
16020 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16022 tree exp1 = TREE_OPERAND (exp, 0);
16023 tree index;
16024 tree string;
16025 location_t loc = EXPR_LOCATION (exp);
16027 if (INDIRECT_REF_P (exp))
16028 string = string_constant (exp1, &index, NULL, NULL);
16029 else
16031 tree low_bound = array_ref_low_bound (exp);
16032 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16034 /* Optimize the special-case of a zero lower bound.
16036 We convert the low_bound to sizetype to avoid some problems
16037 with constant folding. (E.g. suppose the lower bound is 1,
16038 and its mode is QI. Without the conversion,l (ARRAY
16039 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16040 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16041 if (! integer_zerop (low_bound))
16042 index = size_diffop_loc (loc, index,
16043 fold_convert_loc (loc, sizetype, low_bound));
16045 string = exp1;
16048 scalar_int_mode char_mode;
16049 if (string
16050 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16051 && TREE_CODE (string) == STRING_CST
16052 && tree_fits_uhwi_p (index)
16053 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16054 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
16055 &char_mode)
16056 && GET_MODE_SIZE (char_mode) == 1)
16057 return build_int_cst_type (TREE_TYPE (exp),
16058 (TREE_STRING_POINTER (string)
16059 [TREE_INT_CST_LOW (index)]));
16061 return NULL;
16064 /* Folds a read from vector element at IDX of vector ARG. */
16066 tree
16067 fold_read_from_vector (tree arg, poly_uint64 idx)
16069 unsigned HOST_WIDE_INT i;
16070 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
16071 && known_ge (idx, 0u)
16072 && idx.is_constant (&i))
16074 if (TREE_CODE (arg) == VECTOR_CST)
16075 return VECTOR_CST_ELT (arg, i);
16076 else if (TREE_CODE (arg) == CONSTRUCTOR)
16078 if (CONSTRUCTOR_NELTS (arg)
16079 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
16080 return NULL_TREE;
16081 if (i >= CONSTRUCTOR_NELTS (arg))
16082 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
16083 return CONSTRUCTOR_ELT (arg, i)->value;
16086 return NULL_TREE;
16089 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16090 an integer constant, real, or fixed-point constant.
16092 TYPE is the type of the result. */
16094 static tree
16095 fold_negate_const (tree arg0, tree type)
16097 tree t = NULL_TREE;
16099 switch (TREE_CODE (arg0))
16101 case REAL_CST:
16102 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16103 break;
16105 case FIXED_CST:
16107 FIXED_VALUE_TYPE f;
16108 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16109 &(TREE_FIXED_CST (arg0)), NULL,
16110 TYPE_SATURATING (type));
16111 t = build_fixed (type, f);
16112 /* Propagate overflow flags. */
16113 if (overflow_p | TREE_OVERFLOW (arg0))
16114 TREE_OVERFLOW (t) = 1;
16115 break;
16118 default:
16119 if (poly_int_tree_p (arg0))
16121 wi::overflow_type overflow;
16122 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
16123 t = force_fit_type (type, res, 1,
16124 (overflow && ! TYPE_UNSIGNED (type))
16125 || TREE_OVERFLOW (arg0));
16126 break;
16129 gcc_unreachable ();
16132 return t;
16135 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16136 an integer constant or real constant.
16138 TYPE is the type of the result. */
16140 tree
16141 fold_abs_const (tree arg0, tree type)
16143 tree t = NULL_TREE;
16145 switch (TREE_CODE (arg0))
16147 case INTEGER_CST:
16149 /* If the value is unsigned or non-negative, then the absolute value
16150 is the same as the ordinary value. */
16151 wide_int val = wi::to_wide (arg0);
16152 wi::overflow_type overflow = wi::OVF_NONE;
16153 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
16156 /* If the value is negative, then the absolute value is
16157 its negation. */
16158 else
16159 val = wi::neg (val, &overflow);
16161 /* Force to the destination type, set TREE_OVERFLOW for signed
16162 TYPE only. */
16163 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
16165 break;
16167 case REAL_CST:
16168 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16169 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16170 else
16171 t = arg0;
16172 break;
16174 default:
16175 gcc_unreachable ();
16178 return t;
16181 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16182 constant. TYPE is the type of the result. */
16184 static tree
16185 fold_not_const (const_tree arg0, tree type)
16187 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16189 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
16192 /* Given CODE, a relational operator, the target type, TYPE and two
16193 constant operands OP0 and OP1, return the result of the
16194 relational operation. If the result is not a compile time
16195 constant, then return NULL_TREE. */
16197 static tree
16198 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16200 int result, invert;
16202 /* From here on, the only cases we handle are when the result is
16203 known to be a constant. */
16205 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16207 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16208 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16210 /* Handle the cases where either operand is a NaN. */
16211 if (real_isnan (c0) || real_isnan (c1))
16213 switch (code)
16215 case EQ_EXPR:
16216 case ORDERED_EXPR:
16217 result = 0;
16218 break;
16220 case NE_EXPR:
16221 case UNORDERED_EXPR:
16222 case UNLT_EXPR:
16223 case UNLE_EXPR:
16224 case UNGT_EXPR:
16225 case UNGE_EXPR:
16226 case UNEQ_EXPR:
16227 result = 1;
16228 break;
16230 case LT_EXPR:
16231 case LE_EXPR:
16232 case GT_EXPR:
16233 case GE_EXPR:
16234 case LTGT_EXPR:
16235 if (flag_trapping_math)
16236 return NULL_TREE;
16237 result = 0;
16238 break;
16240 default:
16241 gcc_unreachable ();
16244 return constant_boolean_node (result, type);
16247 return constant_boolean_node (real_compare (code, c0, c1), type);
16250 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16252 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16253 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16254 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16257 /* Handle equality/inequality of complex constants. */
16258 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16260 tree rcond = fold_relational_const (code, type,
16261 TREE_REALPART (op0),
16262 TREE_REALPART (op1));
16263 tree icond = fold_relational_const (code, type,
16264 TREE_IMAGPART (op0),
16265 TREE_IMAGPART (op1));
16266 if (code == EQ_EXPR)
16267 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16268 else if (code == NE_EXPR)
16269 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16270 else
16271 return NULL_TREE;
16274 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16276 if (!VECTOR_TYPE_P (type))
16278 /* Have vector comparison with scalar boolean result. */
16279 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16280 && known_eq (VECTOR_CST_NELTS (op0),
16281 VECTOR_CST_NELTS (op1)));
16282 unsigned HOST_WIDE_INT nunits;
16283 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16284 return NULL_TREE;
16285 for (unsigned i = 0; i < nunits; i++)
16287 tree elem0 = VECTOR_CST_ELT (op0, i);
16288 tree elem1 = VECTOR_CST_ELT (op1, i);
16289 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16290 if (tmp == NULL_TREE)
16291 return NULL_TREE;
16292 if (integer_zerop (tmp))
16293 return constant_boolean_node (code == NE_EXPR, type);
16295 return constant_boolean_node (code == EQ_EXPR, type);
16297 tree_vector_builder elts;
16298 if (!elts.new_binary_operation (type, op0, op1, false))
16299 return NULL_TREE;
16300 unsigned int count = elts.encoded_nelts ();
16301 for (unsigned i = 0; i < count; i++)
16303 tree elem_type = TREE_TYPE (type);
16304 tree elem0 = VECTOR_CST_ELT (op0, i);
16305 tree elem1 = VECTOR_CST_ELT (op1, i);
16307 tree tem = fold_relational_const (code, elem_type,
16308 elem0, elem1);
16310 if (tem == NULL_TREE)
16311 return NULL_TREE;
16313 elts.quick_push (build_int_cst (elem_type,
16314 integer_zerop (tem) ? 0 : -1));
16317 return elts.build ();
16320 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16322 To compute GT, swap the arguments and do LT.
16323 To compute GE, do LT and invert the result.
16324 To compute LE, swap the arguments, do LT and invert the result.
16325 To compute NE, do EQ and invert the result.
16327 Therefore, the code below must handle only EQ and LT. */
16329 if (code == LE_EXPR || code == GT_EXPR)
16331 std::swap (op0, op1);
16332 code = swap_tree_comparison (code);
16335 /* Note that it is safe to invert for real values here because we
16336 have already handled the one case that it matters. */
16338 invert = 0;
16339 if (code == NE_EXPR || code == GE_EXPR)
16341 invert = 1;
16342 code = invert_tree_comparison (code, false);
16345 /* Compute a result for LT or EQ if args permit;
16346 Otherwise return T. */
16347 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16349 if (code == EQ_EXPR)
16350 result = tree_int_cst_equal (op0, op1);
16351 else
16352 result = tree_int_cst_lt (op0, op1);
16354 else
16355 return NULL_TREE;
16357 if (invert)
16358 result ^= 1;
16359 return constant_boolean_node (result, type);
16362 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16363 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16364 itself. */
16366 tree
16367 fold_build_cleanup_point_expr (tree type, tree expr)
16369 /* If the expression does not have side effects then we don't have to wrap
16370 it with a cleanup point expression. */
16371 if (!TREE_SIDE_EFFECTS (expr))
16372 return expr;
16374 /* If the expression is a return, check to see if the expression inside the
16375 return has no side effects or the right hand side of the modify expression
16376 inside the return. If either don't have side effects set we don't need to
16377 wrap the expression in a cleanup point expression. Note we don't check the
16378 left hand side of the modify because it should always be a return decl. */
16379 if (TREE_CODE (expr) == RETURN_EXPR)
16381 tree op = TREE_OPERAND (expr, 0);
16382 if (!op || !TREE_SIDE_EFFECTS (op))
16383 return expr;
16384 op = TREE_OPERAND (op, 1);
16385 if (!TREE_SIDE_EFFECTS (op))
16386 return expr;
16389 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16392 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16393 of an indirection through OP0, or NULL_TREE if no simplification is
16394 possible. */
16396 tree
16397 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16399 tree sub = op0;
16400 tree subtype;
16401 poly_uint64 const_op01;
16403 STRIP_NOPS (sub);
16404 subtype = TREE_TYPE (sub);
16405 if (!POINTER_TYPE_P (subtype)
16406 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16407 return NULL_TREE;
16409 if (TREE_CODE (sub) == ADDR_EXPR)
16411 tree op = TREE_OPERAND (sub, 0);
16412 tree optype = TREE_TYPE (op);
16414 /* *&CONST_DECL -> to the value of the const decl. */
16415 if (TREE_CODE (op) == CONST_DECL)
16416 return DECL_INITIAL (op);
16417 /* *&p => p; make sure to handle *&"str"[cst] here. */
16418 if (type == optype)
16420 tree fop = fold_read_from_constant_string (op);
16421 if (fop)
16422 return fop;
16423 else
16424 return op;
16426 /* *(foo *)&fooarray => fooarray[0] */
16427 else if (TREE_CODE (optype) == ARRAY_TYPE
16428 && type == TREE_TYPE (optype)
16429 && (!in_gimple_form
16430 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16432 tree type_domain = TYPE_DOMAIN (optype);
16433 tree min_val = size_zero_node;
16434 if (type_domain && TYPE_MIN_VALUE (type_domain))
16435 min_val = TYPE_MIN_VALUE (type_domain);
16436 if (in_gimple_form
16437 && TREE_CODE (min_val) != INTEGER_CST)
16438 return NULL_TREE;
16439 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16440 NULL_TREE, NULL_TREE);
16442 /* *(foo *)&complexfoo => __real__ complexfoo */
16443 else if (TREE_CODE (optype) == COMPLEX_TYPE
16444 && type == TREE_TYPE (optype))
16445 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16446 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16447 else if (VECTOR_TYPE_P (optype)
16448 && type == TREE_TYPE (optype))
16450 tree part_width = TYPE_SIZE (type);
16451 tree index = bitsize_int (0);
16452 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16453 index);
16457 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16458 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16460 tree op00 = TREE_OPERAND (sub, 0);
16461 tree op01 = TREE_OPERAND (sub, 1);
16463 STRIP_NOPS (op00);
16464 if (TREE_CODE (op00) == ADDR_EXPR)
16466 tree op00type;
16467 op00 = TREE_OPERAND (op00, 0);
16468 op00type = TREE_TYPE (op00);
16470 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16471 if (VECTOR_TYPE_P (op00type)
16472 && type == TREE_TYPE (op00type)
16473 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16474 but we want to treat offsets with MSB set as negative.
16475 For the code below negative offsets are invalid and
16476 TYPE_SIZE of the element is something unsigned, so
16477 check whether op01 fits into poly_int64, which implies
16478 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16479 then just use poly_uint64 because we want to treat the
16480 value as unsigned. */
16481 && tree_fits_poly_int64_p (op01))
16483 tree part_width = TYPE_SIZE (type);
16484 poly_uint64 max_offset
16485 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16486 * TYPE_VECTOR_SUBPARTS (op00type));
16487 if (known_lt (const_op01, max_offset))
16489 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16490 return fold_build3_loc (loc,
16491 BIT_FIELD_REF, type, op00,
16492 part_width, index);
16495 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16496 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16497 && type == TREE_TYPE (op00type))
16499 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16500 const_op01))
16501 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16503 /* ((foo *)&fooarray)[1] => fooarray[1] */
16504 else if (TREE_CODE (op00type) == ARRAY_TYPE
16505 && type == TREE_TYPE (op00type))
16507 tree type_domain = TYPE_DOMAIN (op00type);
16508 tree min_val = size_zero_node;
16509 if (type_domain && TYPE_MIN_VALUE (type_domain))
16510 min_val = TYPE_MIN_VALUE (type_domain);
16511 poly_uint64 type_size, index;
16512 if (poly_int_tree_p (min_val)
16513 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16514 && multiple_p (const_op01, type_size, &index))
16516 poly_offset_int off = index + wi::to_poly_offset (min_val);
16517 op01 = wide_int_to_tree (sizetype, off);
16518 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16519 NULL_TREE, NULL_TREE);
16525 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16526 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16527 && type == TREE_TYPE (TREE_TYPE (subtype))
16528 && (!in_gimple_form
16529 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16531 tree type_domain;
16532 tree min_val = size_zero_node;
16533 sub = build_fold_indirect_ref_loc (loc, sub);
16534 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16535 if (type_domain && TYPE_MIN_VALUE (type_domain))
16536 min_val = TYPE_MIN_VALUE (type_domain);
16537 if (in_gimple_form
16538 && TREE_CODE (min_val) != INTEGER_CST)
16539 return NULL_TREE;
16540 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16541 NULL_TREE);
16544 return NULL_TREE;
16547 /* Builds an expression for an indirection through T, simplifying some
16548 cases. */
16550 tree
16551 build_fold_indirect_ref_loc (location_t loc, tree t)
16553 tree type = TREE_TYPE (TREE_TYPE (t));
16554 tree sub = fold_indirect_ref_1 (loc, type, t);
16556 if (sub)
16557 return sub;
16559 return build1_loc (loc, INDIRECT_REF, type, t);
16562 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16564 tree
16565 fold_indirect_ref_loc (location_t loc, tree t)
16567 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16569 if (sub)
16570 return sub;
16571 else
16572 return t;
16575 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16576 whose result is ignored. The type of the returned tree need not be
16577 the same as the original expression. */
16579 tree
16580 fold_ignored_result (tree t)
16582 if (!TREE_SIDE_EFFECTS (t))
16583 return integer_zero_node;
16585 for (;;)
16586 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16588 case tcc_unary:
16589 t = TREE_OPERAND (t, 0);
16590 break;
16592 case tcc_binary:
16593 case tcc_comparison:
16594 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16595 t = TREE_OPERAND (t, 0);
16596 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16597 t = TREE_OPERAND (t, 1);
16598 else
16599 return t;
16600 break;
16602 case tcc_expression:
16603 switch (TREE_CODE (t))
16605 case COMPOUND_EXPR:
16606 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16607 return t;
16608 t = TREE_OPERAND (t, 0);
16609 break;
16611 case COND_EXPR:
16612 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16613 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16614 return t;
16615 t = TREE_OPERAND (t, 0);
16616 break;
16618 default:
16619 return t;
16621 break;
16623 default:
16624 return t;
16628 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16630 tree
16631 round_up_loc (location_t loc, tree value, unsigned int divisor)
16633 tree div = NULL_TREE;
16635 if (divisor == 1)
16636 return value;
16638 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16639 have to do anything. Only do this when we are not given a const,
16640 because in that case, this check is more expensive than just
16641 doing it. */
16642 if (TREE_CODE (value) != INTEGER_CST)
16644 div = build_int_cst (TREE_TYPE (value), divisor);
16646 if (multiple_of_p (TREE_TYPE (value), value, div))
16647 return value;
16650 /* If divisor is a power of two, simplify this to bit manipulation. */
16651 if (pow2_or_zerop (divisor))
16653 if (TREE_CODE (value) == INTEGER_CST)
16655 wide_int val = wi::to_wide (value);
16656 bool overflow_p;
16658 if ((val & (divisor - 1)) == 0)
16659 return value;
16661 overflow_p = TREE_OVERFLOW (value);
16662 val += divisor - 1;
16663 val &= (int) -divisor;
16664 if (val == 0)
16665 overflow_p = true;
16667 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16669 else
16671 tree t;
16673 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16674 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16675 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16676 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16679 else
16681 if (!div)
16682 div = build_int_cst (TREE_TYPE (value), divisor);
16683 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16684 value = size_binop_loc (loc, MULT_EXPR, value, div);
16687 return value;
16690 /* Likewise, but round down. */
16692 tree
16693 round_down_loc (location_t loc, tree value, int divisor)
16695 tree div = NULL_TREE;
16697 gcc_assert (divisor > 0);
16698 if (divisor == 1)
16699 return value;
16701 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16702 have to do anything. Only do this when we are not given a const,
16703 because in that case, this check is more expensive than just
16704 doing it. */
16705 if (TREE_CODE (value) != INTEGER_CST)
16707 div = build_int_cst (TREE_TYPE (value), divisor);
16709 if (multiple_of_p (TREE_TYPE (value), value, div))
16710 return value;
16713 /* If divisor is a power of two, simplify this to bit manipulation. */
16714 if (pow2_or_zerop (divisor))
16716 tree t;
16718 t = build_int_cst (TREE_TYPE (value), -divisor);
16719 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16721 else
16723 if (!div)
16724 div = build_int_cst (TREE_TYPE (value), divisor);
16725 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16726 value = size_binop_loc (loc, MULT_EXPR, value, div);
16729 return value;
16732 /* Returns the pointer to the base of the object addressed by EXP and
16733 extracts the information about the offset of the access, storing it
16734 to PBITPOS and POFFSET. */
16736 static tree
16737 split_address_to_core_and_offset (tree exp,
16738 poly_int64 *pbitpos, tree *poffset)
16740 tree core;
16741 machine_mode mode;
16742 int unsignedp, reversep, volatilep;
16743 poly_int64 bitsize;
16744 location_t loc = EXPR_LOCATION (exp);
16746 if (TREE_CODE (exp) == SSA_NAME)
16747 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16748 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16749 exp = gimple_assign_rhs1 (def);
16751 if (TREE_CODE (exp) == ADDR_EXPR)
16753 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16754 poffset, &mode, &unsignedp, &reversep,
16755 &volatilep);
16756 core = build_fold_addr_expr_loc (loc, core);
16758 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16760 core = TREE_OPERAND (exp, 0);
16761 STRIP_NOPS (core);
16762 *pbitpos = 0;
16763 *poffset = TREE_OPERAND (exp, 1);
16764 if (poly_int_tree_p (*poffset))
16766 poly_offset_int tem
16767 = wi::sext (wi::to_poly_offset (*poffset),
16768 TYPE_PRECISION (TREE_TYPE (*poffset)));
16769 tem <<= LOG2_BITS_PER_UNIT;
16770 if (tem.to_shwi (pbitpos))
16771 *poffset = NULL_TREE;
16774 else
16776 core = exp;
16777 *pbitpos = 0;
16778 *poffset = NULL_TREE;
16781 return core;
16784 /* Returns true if addresses of E1 and E2 differ by a constant, false
16785 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16787 bool
16788 ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16790 tree core1, core2;
16791 poly_int64 bitpos1, bitpos2;
16792 tree toffset1, toffset2, tdiff, type;
16794 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16795 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16797 poly_int64 bytepos1, bytepos2;
16798 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16799 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16800 || !operand_equal_p (core1, core2, 0))
16801 return false;
16803 if (toffset1 && toffset2)
16805 type = TREE_TYPE (toffset1);
16806 if (type != TREE_TYPE (toffset2))
16807 toffset2 = fold_convert (type, toffset2);
16809 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16810 if (!cst_and_fits_in_hwi (tdiff))
16811 return false;
16813 *diff = int_cst_value (tdiff);
16815 else if (toffset1 || toffset2)
16817 /* If only one of the offsets is non-constant, the difference cannot
16818 be a constant. */
16819 return false;
16821 else
16822 *diff = 0;
16824 *diff += bytepos1 - bytepos2;
16825 return true;
16828 /* Return OFF converted to a pointer offset type suitable as offset for
16829 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16830 tree
16831 convert_to_ptrofftype_loc (location_t loc, tree off)
16833 if (ptrofftype_p (TREE_TYPE (off)))
16834 return off;
16835 return fold_convert_loc (loc, sizetype, off);
16838 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16839 tree
16840 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16842 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16843 ptr, convert_to_ptrofftype_loc (loc, off));
16846 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16847 tree
16848 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16850 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16851 ptr, size_int (off));
16854 /* Return a pointer to a NUL-terminated string containing the sequence
16855 of bytes corresponding to the representation of the object referred to
16856 by SRC (or a subsequence of such bytes within it if SRC is a reference
16857 to an initialized constant array plus some constant offset).
16858 Set *STRSIZE the number of bytes in the constant sequence including
16859 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16860 where A is the array that stores the constant sequence that SRC points
16861 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16862 need not point to a string or even an array of characters but may point
16863 to an object of any type. */
16865 const char *
16866 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16868 /* The offset into the array A storing the string, and A's byte size. */
16869 tree offset_node;
16870 tree mem_size;
16872 if (strsize)
16873 *strsize = 0;
16875 if (strsize)
16876 src = byte_representation (src, &offset_node, &mem_size, NULL);
16877 else
16878 src = string_constant (src, &offset_node, &mem_size, NULL);
16879 if (!src)
16880 return NULL;
16882 unsigned HOST_WIDE_INT offset = 0;
16883 if (offset_node != NULL_TREE)
16885 if (!tree_fits_uhwi_p (offset_node))
16886 return NULL;
16887 else
16888 offset = tree_to_uhwi (offset_node);
16891 if (!tree_fits_uhwi_p (mem_size))
16892 return NULL;
16894 /* ARRAY_SIZE is the byte size of the array the constant sequence
16895 is stored in and equal to sizeof A. INIT_BYTES is the number
16896 of bytes in the constant sequence used to initialize the array,
16897 including any embedded NULs as well as the terminating NUL (for
16898 strings), but not including any trailing zeros/NULs past
16899 the terminating one appended implicitly to a string literal to
16900 zero out the remainder of the array it's stored in. For example,
16901 given:
16902 const char a[7] = "abc\0d";
16903 n = strlen (a + 1);
16904 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16905 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16906 is equal to strlen (A) + 1. */
16907 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16908 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16909 const char *string = TREE_STRING_POINTER (src);
16911 /* Ideally this would turn into a gcc_checking_assert over time. */
16912 if (init_bytes > array_size)
16913 init_bytes = array_size;
16915 if (init_bytes == 0 || offset >= array_size)
16916 return NULL;
16918 if (strsize)
16920 /* Compute and store the number of characters from the beginning
16921 of the substring at OFFSET to the end, including the terminating
16922 nul. Offsets past the initial length refer to null strings. */
16923 if (offset < init_bytes)
16924 *strsize = init_bytes - offset;
16925 else
16926 *strsize = 1;
16928 else
16930 tree eltype = TREE_TYPE (TREE_TYPE (src));
16931 /* Support only properly NUL-terminated single byte strings. */
16932 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16933 return NULL;
16934 if (string[init_bytes - 1] != '\0')
16935 return NULL;
16938 return offset < init_bytes ? string + offset : "";
16941 /* Return a pointer to a NUL-terminated string corresponding to
16942 the expression STR referencing a constant string, possibly
16943 involving a constant offset. Return null if STR either doesn't
16944 reference a constant string or if it involves a nonconstant
16945 offset. */
16947 const char *
16948 c_getstr (tree str)
16950 return getbyterep (str, NULL);
16953 /* Given a tree T, compute which bits in T may be nonzero. */
16955 wide_int
16956 tree_nonzero_bits (const_tree t)
16958 switch (TREE_CODE (t))
16960 case INTEGER_CST:
16961 return wi::to_wide (t);
16962 case SSA_NAME:
16963 return get_nonzero_bits (t);
16964 case NON_LVALUE_EXPR:
16965 case SAVE_EXPR:
16966 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16967 case BIT_AND_EXPR:
16968 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16969 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16970 case BIT_IOR_EXPR:
16971 case BIT_XOR_EXPR:
16972 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16973 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16974 case COND_EXPR:
16975 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16976 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16977 CASE_CONVERT:
16978 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16979 TYPE_PRECISION (TREE_TYPE (t)),
16980 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16981 case PLUS_EXPR:
16982 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16984 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16985 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16986 if (wi::bit_and (nzbits1, nzbits2) == 0)
16987 return wi::bit_or (nzbits1, nzbits2);
16989 break;
16990 case LSHIFT_EXPR:
16991 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16993 tree type = TREE_TYPE (t);
16994 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16995 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16996 TYPE_PRECISION (type));
16997 return wi::neg_p (arg1)
16998 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16999 : wi::lshift (nzbits, arg1);
17001 break;
17002 case RSHIFT_EXPR:
17003 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
17005 tree type = TREE_TYPE (t);
17006 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
17007 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
17008 TYPE_PRECISION (type));
17009 return wi::neg_p (arg1)
17010 ? wi::lshift (nzbits, -arg1)
17011 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
17013 break;
17014 default:
17015 break;
17018 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
17021 /* Helper function for address compare simplifications in match.pd.
17022 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
17023 TYPE is the type of comparison operands.
17024 BASE0, BASE1, OFF0 and OFF1 are set by the function.
17025 GENERIC is true if GENERIC folding and false for GIMPLE folding.
17026 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
17027 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
17028 and 2 if unknown. */
17031 address_compare (tree_code code, tree type, tree op0, tree op1,
17032 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
17033 bool generic)
17035 if (TREE_CODE (op0) == SSA_NAME)
17036 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
17037 if (TREE_CODE (op1) == SSA_NAME)
17038 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
17039 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
17040 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
17041 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
17042 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
17043 if (base0 && TREE_CODE (base0) == MEM_REF)
17045 off0 += mem_ref_offset (base0).force_shwi ();
17046 base0 = TREE_OPERAND (base0, 0);
17048 if (base1 && TREE_CODE (base1) == MEM_REF)
17050 off1 += mem_ref_offset (base1).force_shwi ();
17051 base1 = TREE_OPERAND (base1, 0);
17053 if (base0 == NULL_TREE || base1 == NULL_TREE)
17054 return 2;
17056 int equal = 2;
17057 /* Punt in GENERIC on variables with value expressions;
17058 the value expressions might point to fields/elements
17059 of other vars etc. */
17060 if (generic
17061 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
17062 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
17063 return 2;
17064 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
17066 symtab_node *node0 = symtab_node::get_create (base0);
17067 symtab_node *node1 = symtab_node::get_create (base1);
17068 equal = node0->equal_address_to (node1);
17070 else if ((DECL_P (base0)
17071 || TREE_CODE (base0) == SSA_NAME
17072 || TREE_CODE (base0) == STRING_CST)
17073 && (DECL_P (base1)
17074 || TREE_CODE (base1) == SSA_NAME
17075 || TREE_CODE (base1) == STRING_CST))
17076 equal = (base0 == base1);
17077 /* Assume different STRING_CSTs with the same content will be
17078 merged. */
17079 if (equal == 0
17080 && TREE_CODE (base0) == STRING_CST
17081 && TREE_CODE (base1) == STRING_CST
17082 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
17083 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
17084 TREE_STRING_LENGTH (base0)) == 0)
17085 equal = 1;
17086 if (equal == 1)
17088 if (code == EQ_EXPR
17089 || code == NE_EXPR
17090 /* If the offsets are equal we can ignore overflow. */
17091 || known_eq (off0, off1)
17092 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
17093 /* Or if we compare using pointers to decls or strings. */
17094 || (POINTER_TYPE_P (type)
17095 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
17096 return 1;
17097 return 2;
17099 if (equal != 0)
17100 return equal;
17101 if (code != EQ_EXPR && code != NE_EXPR)
17102 return 2;
17104 /* At this point we know (or assume) the two pointers point at
17105 different objects. */
17106 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
17107 off0.is_constant (&ioff0);
17108 off1.is_constant (&ioff1);
17109 /* Punt on non-zero offsets from functions. */
17110 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
17111 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
17112 return 2;
17113 /* Or if the bases are neither decls nor string literals. */
17114 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
17115 return 2;
17116 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
17117 return 2;
17118 /* For initializers, assume addresses of different functions are
17119 different. */
17120 if (folding_initializer
17121 && TREE_CODE (base0) == FUNCTION_DECL
17122 && TREE_CODE (base1) == FUNCTION_DECL)
17123 return 0;
17125 /* Compute whether one address points to the start of one
17126 object and another one to the end of another one. */
17127 poly_int64 size0 = 0, size1 = 0;
17128 if (TREE_CODE (base0) == STRING_CST)
17130 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
17131 equal = 2;
17132 else
17133 size0 = TREE_STRING_LENGTH (base0);
17135 else if (TREE_CODE (base0) == FUNCTION_DECL)
17136 size0 = 1;
17137 else
17139 tree sz0 = DECL_SIZE_UNIT (base0);
17140 if (!tree_fits_poly_int64_p (sz0))
17141 equal = 2;
17142 else
17143 size0 = tree_to_poly_int64 (sz0);
17145 if (TREE_CODE (base1) == STRING_CST)
17147 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
17148 equal = 2;
17149 else
17150 size1 = TREE_STRING_LENGTH (base1);
17152 else if (TREE_CODE (base1) == FUNCTION_DECL)
17153 size1 = 1;
17154 else
17156 tree sz1 = DECL_SIZE_UNIT (base1);
17157 if (!tree_fits_poly_int64_p (sz1))
17158 equal = 2;
17159 else
17160 size1 = tree_to_poly_int64 (sz1);
17162 if (equal == 0)
17164 /* If one offset is pointing (or could be) to the beginning of one
17165 object and the other is pointing to one past the last byte of the
17166 other object, punt. */
17167 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
17168 equal = 2;
17169 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
17170 equal = 2;
17171 /* If both offsets are the same, there are some cases we know that are
17172 ok. Either if we know they aren't zero, or if we know both sizes
17173 are no zero. */
17174 if (equal == 2
17175 && known_eq (off0, off1)
17176 && (known_ne (off0, 0)
17177 || (known_ne (size0, 0) && known_ne (size1, 0))))
17178 equal = 0;
17181 /* At this point, equal is 2 if either one or both pointers are out of
17182 bounds of their object, or one points to start of its object and the
17183 other points to end of its object. This is unspecified behavior
17184 e.g. in C++. Otherwise equal is 0. */
17185 if (folding_cxx_constexpr && equal)
17186 return equal;
17188 /* When both pointers point to string literals, even when equal is 0,
17189 due to tail merging of string literals the pointers might be the same. */
17190 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
17192 if (ioff0 < 0
17193 || ioff1 < 0
17194 || ioff0 > TREE_STRING_LENGTH (base0)
17195 || ioff1 > TREE_STRING_LENGTH (base1))
17196 return 2;
17198 /* If the bytes in the string literals starting at the pointers
17199 differ, the pointers need to be different. */
17200 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
17201 TREE_STRING_POINTER (base1) + ioff1,
17202 MIN (TREE_STRING_LENGTH (base0) - ioff0,
17203 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
17205 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
17206 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
17207 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
17208 ioffmin) == 0)
17209 /* If even the bytes in the string literal before the
17210 pointers are the same, the string literals could be
17211 tail merged. */
17212 return 2;
17214 return 0;
17217 if (folding_cxx_constexpr)
17218 return 0;
17220 /* If this is a pointer comparison, ignore for now even
17221 valid equalities where one pointer is the offset zero
17222 of one object and the other to one past end of another one. */
17223 if (!INTEGRAL_TYPE_P (type))
17224 return 0;
17226 /* Assume that string literals can't be adjacent to variables
17227 (automatic or global). */
17228 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
17229 return 0;
17231 /* Assume that automatic variables can't be adjacent to global
17232 variables. */
17233 if (is_global_var (base0) != is_global_var (base1))
17234 return 0;
17236 return equal;
17239 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
17240 tree
17241 ctor_single_nonzero_element (const_tree t)
17243 unsigned HOST_WIDE_INT idx;
17244 constructor_elt *ce;
17245 tree elt = NULL_TREE;
17247 if (TREE_CODE (t) != CONSTRUCTOR)
17248 return NULL_TREE;
17249 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17250 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17252 if (elt)
17253 return NULL_TREE;
17254 elt = ce->value;
17256 return elt;
17259 #if CHECKING_P
17261 namespace selftest {
17263 /* Helper functions for writing tests of folding trees. */
17265 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17267 static void
17268 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17269 tree constant)
17271 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17274 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
17275 wrapping WRAPPED_EXPR. */
17277 static void
17278 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17279 tree wrapped_expr)
17281 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17282 ASSERT_NE (wrapped_expr, result);
17283 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17284 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17287 /* Verify that various arithmetic binary operations are folded
17288 correctly. */
17290 static void
17291 test_arithmetic_folding ()
17293 tree type = integer_type_node;
17294 tree x = create_tmp_var_raw (type, "x");
17295 tree zero = build_zero_cst (type);
17296 tree one = build_int_cst (type, 1);
17298 /* Addition. */
17299 /* 1 <-- (0 + 1) */
17300 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17301 one);
17302 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17303 one);
17305 /* (nonlvalue)x <-- (x + 0) */
17306 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17309 /* Subtraction. */
17310 /* 0 <-- (x - x) */
17311 assert_binop_folds_to_const (x, MINUS_EXPR, x,
17312 zero);
17313 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17316 /* Multiplication. */
17317 /* 0 <-- (x * 0) */
17318 assert_binop_folds_to_const (x, MULT_EXPR, zero,
17319 zero);
17321 /* (nonlvalue)x <-- (x * 1) */
17322 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17326 namespace test_fold_vec_perm_cst {
17328 /* Build a VECTOR_CST corresponding to VMODE, and has
17329 encoding given by NPATTERNS, NELTS_PER_PATTERN and STEP.
17330 Fill it with randomized elements, using rand() % THRESHOLD. */
17332 static tree
17333 build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17334 unsigned nelts_per_pattern,
17335 int step = 0, bool natural_stepped = false,
17336 int threshold = 100)
17338 tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17339 tree vectype = build_vector_type_for_mode (inner_type, vmode);
17340 tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17342 // Fill a0 for each pattern
17343 for (unsigned i = 0; i < npatterns; i++)
17344 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17346 if (nelts_per_pattern == 1)
17347 return builder.build ();
17349 // Fill a1 for each pattern
17350 for (unsigned i = 0; i < npatterns; i++)
17352 tree a1;
17353 if (natural_stepped)
17355 tree a0 = builder[i];
17356 wide_int a0_val = wi::to_wide (a0);
17357 wide_int a1_val = a0_val + step;
17358 a1 = wide_int_to_tree (inner_type, a1_val);
17360 else
17361 a1 = build_int_cst (inner_type, rand () % threshold);
17362 builder.quick_push (a1);
17364 if (nelts_per_pattern == 2)
17365 return builder.build ();
17367 for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17369 tree prev_elem = builder[i - npatterns];
17370 wide_int prev_elem_val = wi::to_wide (prev_elem);
17371 wide_int val = prev_elem_val + step;
17372 builder.quick_push (wide_int_to_tree (inner_type, val));
17375 return builder.build ();
17378 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17379 when result is VLA. */
17381 static void
17382 validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17383 tree res, tree *expected_res)
17385 /* Actual npatterns and encoded_elts in res may be less than expected due
17386 to canonicalization. */
17387 ASSERT_TRUE (res != NULL_TREE);
17388 ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17389 ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17391 for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17392 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17395 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17396 when the result is VLS. */
17398 static void
17399 validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17401 ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17402 for (unsigned i = 0; i < expected_nelts; i++)
17403 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17406 /* Helper routine to push multiple elements into BUILDER. */
17407 template<unsigned N>
17408 static void builder_push_elems (vec_perm_builder& builder,
17409 poly_uint64 (&elems)[N])
17411 for (unsigned i = 0; i < N; i++)
17412 builder.quick_push (elems[i]);
17415 #define ARG0(index) vector_cst_elt (arg0, index)
17416 #define ARG1(index) vector_cst_elt (arg1, index)
17418 /* Test cases where result is VNx4SI and input vectors are V4SI. */
17420 static void
17421 test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17423 for (int i = 0; i < 10; i++)
17425 /* Case 1:
17426 sel = { 0, 4, 1, 5, ... }
17427 res = { arg[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1) */
17429 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17430 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17432 tree inner_type
17433 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17434 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17436 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17437 vec_perm_builder builder (res_len, 4, 1);
17438 poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17439 builder_push_elems (builder, mask_elems);
17441 vec_perm_indices sel (builder, 2, res_len);
17442 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17444 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17445 validate_res (4, 1, res, expected_res);
17448 /* Case 2: Same as case 1, but contains an out of bounds access which
17449 should wrap around.
17450 sel = {0, 8, 4, 12, ...} (4, 1)
17451 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17453 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17454 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17456 tree inner_type
17457 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17458 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17460 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17461 vec_perm_builder builder (res_len, 4, 1);
17462 poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17463 builder_push_elems (builder, mask_elems);
17465 vec_perm_indices sel (builder, 2, res_len);
17466 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17468 tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17469 validate_res (4, 1, res, expected_res);
17474 /* Test cases where result is V4SI and input vectors are VNx4SI. */
17476 static void
17477 test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17479 for (int i = 0; i < 10; i++)
17481 /* Case 1:
17482 sel = { 0, 1, 2, 3}
17483 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17485 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17486 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17488 tree inner_type
17489 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17490 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17492 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17493 vec_perm_builder builder (res_len, 4, 1);
17494 poly_uint64 mask_elems[] = {0, 1, 2, 3};
17495 builder_push_elems (builder, mask_elems);
17497 vec_perm_indices sel (builder, 2, res_len);
17498 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17500 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17501 validate_res_vls (res, expected_res, 4);
17504 /* Case 2: Same as Case 1, but crossing input vector.
17505 sel = {0, 2, 4, 6}
17506 In this case,the index 4 is ambiguous since len = 4 + 4x.
17507 Since we cannot determine, which vector to choose from during
17508 compile time, should return NULL_TREE. */
17510 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17511 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17513 tree inner_type
17514 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17515 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17517 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17518 vec_perm_builder builder (res_len, 4, 1);
17519 poly_uint64 mask_elems[] = {0, 2, 4, 6};
17520 builder_push_elems (builder, mask_elems);
17522 vec_perm_indices sel (builder, 2, res_len);
17523 const char *reason;
17524 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17526 ASSERT_TRUE (res == NULL_TREE);
17527 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17532 /* Test all input vectors. */
17534 static void
17535 test_all_nunits (machine_mode vmode)
17537 /* Test with 10 different inputs. */
17538 for (int i = 0; i < 10; i++)
17540 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17541 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17542 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17544 /* Case 1: mask = {0, ...} // (1, 1)
17545 res = { arg0[0], ... } // (1, 1) */
17547 vec_perm_builder builder (len, 1, 1);
17548 builder.quick_push (0);
17549 vec_perm_indices sel (builder, 2, len);
17550 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17551 tree expected_res[] = { ARG0(0) };
17552 validate_res (1, 1, res, expected_res);
17555 /* Case 2: mask = {len, ...} // (1, 1)
17556 res = { arg1[0], ... } // (1, 1) */
17558 vec_perm_builder builder (len, 1, 1);
17559 builder.quick_push (len);
17560 vec_perm_indices sel (builder, 2, len);
17561 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17563 tree expected_res[] = { ARG1(0) };
17564 validate_res (1, 1, res, expected_res);
17569 /* Test all vectors which contain at-least 2 elements. */
17571 static void
17572 test_nunits_min_2 (machine_mode vmode)
17574 for (int i = 0; i < 10; i++)
17576 /* Case 1: mask = { 0, len, ... } // (2, 1)
17577 res = { arg0[0], arg1[0], ... } // (2, 1) */
17579 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17580 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17581 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17583 vec_perm_builder builder (len, 2, 1);
17584 poly_uint64 mask_elems[] = { 0, len };
17585 builder_push_elems (builder, mask_elems);
17587 vec_perm_indices sel (builder, 2, len);
17588 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17590 tree expected_res[] = { ARG0(0), ARG1(0) };
17591 validate_res (2, 1, res, expected_res);
17594 /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17595 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17597 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17598 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17599 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17601 vec_perm_builder builder (len, 2, 2);
17602 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17603 builder_push_elems (builder, mask_elems);
17605 vec_perm_indices sel (builder, 2, len);
17606 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17608 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17609 validate_res (2, 2, res, expected_res);
17612 /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17613 Test that the stepped sequence of the pattern selects from
17614 same input pattern. Since input vectors have npatterns = 2,
17615 and step (a2 - a1) = 1, step is not a multiple of npatterns
17616 in input vector. So return NULL_TREE. */
17618 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
17619 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17620 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17622 vec_perm_builder builder (len, 1, 3);
17623 poly_uint64 mask_elems[] = { 0, 0, 1 };
17624 builder_push_elems (builder, mask_elems);
17626 vec_perm_indices sel (builder, 2, len);
17627 const char *reason;
17628 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17629 &reason);
17630 ASSERT_TRUE (res == NULL_TREE);
17631 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17634 /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17635 Test that stepped sequence of the pattern selects from arg0.
17636 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17638 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17639 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17640 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17642 vec_perm_builder builder (len, 1, 3);
17643 poly_uint64 mask_elems[] = { len, 0, 1 };
17644 builder_push_elems (builder, mask_elems);
17646 vec_perm_indices sel (builder, 2, len);
17647 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17649 tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17650 validate_res (1, 3, res, expected_res);
17653 /* Case 6: PR111648 - a1 chooses base element from input vector arg.
17654 In this case ensure that arg has a natural stepped sequence
17655 to preserve arg's encoding.
17657 As a concrete example, consider:
17658 arg0: { -16, -9, -10, ... } // (1, 3)
17659 arg1: { -12, -5, -6, ... } // (1, 3)
17660 sel = { 0, len, len + 1, ... } // (1, 3)
17662 This will create res with following encoding:
17663 res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
17664 = { -16, -12, -5, ... }
17666 The step in above encoding would be: (-5) - (-12) = 7
17667 And hence res[3] would be computed as -5 + 7 = 2.
17668 instead of arg1[2], ie, -6.
17669 Ensure that valid_mask_for_fold_vec_perm_cst returns false
17670 for this case. */
17672 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17673 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17674 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17676 vec_perm_builder builder (len, 1, 3);
17677 poly_uint64 mask_elems[] = { 0, len, len+1 };
17678 builder_push_elems (builder, mask_elems);
17680 vec_perm_indices sel (builder, 2, len);
17681 const char *reason;
17682 /* FIXME: It may happen that build_vec_cst_rand may build a natural
17683 stepped pattern, even if we didn't explicitly tell it to. So folding
17684 may not always fail, but if it does, ensure that's because arg1 does
17685 not have a natural stepped sequence (and not due to other reason) */
17686 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17687 if (res == NULL_TREE)
17688 ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
17691 /* Case 7: Same as Case 6, except that arg1 contains natural stepped
17692 sequence and thus folding should be valid for this case. */
17694 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17695 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17696 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17698 vec_perm_builder builder (len, 1, 3);
17699 poly_uint64 mask_elems[] = { 0, len, len+1 };
17700 builder_push_elems (builder, mask_elems);
17702 vec_perm_indices sel (builder, 2, len);
17703 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17705 tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
17706 validate_res (1, 3, res, expected_res);
17709 /* Case 8: Same as aarch64/sve/slp_3.c:
17710 arg0, arg1 are dup vectors.
17711 sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
17712 So res = { arg0[0], arg1[0], ... } // (2, 1)
17714 In this case, since the input vectors are dup, only the first two
17715 elements per pattern in sel are considered significant. */
17717 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17718 tree arg1 = build_vec_cst_rand (vmode, 1, 1);
17719 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17721 vec_perm_builder builder (len, 2, 3);
17722 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17723 builder_push_elems (builder, mask_elems);
17725 vec_perm_indices sel (builder, 2, len);
17726 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17728 tree expected_res[] = { ARG0(0), ARG1(0) };
17729 validate_res (2, 1, res, expected_res);
17734 /* Test all vectors which contain at-least 4 elements. */
17736 static void
17737 test_nunits_min_4 (machine_mode vmode)
17739 for (int i = 0; i < 10; i++)
17741 /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17742 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17744 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17745 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17746 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17748 vec_perm_builder builder (len, 4, 1);
17749 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17750 builder_push_elems (builder, mask_elems);
17752 vec_perm_indices sel (builder, 2, len);
17753 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17755 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17756 validate_res (4, 1, res, expected_res);
17759 /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17760 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17762 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17763 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17764 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17766 vec_perm_builder builder (arg0_len, 1, 3);
17767 poly_uint64 mask_elems[] = {0, 1, 2};
17768 builder_push_elems (builder, mask_elems);
17770 vec_perm_indices sel (builder, 2, arg0_len);
17771 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17772 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17773 validate_res (1, 3, res, expected_res);
17776 /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17777 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17779 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17780 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17781 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17783 vec_perm_builder builder (len, 1, 3);
17784 poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17785 builder_push_elems (builder, mask_elems);
17787 vec_perm_indices sel (builder, 2, len);
17788 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17789 tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17790 validate_res (1, 3, res, expected_res);
17793 /* Case 4:
17794 sel = { len, 0, 2, ... } // (1, 3)
17795 This should return NULL because we cross the input vectors.
17796 Because,
17797 Let's assume len = C + Cx
17798 a1 = 0
17799 S = 2
17800 esel = arg0_len / sel_npatterns = C + Cx
17801 ae = 0 + (esel - 2) * S
17802 = 0 + (C + Cx - 2) * 2
17803 = 2(C-2) + 2Cx
17805 For C >= 4:
17806 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17807 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17808 Since q1 != qe, we cross input vectors.
17809 So return NULL_TREE. */
17811 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17812 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17813 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17815 vec_perm_builder builder (arg0_len, 1, 3);
17816 poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17817 builder_push_elems (builder, mask_elems);
17819 vec_perm_indices sel (builder, 2, arg0_len);
17820 const char *reason;
17821 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17822 ASSERT_TRUE (res == NULL_TREE);
17823 ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17826 /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17827 mask = { 0, len, 1, len + 1, ...} // (2, 2)
17828 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17830 Note that fold_vec_perm_cst will set
17831 res_npatterns = max(4, max(4, 2)) = 4
17832 However after canonicalizing, we will end up with shape (2, 2). */
17834 tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17835 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17836 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17838 vec_perm_builder builder (len, 2, 2);
17839 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17840 builder_push_elems (builder, mask_elems);
17842 vec_perm_indices sel (builder, 2, len);
17843 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17844 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17845 validate_res (2, 2, res, expected_res);
17848 /* Case 6: Test combination in sel, where one pattern is dup and other
17849 is stepped sequence.
17850 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17851 res = { arg0[0], arg0[0], arg0[0],
17852 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17854 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17855 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17856 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17858 vec_perm_builder builder (len, 2, 3);
17859 poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17860 builder_push_elems (builder, mask_elems);
17862 vec_perm_indices sel (builder, 2, len);
17863 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17865 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17866 ARG0(1), ARG0(0), ARG0(2) };
17867 validate_res (2, 3, res, expected_res);
17870 /* Case 7: PR111048: Check that we set arg_npatterns correctly,
17871 when arg0, arg1 and sel have different number of patterns.
17872 arg0 is of shape (1, 1)
17873 arg1 is of shape (4, 1)
17874 sel is of shape (2, 3) = {1, len, 2, len+1, 3, len+2, ...}
17876 In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17877 However,
17878 step = (len+2) - (len+1) = 1
17879 arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17880 Since step is not a multiple of arg_npatterns,
17881 valid_mask_for_fold_vec_perm_cst should return false,
17882 and thus fold_vec_perm_cst should return NULL_TREE. */
17884 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17885 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17886 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17888 vec_perm_builder builder (len, 2, 3);
17889 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17890 builder_push_elems (builder, mask_elems);
17892 vec_perm_indices sel (builder, 2, len);
17893 const char *reason;
17894 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17896 ASSERT_TRUE (res == NULL_TREE);
17897 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17900 /* Case 8: PR111754: When input vector is not a stepped sequence,
17901 check that the result is not a stepped sequence either, even
17902 if sel has a stepped sequence. */
17904 tree arg0 = build_vec_cst_rand (vmode, 1, 2);
17905 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17907 vec_perm_builder builder (len, 1, 3);
17908 poly_uint64 mask_elems[] = { 0, 1, 2 };
17909 builder_push_elems (builder, mask_elems);
17911 vec_perm_indices sel (builder, 1, len);
17912 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);
17914 tree expected_res[] = { ARG0(0), ARG0(1) };
17915 validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
17918 /* Case 9: If sel doesn't contain a stepped sequence,
17919 check that the result has same encoding as sel, irrespective
17920 of shape of input vectors. */
17922 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17923 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17924 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17926 vec_perm_builder builder (len, 1, 2);
17927 poly_uint64 mask_elems[] = { 0, len };
17928 builder_push_elems (builder, mask_elems);
17930 vec_perm_indices sel (builder, 2, len);
17931 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17933 tree expected_res[] = { ARG0(0), ARG1(0) };
17934 validate_res (sel.encoding ().npatterns (),
17935 sel.encoding ().nelts_per_pattern (), res, expected_res);
17940 /* Test all vectors which contain at-least 8 elements. */
17942 static void
17943 test_nunits_min_8 (machine_mode vmode)
17945 for (int i = 0; i < 10; i++)
17947 /* Case 1: sel_npatterns (4) > input npatterns (2)
17948 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17949 res: { arg0[0], arg0[0], arg0[0], arg1[0],
17950 arg0[2], arg0[0], arg0[3], arg1[0],
17951 arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17953 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17954 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17955 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17957 vec_perm_builder builder(len, 4, 3);
17958 poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17959 4, 0, 5, len };
17960 builder_push_elems (builder, mask_elems);
17962 vec_perm_indices sel (builder, 2, len);
17963 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17965 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17966 ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17967 ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17968 validate_res (4, 3, res, expected_res);
17973 /* Test vectors for which nunits[0] <= 4. */
17975 static void
17976 test_nunits_max_4 (machine_mode vmode)
17978 /* Case 1: mask = {0, 4, ...} // (1, 2)
17979 This should return NULL_TREE because the index 4 may choose
17980 from either arg0 or arg1 depending on vector length. */
17982 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17983 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17984 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17986 vec_perm_builder builder (len, 1, 2);
17987 poly_uint64 mask_elems[] = {0, 4};
17988 builder_push_elems (builder, mask_elems);
17990 vec_perm_indices sel (builder, 2, len);
17991 const char *reason;
17992 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17993 ASSERT_TRUE (res == NULL_TREE);
17994 ASSERT_TRUE (reason != NULL);
17995 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17999 #undef ARG0
18000 #undef ARG1
18002 /* Return true if SIZE is of the form C + Cx and C is power of 2. */
18004 static bool
18005 is_simple_vla_size (poly_uint64 size)
18007 if (size.is_constant ()
18008 || !pow2p_hwi (size.coeffs[0]))
18009 return false;
18010 for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
18011 if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
18012 return false;
18013 return true;
18016 /* Execute fold_vec_perm_cst unit tests. */
18018 static void
18019 test ()
18021 machine_mode vnx4si_mode = E_VOIDmode;
18022 machine_mode v4si_mode = E_VOIDmode;
18024 machine_mode vmode;
18025 FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
18027 /* Obtain modes corresponding to VNx4SI and V4SI,
18028 to call mixed mode tests below.
18029 FIXME: Is there a better way to do this ? */
18030 if (GET_MODE_INNER (vmode) == SImode)
18032 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
18033 if (is_simple_vla_size (nunits)
18034 && nunits.coeffs[0] == 4)
18035 vnx4si_mode = vmode;
18036 else if (known_eq (nunits, poly_uint64 (4)))
18037 v4si_mode = vmode;
18040 if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
18041 || !targetm.vector_mode_supported_p (vmode))
18042 continue;
18044 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
18045 test_all_nunits (vmode);
18046 if (nunits.coeffs[0] >= 2)
18047 test_nunits_min_2 (vmode);
18048 if (nunits.coeffs[0] >= 4)
18049 test_nunits_min_4 (vmode);
18050 if (nunits.coeffs[0] >= 8)
18051 test_nunits_min_8 (vmode);
18053 if (nunits.coeffs[0] <= 4)
18054 test_nunits_max_4 (vmode);
18057 if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
18058 && targetm.vector_mode_supported_p (vnx4si_mode)
18059 && targetm.vector_mode_supported_p (v4si_mode))
18061 test_vnx4si_v4si (vnx4si_mode, v4si_mode);
18062 test_v4si_vnx4si (v4si_mode, vnx4si_mode);
18065 } // end of test_fold_vec_perm_cst namespace
18067 /* Verify that various binary operations on vectors are folded
18068 correctly. */
18070 static void
18071 test_vector_folding ()
18073 tree inner_type = integer_type_node;
18074 tree type = build_vector_type (inner_type, 4);
18075 tree zero = build_zero_cst (type);
18076 tree one = build_one_cst (type);
18077 tree index = build_index_vector (type, 0, 1);
18079 /* Verify equality tests that return a scalar boolean result. */
18080 tree res_type = boolean_type_node;
18081 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
18082 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
18083 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
18084 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
18085 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
18086 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18087 index, one)));
18088 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
18089 index, index)));
18090 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18091 index, index)));
18094 /* Verify folding of VEC_DUPLICATE_EXPRs. */
18096 static void
18097 test_vec_duplicate_folding ()
18099 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
18100 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
18101 /* This will be 1 if VEC_MODE isn't a vector mode. */
18102 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
18104 tree type = build_vector_type (ssizetype, nunits);
18105 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
18106 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
18107 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
18110 /* Run all of the selftests within this file. */
18112 void
18113 fold_const_cc_tests ()
18115 test_arithmetic_folding ();
18116 test_vector_folding ();
18117 test_vec_duplicate_folding ();
18118 test_fold_vec_perm_cst::test ();
18121 } // namespace selftest
18123 #endif /* CHECKING_P */