/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
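/* Illustrative sketch (editorial example, not part of the original
   sources): a caller that wants the folded byte size of an array of
   NELTS elements of ELT_SIZE bytes each would combine these entry
   points as

     tree nbytes = size_binop (MULT_EXPR,
                               fold_convert (sizetype, nelts),
                               size_int (elt_size));

   where NELTS and ELT_SIZE are hypothetical locals; when both operands
   are INTEGER_CSTs the result is itself an INTEGER_CST of sizetype.  */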
#define INCLUDE_ALGORITHM
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding a C++ manifestly-constant-evaluated context;
   zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
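/* Editorial note on the encoding above: the four bits are LT (1),
   EQ (2), GT (4) and UNORD (8), and each code is the inclusive OR of
   the relations it admits.  For instance

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)                == 3
     COMPCODE_ORD  == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)  == 7
     COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE)             == 14

   so ANDing or ORing two comparisons of the same operands reduces to a
   bitwise AND/OR of their codes.  */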
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 is equivalent to X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 is equivalent to X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	  /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int min = widest_int::from (r.lower_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (min == wi::to_widest (exp1))
		code = MAX_EXPR;
	    }
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 is equivalent to X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 is equivalent to X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	  /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MIN_RANGE<a>-1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int max = widest_int::from (r.upper_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (max == wi::to_widest (exp1))
		code = MIN_EXPR;
	    }
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
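/* Editorial example: for "(x <= 9) ? x : 10" with a signed int X, EXP1
   is 9, EXP3 is 10 and 9 == 10 - 1, so LE_EXPR is canonicalized to
   LT_EXPR above; the operands then match a "(x < 10) ? x : 10"
   pattern and the function returns MIN_EXPR.  */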
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
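/* Editorial example: with ARG1 == 12 and ARG2 == 4 this returns the
   INTEGER_CST 3, while ARG1 == 13 and ARG2 == 4 leave a nonzero
   remainder and yield NULL_TREE, so callers can test the result to
   decide whether an exact division was possible.  */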
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
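/* Editorial usage sketch (hypothetical caller, not from the original
   file): code that folds speculatively brackets the work with the
   deferral API, e.g.

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (folded != NULL_TREE && used_p,
                                     stmt, 0);

   so a strict-overflow warning is only emitted when the folded result
   is actually kept; FOLDED, USED_P and STMT are illustrative names.  */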
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
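/* Editorial example: for a 32-bit signed int, only INT_MIN
   (0x80000000, the lone value with just the sign bit set) has no
   representable negation, which is exactly what wi::only_sign_bit_p
   detects; every other value, including INT_MAX, negates safely.  */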
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but after negating one of the
	 operands it does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
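/* Editorial example: with signed arithmetic under -fwrapv (so
   TYPE_OVERFLOW_WRAPS holds), negate_expr_p is true for "~x" because
   -(~x) can be rewritten as x + 1; without wrapping semantics the
   BIT_NOT_EXPR case above rejects it, since x + 1 could overflow when
   x == INT_MAX.  */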
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
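/* Editorial example: negate_expr applied to "a - b" (with signed zeros
   not honored) yields "b - a" via fold_negate_expr, whereas applied to
   a bare SSA_NAME or variable it falls back to wrapping the operand in
   an explicit NEGATE_EXPR.  */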
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
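/* Editorial example: splitting IN = "x - 4" with CODE == PLUS_EXPR
   returns the variable part "x" and stores the INTEGER_CST 4 in
   *MINUS_LITP (the subtracted literal), leaving *LITP, *CONP and the
   other outputs null; associate_trees below can then recombine the
   pieces after other literals have been merged.  */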
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
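/* Editorial example: a rotate by a negative amount is normalized above
   before being evaluated, so RROTATE_EXPR of ARG1 by -3 is computed as
   wi::lrotate (arg1, 3); division and modulus by zero simply return
   false, leaving the original expression unfolded.  */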
/* Return true if we know which of ARG1 and ARG2 is smaller or equal,
   and store that minimum value in RES.  */
bool
can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
{
  if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
    {
      res = wi::to_poly_wide (arg1);
      return true;
    }
  else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
    {
      res = wi::to_poly_wide (arg2);
      return true;
    }

  return false;
}
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    case MIN_EXPR:
      if (!can_min_p (arg1, arg2, res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
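/* Editorial example: int_const_binop (PLUS_EXPR, a, b) on two int
   INTEGER_CSTs 7 and 5 yields the INTEGER_CST 12; if the signed
   addition wraps (e.g. INT_MAX + 1), force_fit_type still builds the
   truncated constant but flags it with TREE_OVERFLOW, subject to the
   OVERFLOWABLE argument.  */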
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
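/* Editorial example: LSHIFT_EXPR distributes in operand 1 because
   (a + b) << c == (a << c) + (b << c) modulo the type's precision, but
   not in operand 2, since x << (a + b) is not a sum of shifts; this is
   what lets the vector code below operate directly on stepped
   encodings.  */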
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = bi/br;
		     div = (bi * ratio) + br;
		     tr = (ai * ratio) + ar;
		     ti = ai - (ar * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
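/* Editorial example: because comparisons dispatch to
   fold_relational_const, a call such as

     const_binop (LT_EXPR, boolean_type_node,
                  build_int_cst (integer_type_node, 2),
                  build_int_cst (integer_type_node, 3))

   folds to boolean_true_node.  */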
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
2020 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2021 indicates which particular sizetype to create. */
2023 tree
2024 size_int_kind (poly_int64 number, enum size_type_kind kind)
2026 return build_int_cst (sizetype_tab[(int) kind], number);
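/* The convenience macros size_int, ssize_int, bitsize_int and
   sbitsize_int in tree.h expand to calls of this function with the
   corresponding size_type_kind.  */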
2029 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2030 is a tree code. The type of the result is taken from the operands.
2031 Both must be equivalent integer types, ala int_binop_types_match_p.
2032 If the operands are constant, so is the result. */
2034 tree
2035 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2037 tree type = TREE_TYPE (arg0);
2039 if (arg0 == error_mark_node || arg1 == error_mark_node)
2040 return error_mark_node;
2042 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2043 TREE_TYPE (arg1)));
2045 /* Handle the special case of two poly_int constants faster. */
2046 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2048 /* And some specific cases even faster than that. */
2049 if (code == PLUS_EXPR)
2051 if (integer_zerop (arg0)
2052 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2053 return arg1;
2054 if (integer_zerop (arg1)
2055 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2056 return arg0;
2058 else if (code == MINUS_EXPR)
2060 if (integer_zerop (arg1)
2061 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2062 return arg0;
2064 else if (code == MULT_EXPR)
2066 if (integer_onep (arg0)
2067 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2068 return arg1;
2071 /* Handle general case of two integer constants. For sizetype
2072 constant calculations we always want to know about overflow,
2073 even in the unsigned case. */
2074 tree res = int_const_binop (code, arg0, arg1, -1);
2075 if (res != NULL_TREE)
2076 return res;
2079 return fold_build2_loc (loc, code, type, arg0, arg1);
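/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   directly to the sizetype constant 12 via int_const_binop, while
   non-constant operands fall through to fold_build2_loc.  */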
2082 /* Given two values, either both of sizetype or both of bitsizetype,
2083 compute the difference between the two values. Return the value
2084 in signed type corresponding to the type of the operands. */
2086 tree
2087 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2089 tree type = TREE_TYPE (arg0);
2090 tree ctype;
2092 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2093 TREE_TYPE (arg1)));
2095 /* If the type is already signed, just do the simple thing. */
2096 if (!TYPE_UNSIGNED (type))
2097 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2099 if (type == sizetype)
2100 ctype = ssizetype;
2101 else if (type == bitsizetype)
2102 ctype = sbitsizetype;
2103 else
2104 ctype = signed_type_for (type);
2106 /* If either operand is not a constant, do the conversions to the signed
2107 type and subtract. The hardware will do the right thing with any
2108 overflow in the subtraction. */
2109 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2110 return size_binop_loc (loc, MINUS_EXPR,
2111 fold_convert_loc (loc, ctype, arg0),
2112 fold_convert_loc (loc, ctype, arg1));
2114 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2115 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2116 overflow) and negate (which can't either). Special-case a result
2117 of zero while we're here. */
2118 if (tree_int_cst_equal (arg0, arg1))
2119 return build_int_cst (ctype, 0);
2120 else if (tree_int_cst_lt (arg1, arg0))
2121 return fold_convert_loc (loc, ctype,
2122 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2123 else
2124 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2125 fold_convert_loc (loc, ctype,
2126 size_binop_loc (loc,
2127 MINUS_EXPR,
2128 arg1, arg0)));
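/* A worked example of the constant path: for sizetype operands 2 and 5
   the result is the ssizetype constant -3, computed as
   0 - (ssizetype) (5 - 2) so the unsigned subtraction never wraps.  */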
2131 /* A subroutine of fold_convert_const handling conversions of an
2132 INTEGER_CST to another integer type. */
2134 static tree
2135 fold_convert_const_int_from_int (tree type, const_tree arg1)
2137 /* Given an integer constant, make new constant with new type,
2138 appropriately sign-extended or truncated. Use widest_int
2139 so that any extension is done according to ARG1's type. */
2140 return force_fit_type (type, wi::to_widest (arg1),
2141 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2142 TREE_OVERFLOW (arg1));
2145 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2146 to an integer type. */
2148 static tree
2149 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2151 bool overflow = false;
2152 tree t;
2154 /* The following code implements the floating point to integer
2155 conversion rules required by the Java Language Specification:
2156 IEEE NaNs are mapped to zero and values that overflow
2157 the target precision saturate, i.e. values greater than
2158 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2159 are mapped to INT_MIN. These semantics are allowed by the
2160 C and C++ standards that simply state that the behavior of
2161 FP-to-integer conversion is unspecified upon overflow. */
2163 wide_int val;
2164 REAL_VALUE_TYPE r;
2165 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2167 switch (code)
2169 case FIX_TRUNC_EXPR:
2170 real_trunc (&r, VOIDmode, &x);
2171 break;
2173 default:
2174 gcc_unreachable ();
2177 /* If R is NaN, return zero and show we have an overflow. */
2178 if (REAL_VALUE_ISNAN (r))
2180 overflow = true;
2181 val = wi::zero (TYPE_PRECISION (type));
2184 /* See if R is less than the lower bound or greater than the
2185 upper bound. */
2187 if (! overflow)
2189 tree lt = TYPE_MIN_VALUE (type);
2190 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2191 if (real_less (&r, &l))
2193 overflow = true;
2194 val = wi::to_wide (lt);
2198 if (! overflow)
2200 tree ut = TYPE_MAX_VALUE (type);
2201 if (ut)
2203 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2204 if (real_less (&u, &r))
2206 overflow = true;
2207 val = wi::to_wide (ut);
2212 if (! overflow)
2213 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2215 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2216 return t;
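/* Concretely, (int) 3.9 folds to 3, while (int) NaN folds to 0 and an
   out-of-range value such as (int) 1e30 saturates to the type's maximum;
   the latter two results carry TREE_OVERFLOW.  */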
2219 /* A subroutine of fold_convert_const handling conversions of a
2220 FIXED_CST to an integer type. */
2222 static tree
2223 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2225 tree t;
2226 double_int temp, temp_trunc;
2227 scalar_mode mode;
2229 /* Right shift FIXED_CST to temp by fbit. */
2230 temp = TREE_FIXED_CST (arg1).data;
2231 mode = TREE_FIXED_CST (arg1).mode;
2232 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2234 temp = temp.rshift (GET_MODE_FBIT (mode),
2235 HOST_BITS_PER_DOUBLE_INT,
2236 SIGNED_FIXED_POINT_MODE_P (mode));
2238 /* Left shift temp to temp_trunc by fbit. */
2239 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2240 HOST_BITS_PER_DOUBLE_INT,
2241 SIGNED_FIXED_POINT_MODE_P (mode));
2243 else
2245 temp = double_int_zero;
2246 temp_trunc = double_int_zero;
2249 /* If FIXED_CST is negative, we need to round the value toward 0:
2250 if any of the discarded fractional bits are nonzero, add 1 to temp. */
2251 if (SIGNED_FIXED_POINT_MODE_P (mode)
2252 && temp_trunc.is_negative ()
2253 && TREE_FIXED_CST (arg1).data != temp_trunc)
2254 temp += double_int_one;
2256 /* Given a fixed-point constant, make new constant with new type,
2257 appropriately sign-extended or truncated. */
2258 t = force_fit_type (type, temp, -1,
2259 (temp.is_negative ()
2260 && (TYPE_UNSIGNED (type)
2261 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2262 | TREE_OVERFLOW (arg1));
2264 return t;
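/* Roughly: a signed fixed-point -3.25 converts to the integer -3; the
   arithmetic right shift yields -4 and the nonzero fractional bits
   detected above add 1 back, rounding toward zero.  */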
2267 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2268 to another floating point type. */
2270 static tree
2271 fold_convert_const_real_from_real (tree type, const_tree arg1)
2273 REAL_VALUE_TYPE value;
2274 tree t;
2276 /* If the underlying modes are the same, simply treat it as
2277 a copy and rebuild with TREE_REAL_CST information and the
2278 given type. */
2279 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2281 t = build_real (type, TREE_REAL_CST (arg1));
2282 return t;
2285 /* Don't perform the operation if flag_signaling_nans is on
2286 and the operand is a signaling NaN. */
2287 if (HONOR_SNANS (arg1)
2288 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2289 return NULL_TREE;
2291 /* With flag_rounding_math we should respect the current rounding mode
2292 unless the conversion is exact. */
2293 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2294 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2295 return NULL_TREE;
2297 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2298 t = build_real (type, value);
2300 /* If converting an infinity or NAN to a representation that doesn't
2301 have one, set the overflow bit so that we can produce some kind of
2302 error message at the appropriate point if necessary. It's not the
2303 most user-friendly message, but it's better than nothing. */
2304 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2305 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2306 TREE_OVERFLOW (t) = 1;
2307 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2308 && !MODE_HAS_NANS (TYPE_MODE (type)))
2309 TREE_OVERFLOW (t) = 1;
2310 /* Regular overflow: the conversion produced an infinity in a mode
2311 that can't represent infinities. */
2312 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2313 && REAL_VALUE_ISINF (value)
2314 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2315 TREE_OVERFLOW (t) = 1;
2316 else
2317 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2318 return t;
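/* Thus narrowing the double constant 0.1 to float folds by default, but
   is left alone under -frounding-math because the truncation is inexact
   and the runtime rounding mode must be respected.  */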
2321 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2322 to a floating point type. */
2324 static tree
2325 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2327 REAL_VALUE_TYPE value;
2328 tree t;
2330 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2331 &TREE_FIXED_CST (arg1));
2332 t = build_real (type, value);
2334 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2335 return t;
2338 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2339 to another fixed-point type. */
2341 static tree
2342 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2344 FIXED_VALUE_TYPE value;
2345 tree t;
2346 bool overflow_p;
2348 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2349 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2350 t = build_fixed (type, value);
2352 /* Propagate overflow flags. */
2353 if (overflow_p | TREE_OVERFLOW (arg1))
2354 TREE_OVERFLOW (t) = 1;
2355 return t;
2358 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2359 to a fixed-point type. */
2361 static tree
2362 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2364 FIXED_VALUE_TYPE value;
2365 tree t;
2366 bool overflow_p;
2367 double_int di;
2369 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2371 di.low = TREE_INT_CST_ELT (arg1, 0);
2372 if (TREE_INT_CST_NUNITS (arg1) == 1)
2373 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2374 else
2375 di.high = TREE_INT_CST_ELT (arg1, 1);
2377 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2378 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2379 TYPE_SATURATING (type));
2380 t = build_fixed (type, value);
2382 /* Propagate overflow flags. */
2383 if (overflow_p | TREE_OVERFLOW (arg1))
2384 TREE_OVERFLOW (t) = 1;
2385 return t;
2388 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2389 to a fixed-point type. */
2391 static tree
2392 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2394 FIXED_VALUE_TYPE value;
2395 tree t;
2396 bool overflow_p;
2398 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2399 &TREE_REAL_CST (arg1),
2400 TYPE_SATURATING (type));
2401 t = build_fixed (type, value);
2403 /* Propagate overflow flags. */
2404 if (overflow_p | TREE_OVERFLOW (arg1))
2405 TREE_OVERFLOW (t) = 1;
2406 return t;
2409 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2410 type TYPE. If no simplification can be done return NULL_TREE. */
2412 static tree
2413 fold_convert_const (enum tree_code code, tree type, tree arg1)
2415 tree arg_type = TREE_TYPE (arg1);
2416 if (arg_type == type)
2417 return arg1;
2419 /* We can't widen types, since the runtime value could overflow the
2420 original type before being extended to the new type. */
2421 if (POLY_INT_CST_P (arg1)
2422 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2423 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2424 return build_poly_int_cst (type,
2425 poly_wide_int::from (poly_int_cst_value (arg1),
2426 TYPE_PRECISION (type),
2427 TYPE_SIGN (arg_type)));
2429 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2430 || TREE_CODE (type) == OFFSET_TYPE)
2432 if (TREE_CODE (arg1) == INTEGER_CST)
2433 return fold_convert_const_int_from_int (type, arg1);
2434 else if (TREE_CODE (arg1) == REAL_CST)
2435 return fold_convert_const_int_from_real (code, type, arg1);
2436 else if (TREE_CODE (arg1) == FIXED_CST)
2437 return fold_convert_const_int_from_fixed (type, arg1);
2439 else if (SCALAR_FLOAT_TYPE_P (type))
2441 if (TREE_CODE (arg1) == INTEGER_CST)
2443 tree res = build_real_from_int_cst (type, arg1);
2444 /* Avoid the folding if flag_rounding_math is on and the
2445 conversion is not exact. */
2446 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2448 bool fail = false;
2449 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2450 TYPE_PRECISION (TREE_TYPE (arg1)));
2451 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2452 return NULL_TREE;
2454 return res;
2456 else if (TREE_CODE (arg1) == REAL_CST)
2457 return fold_convert_const_real_from_real (type, arg1);
2458 else if (TREE_CODE (arg1) == FIXED_CST)
2459 return fold_convert_const_real_from_fixed (type, arg1);
2461 else if (FIXED_POINT_TYPE_P (type))
2463 if (TREE_CODE (arg1) == FIXED_CST)
2464 return fold_convert_const_fixed_from_fixed (type, arg1);
2465 else if (TREE_CODE (arg1) == INTEGER_CST)
2466 return fold_convert_const_fixed_from_int (type, arg1);
2467 else if (TREE_CODE (arg1) == REAL_CST)
2468 return fold_convert_const_fixed_from_real (type, arg1);
2470 else if (VECTOR_TYPE_P (type))
2472 if (TREE_CODE (arg1) == VECTOR_CST
2473 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2475 tree elttype = TREE_TYPE (type);
2476 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2477 /* We can't handle steps directly when extending, since the
2478 values need to wrap at the original precision first. */
2479 bool step_ok_p
2480 = (INTEGRAL_TYPE_P (elttype)
2481 && INTEGRAL_TYPE_P (arg1_elttype)
2482 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2483 tree_vector_builder v;
2484 if (!v.new_unary_operation (type, arg1, step_ok_p))
2485 return NULL_TREE;
2486 unsigned int len = v.encoded_nelts ();
2487 for (unsigned int i = 0; i < len; ++i)
2489 tree elt = VECTOR_CST_ELT (arg1, i);
2490 tree cvt = fold_convert_const (code, elttype, elt);
2491 if (cvt == NULL_TREE)
2492 return NULL_TREE;
2493 v.quick_push (cvt);
2495 return v.build ();
2498 return NULL_TREE;
2501 /* Construct a vector of zero elements of vector type TYPE. */
2503 static tree
2504 build_zero_vector (tree type)
2506 tree t;
2508 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2509 return build_vector_from_val (type, t);
2512 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2514 bool
2515 fold_convertible_p (const_tree type, const_tree arg)
2517 const_tree orig = TREE_TYPE (arg);
2519 if (type == orig)
2520 return true;
2522 if (TREE_CODE (arg) == ERROR_MARK
2523 || TREE_CODE (type) == ERROR_MARK
2524 || TREE_CODE (orig) == ERROR_MARK)
2525 return false;
2527 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2528 return true;
2530 switch (TREE_CODE (type))
2532 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2533 case POINTER_TYPE: case REFERENCE_TYPE:
2534 case OFFSET_TYPE:
2535 return (INTEGRAL_TYPE_P (orig)
2536 || (POINTER_TYPE_P (orig)
2537 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2538 || TREE_CODE (orig) == OFFSET_TYPE);
2540 case REAL_TYPE:
2541 case FIXED_POINT_TYPE:
2542 case VOID_TYPE:
2543 return TREE_CODE (type) == TREE_CODE (orig);
2545 case VECTOR_TYPE:
2546 return (VECTOR_TYPE_P (orig)
2547 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2548 TYPE_VECTOR_SUBPARTS (orig))
2549 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2551 default:
2552 return false;
2556 /* Convert expression ARG to type TYPE. Used by the middle-end for
2557 simple conversions in preference to calling the front-end's convert. */
2559 tree
2560 fold_convert_loc (location_t loc, tree type, tree arg)
2562 tree orig = TREE_TYPE (arg);
2563 tree tem;
2565 if (type == orig)
2566 return arg;
2568 if (TREE_CODE (arg) == ERROR_MARK
2569 || TREE_CODE (type) == ERROR_MARK
2570 || TREE_CODE (orig) == ERROR_MARK)
2571 return error_mark_node;
2573 switch (TREE_CODE (type))
2575 case POINTER_TYPE:
2576 case REFERENCE_TYPE:
2577 /* Handle conversions between pointers to different address spaces. */
2578 if (POINTER_TYPE_P (orig)
2579 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2580 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2581 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2582 /* fall through */
2584 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2585 case OFFSET_TYPE: case BITINT_TYPE:
2586 if (TREE_CODE (arg) == INTEGER_CST)
2588 tem = fold_convert_const (NOP_EXPR, type, arg);
2589 if (tem != NULL_TREE)
2590 return tem;
2592 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2593 || TREE_CODE (orig) == OFFSET_TYPE)
2594 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2595 if (TREE_CODE (orig) == COMPLEX_TYPE)
2596 return fold_convert_loc (loc, type,
2597 fold_build1_loc (loc, REALPART_EXPR,
2598 TREE_TYPE (orig), arg));
2599 gcc_assert (VECTOR_TYPE_P (orig)
2600 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2601 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2603 case REAL_TYPE:
2604 if (TREE_CODE (arg) == INTEGER_CST)
2606 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2607 if (tem != NULL_TREE)
2608 return tem;
2610 else if (TREE_CODE (arg) == REAL_CST)
2612 tem = fold_convert_const (NOP_EXPR, type, arg);
2613 if (tem != NULL_TREE)
2614 return tem;
2616 else if (TREE_CODE (arg) == FIXED_CST)
2618 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2619 if (tem != NULL_TREE)
2620 return tem;
2623 switch (TREE_CODE (orig))
2625 case INTEGER_TYPE: case BITINT_TYPE:
2626 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2627 case POINTER_TYPE: case REFERENCE_TYPE:
2628 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2630 case REAL_TYPE:
2631 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2633 case FIXED_POINT_TYPE:
2634 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2636 case COMPLEX_TYPE:
2637 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2638 return fold_convert_loc (loc, type, tem);
2640 default:
2641 gcc_unreachable ();
2644 case FIXED_POINT_TYPE:
2645 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2646 || TREE_CODE (arg) == REAL_CST)
2648 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2649 if (tem != NULL_TREE)
2650 goto fold_convert_exit;
2653 switch (TREE_CODE (orig))
2655 case FIXED_POINT_TYPE:
2656 case INTEGER_TYPE:
2657 case ENUMERAL_TYPE:
2658 case BOOLEAN_TYPE:
2659 case REAL_TYPE:
2660 case BITINT_TYPE:
2661 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2663 case COMPLEX_TYPE:
2664 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2665 return fold_convert_loc (loc, type, tem);
2667 default:
2668 gcc_unreachable ();
2671 case COMPLEX_TYPE:
2672 switch (TREE_CODE (orig))
2674 case INTEGER_TYPE: case BITINT_TYPE:
2675 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2676 case POINTER_TYPE: case REFERENCE_TYPE:
2677 case REAL_TYPE:
2678 case FIXED_POINT_TYPE:
2679 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2680 fold_convert_loc (loc, TREE_TYPE (type), arg),
2681 fold_convert_loc (loc, TREE_TYPE (type),
2682 integer_zero_node));
2683 case COMPLEX_TYPE:
2685 tree rpart, ipart;
2687 if (TREE_CODE (arg) == COMPLEX_EXPR)
2689 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2690 TREE_OPERAND (arg, 0));
2691 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2692 TREE_OPERAND (arg, 1));
2693 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2696 arg = save_expr (arg);
2697 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2698 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2699 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2700 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2701 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2704 default:
2705 gcc_unreachable ();
2708 case VECTOR_TYPE:
2709 if (integer_zerop (arg))
2710 return build_zero_vector (type);
2711 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2712 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2713 || VECTOR_TYPE_P (orig));
2714 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2716 case VOID_TYPE:
2717 tem = fold_ignored_result (arg);
2718 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2720 default:
2721 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2722 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2723 gcc_unreachable ();
2725 fold_convert_exit:
2726 tem = protected_set_expr_location_unshare (tem, loc);
2727 return tem;
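/* For instance, fold_convert (double_type_node, integer_one_node)
   immediately yields the REAL_CST 1.0 through fold_convert_const, while
   a non-constant integer operand gets a FLOAT_EXPR built around it.  */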
2730 /* Return false if expr can be assumed not to be an lvalue, true
2731 otherwise. */
2733 static bool
2734 maybe_lvalue_p (const_tree x)
2736 /* We only need to wrap lvalue tree codes. */
2737 switch (TREE_CODE (x))
2739 case VAR_DECL:
2740 case PARM_DECL:
2741 case RESULT_DECL:
2742 case LABEL_DECL:
2743 case FUNCTION_DECL:
2744 case SSA_NAME:
2745 case COMPOUND_LITERAL_EXPR:
2747 case COMPONENT_REF:
2748 case MEM_REF:
2749 case INDIRECT_REF:
2750 case ARRAY_REF:
2751 case ARRAY_RANGE_REF:
2752 case BIT_FIELD_REF:
2753 case OBJ_TYPE_REF:
2755 case REALPART_EXPR:
2756 case IMAGPART_EXPR:
2757 case PREINCREMENT_EXPR:
2758 case PREDECREMENT_EXPR:
2759 case SAVE_EXPR:
2760 case TRY_CATCH_EXPR:
2761 case WITH_CLEANUP_EXPR:
2762 case COMPOUND_EXPR:
2763 case MODIFY_EXPR:
2764 case TARGET_EXPR:
2765 case COND_EXPR:
2766 case BIND_EXPR:
2767 case VIEW_CONVERT_EXPR:
2768 break;
2770 default:
2771 /* Assume the worst for front-end tree codes. */
2772 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2773 break;
2774 return false;
2777 return true;
2780 /* Return an expr equal to X but certainly not valid as an lvalue. */
2782 tree
2783 non_lvalue_loc (location_t loc, tree x)
2785 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2786 us. */
2787 if (in_gimple_form)
2788 return x;
2790 if (! maybe_lvalue_p (x))
2791 return x;
2792 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2795 /* Given a tree comparison code, return the code that is the logical inverse.
2796 It is generally not safe to do this for floating-point comparisons, except
2797 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2798 ERROR_MARK in this case. */
2800 enum tree_code
2801 invert_tree_comparison (enum tree_code code, bool honor_nans)
2803 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2804 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2805 return ERROR_MARK;
2807 switch (code)
2809 case EQ_EXPR:
2810 return NE_EXPR;
2811 case NE_EXPR:
2812 return EQ_EXPR;
2813 case GT_EXPR:
2814 return honor_nans ? UNLE_EXPR : LE_EXPR;
2815 case GE_EXPR:
2816 return honor_nans ? UNLT_EXPR : LT_EXPR;
2817 case LT_EXPR:
2818 return honor_nans ? UNGE_EXPR : GE_EXPR;
2819 case LE_EXPR:
2820 return honor_nans ? UNGT_EXPR : GT_EXPR;
2821 case LTGT_EXPR:
2822 return UNEQ_EXPR;
2823 case UNEQ_EXPR:
2824 return LTGT_EXPR;
2825 case UNGT_EXPR:
2826 return LE_EXPR;
2827 case UNGE_EXPR:
2828 return LT_EXPR;
2829 case UNLT_EXPR:
2830 return GE_EXPR;
2831 case UNLE_EXPR:
2832 return GT_EXPR;
2833 case ORDERED_EXPR:
2834 return UNORDERED_EXPR;
2835 case UNORDERED_EXPR:
2836 return ORDERED_EXPR;
2837 default:
2838 gcc_unreachable ();
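/* For example, inverting x < y when NaNs are honored yields x UNGE y,
   which is true exactly when x < y is false, including the unordered
   case; plain GE_EXPR would mishandle NaN operands.  */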
2842 /* Similar, but return the comparison that results if the operands are
2843 swapped. This is safe for floating-point. */
2845 enum tree_code
2846 swap_tree_comparison (enum tree_code code)
2848 switch (code)
2850 case EQ_EXPR:
2851 case NE_EXPR:
2852 case ORDERED_EXPR:
2853 case UNORDERED_EXPR:
2854 case LTGT_EXPR:
2855 case UNEQ_EXPR:
2856 return code;
2857 case GT_EXPR:
2858 return LT_EXPR;
2859 case GE_EXPR:
2860 return LE_EXPR;
2861 case LT_EXPR:
2862 return GT_EXPR;
2863 case LE_EXPR:
2864 return GE_EXPR;
2865 case UNGT_EXPR:
2866 return UNLT_EXPR;
2867 case UNGE_EXPR:
2868 return UNLE_EXPR;
2869 case UNLT_EXPR:
2870 return UNGT_EXPR;
2871 case UNLE_EXPR:
2872 return UNGE_EXPR;
2873 default:
2874 gcc_unreachable ();
2879 /* Convert a comparison tree code from an enum tree_code representation
2880 into a compcode bit-based encoding. This function is the inverse of
2881 compcode_to_comparison. */
2883 static enum comparison_code
2884 comparison_to_compcode (enum tree_code code)
2886 switch (code)
2888 case LT_EXPR:
2889 return COMPCODE_LT;
2890 case EQ_EXPR:
2891 return COMPCODE_EQ;
2892 case LE_EXPR:
2893 return COMPCODE_LE;
2894 case GT_EXPR:
2895 return COMPCODE_GT;
2896 case NE_EXPR:
2897 return COMPCODE_NE;
2898 case GE_EXPR:
2899 return COMPCODE_GE;
2900 case ORDERED_EXPR:
2901 return COMPCODE_ORD;
2902 case UNORDERED_EXPR:
2903 return COMPCODE_UNORD;
2904 case UNLT_EXPR:
2905 return COMPCODE_UNLT;
2906 case UNEQ_EXPR:
2907 return COMPCODE_UNEQ;
2908 case UNLE_EXPR:
2909 return COMPCODE_UNLE;
2910 case UNGT_EXPR:
2911 return COMPCODE_UNGT;
2912 case LTGT_EXPR:
2913 return COMPCODE_LTGT;
2914 case UNGE_EXPR:
2915 return COMPCODE_UNGE;
2916 default:
2917 gcc_unreachable ();
2921 /* Convert a compcode bit-based encoding of a comparison operator back
2922 to GCC's enum tree_code representation. This function is the
2923 inverse of comparison_to_compcode. */
2925 static enum tree_code
2926 compcode_to_comparison (enum comparison_code code)
2928 switch (code)
2930 case COMPCODE_LT:
2931 return LT_EXPR;
2932 case COMPCODE_EQ:
2933 return EQ_EXPR;
2934 case COMPCODE_LE:
2935 return LE_EXPR;
2936 case COMPCODE_GT:
2937 return GT_EXPR;
2938 case COMPCODE_NE:
2939 return NE_EXPR;
2940 case COMPCODE_GE:
2941 return GE_EXPR;
2942 case COMPCODE_ORD:
2943 return ORDERED_EXPR;
2944 case COMPCODE_UNORD:
2945 return UNORDERED_EXPR;
2946 case COMPCODE_UNLT:
2947 return UNLT_EXPR;
2948 case COMPCODE_UNEQ:
2949 return UNEQ_EXPR;
2950 case COMPCODE_UNLE:
2951 return UNLE_EXPR;
2952 case COMPCODE_UNGT:
2953 return UNGT_EXPR;
2954 case COMPCODE_LTGT:
2955 return LTGT_EXPR;
2956 case COMPCODE_UNGE:
2957 return UNGE_EXPR;
2958 default:
2959 gcc_unreachable ();
2963 /* Return true if COND1 tests the opposite condition of COND2. */
2965 bool
2966 inverse_conditions_p (const_tree cond1, const_tree cond2)
2968 return (COMPARISON_CLASS_P (cond1)
2969 && COMPARISON_CLASS_P (cond2)
2970 && (invert_tree_comparison
2971 (TREE_CODE (cond1),
2972 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2973 && operand_equal_p (TREE_OPERAND (cond1, 0),
2974 TREE_OPERAND (cond2, 0), 0)
2975 && operand_equal_p (TREE_OPERAND (cond1, 1),
2976 TREE_OPERAND (cond2, 1), 0));
2979 /* Return a tree for the comparison which is the combination of
2980 doing the AND or OR (depending on CODE) of the two operations LCODE
2981 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2982 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2983 if this makes the transformation invalid. */
2985 tree
2986 combine_comparisons (location_t loc,
2987 enum tree_code code, enum tree_code lcode,
2988 enum tree_code rcode, tree truth_type,
2989 tree ll_arg, tree lr_arg)
2991 bool honor_nans = HONOR_NANS (ll_arg);
2992 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2993 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2994 int compcode;
2996 switch (code)
2998 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2999 compcode = lcompcode & rcompcode;
3000 break;
3002 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3003 compcode = lcompcode | rcompcode;
3004 break;
3006 default:
3007 return NULL_TREE;
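/* The bitwise AND/OR above is sound because each comparison_code value
   is a mask over the four mutually exclusive outcomes LT, EQ, GT and
   UNORDERED, so combining the masks combines the predicates exactly.  */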
3010 if (!honor_nans)
3012 /* Eliminate unordered comparisons, as well as LTGT and ORD
3013 which are not used unless the mode has NaNs. */
3014 compcode &= ~COMPCODE_UNORD;
3015 if (compcode == COMPCODE_LTGT)
3016 compcode = COMPCODE_NE;
3017 else if (compcode == COMPCODE_ORD)
3018 compcode = COMPCODE_TRUE;
3020 else if (flag_trapping_math)
3022 /* Check that the original operation and the optimized ones will trap
3023 under the same condition. */
3024 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3025 && (lcompcode != COMPCODE_EQ)
3026 && (lcompcode != COMPCODE_ORD);
3027 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3028 && (rcompcode != COMPCODE_EQ)
3029 && (rcompcode != COMPCODE_ORD);
3030 bool trap = (compcode & COMPCODE_UNORD) == 0
3031 && (compcode != COMPCODE_EQ)
3032 && (compcode != COMPCODE_ORD);
3034 /* In a short-circuited boolean expression the LHS might be
3035 such that the RHS, if evaluated, will never trap. For
3036 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3037 if neither x nor y is NaN. (This is a mixed blessing: for
3038 example, the expression above will never trap, hence
3039 optimizing it to x < y would be invalid). */
3040 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3041 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3042 rtrap = false;
3044 /* If the comparison was short-circuited, and only the RHS
3045 trapped, we may now generate a spurious trap. */
3046 if (rtrap && !ltrap
3047 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3048 return NULL_TREE;
3050 /* If we changed the conditions that cause a trap, we lose. */
3051 if ((ltrap || rtrap) != trap)
3052 return NULL_TREE;
3055 if (compcode == COMPCODE_TRUE)
3056 return constant_boolean_node (true, truth_type);
3057 else if (compcode == COMPCODE_FALSE)
3058 return constant_boolean_node (false, truth_type);
3059 else
3061 enum tree_code tcode;
3063 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3064 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3068 /* Return nonzero if two operands (typically of the same tree node)
3069 are necessarily equal. FLAGS modifies behavior as follows:
3071 If OEP_ONLY_CONST is set, only return nonzero for constants.
3072 This function tests whether the operands are indistinguishable;
3073 it does not test whether they are equal using C's == operation.
3074 The distinction is important for IEEE floating point, because
3075 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3076 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3078 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3079 even though it may hold multiple values during a function.
3080 This is because a GCC tree node guarantees that nothing else is
3081 executed between the evaluation of its "operands" (which may often
3082 be evaluated in arbitrary order). Hence if the operands themselves
3083 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3084 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3085 unset means assuming isochronic (or instantaneous) tree equivalence.
3086 Unless comparing arbitrary expression trees, such as from different
3087 statements, this flag can usually be left unset.
3089 If OEP_PURE_SAME is set, then pure functions with identical arguments
3090 are considered the same. It is used when the caller has other ways
3091 to ensure that global memory is unchanged in between.
3093 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3094 not values of expressions.
3096 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3097 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3099 If OEP_BITWISE is set, then require the values to be bitwise identical
3100 rather than simply numerically equal. Do not take advantage of things
3101 like math-related flags or undefined behavior; only return true for
3102 values that are provably bitwise identical in all circumstances.
3104 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3105 any operand with side effects. This is unnecessarily conservative in the
3106 case we know that arg0 and arg1 are in disjoint code paths (such as in
3107 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3108 addresses with TREE_CONSTANT flag set so we know that &var == &var
3109 even if var is volatile. */
3111 bool
3112 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3113 unsigned int flags)
3115 bool r;
3116 if (verify_hash_value (arg0, arg1, flags, &r))
3117 return r;
3119 STRIP_ANY_LOCATION_WRAPPER (arg0);
3120 STRIP_ANY_LOCATION_WRAPPER (arg1);
3122 /* If either is ERROR_MARK, they aren't equal. */
3123 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3124 || TREE_TYPE (arg0) == error_mark_node
3125 || TREE_TYPE (arg1) == error_mark_node)
3126 return false;
3128 /* Similar, if either does not have a type (like a template id),
3129 they aren't equal. */
3130 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3131 return false;
3133 /* Bitwise identity makes no sense if the values have different layouts. */
3134 if ((flags & OEP_BITWISE)
3135 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3136 return false;
3138 /* We cannot consider pointers to different address space equal. */
3139 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3140 && POINTER_TYPE_P (TREE_TYPE (arg1))
3141 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3142 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3143 return false;
3145 /* Check equality of integer constants before bailing out due to
3146 precision differences. */
3147 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3149 /* Address of INTEGER_CST is not defined; check that we did not forget
3150 to drop the OEP_ADDRESS_OF flags. */
3151 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3152 return tree_int_cst_equal (arg0, arg1);
3155 if (!(flags & OEP_ADDRESS_OF))
3157 /* If the two types don't have the same signedness, then we can't consider
3158 them equal. We must check this before the STRIP_NOPS calls
3159 because they may change the signedness of the arguments. As pointers
3160 strictly don't have a signedness, require either two pointers or
3161 two non-pointers as well. */
3162 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3163 || POINTER_TYPE_P (TREE_TYPE (arg0))
3164 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3165 return false;
3167 /* If the two types don't have the same precision, then it is not safe
3168 to strip NOPs. */
3169 if (element_precision (TREE_TYPE (arg0))
3170 != element_precision (TREE_TYPE (arg1)))
3171 return false;
3173 STRIP_NOPS (arg0);
3174 STRIP_NOPS (arg1);
3176 #if 0
3177 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3178 sanity check once the issue is solved. */
3179 else
3180 /* Addresses of conversions and SSA_NAMEs (and many other things)
3181 are not defined. Check that we did not forget to drop the
3182 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3183 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3184 && TREE_CODE (arg0) != SSA_NAME);
3185 #endif
3187 /* In case both args are comparisons but with different comparison
3188 code, try to swap the comparison operands of one arg to produce
3189 a match and compare that variant. */
3190 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3191 && COMPARISON_CLASS_P (arg0)
3192 && COMPARISON_CLASS_P (arg1))
3194 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3196 if (TREE_CODE (arg0) == swap_code)
3197 return operand_equal_p (TREE_OPERAND (arg0, 0),
3198 TREE_OPERAND (arg1, 1), flags)
3199 && operand_equal_p (TREE_OPERAND (arg0, 1),
3200 TREE_OPERAND (arg1, 0), flags);
3203 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3205 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3206 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3208 else if (flags & OEP_ADDRESS_OF)
3210 /* If we are interested in comparing addresses, ignore
3211 MEM_REF wrappings of the base that can appear just for
3212 TBAA reasons. */
3213 if (TREE_CODE (arg0) == MEM_REF
3214 && DECL_P (arg1)
3215 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3216 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3217 && integer_zerop (TREE_OPERAND (arg0, 1)))
3218 return true;
3219 else if (TREE_CODE (arg1) == MEM_REF
3220 && DECL_P (arg0)
3221 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3222 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3223 && integer_zerop (TREE_OPERAND (arg1, 1)))
3224 return true;
3225 return false;
3227 else
3228 return false;
3231 /* When not checking addresses, this is needed for conversions and for
3232 COMPONENT_REF. Might as well play it safe and always test this. */
3233 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3234 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3235 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3236 && !(flags & OEP_ADDRESS_OF)))
3237 return false;
3239 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3240 We don't care about side effects in that case because the SAVE_EXPR
3241 takes care of that for us. In all other cases, two expressions are
3242 equal if they have no side effects. If we have two identical
3243 expressions with side effects that should be treated the same due
3244 to the only side effects being identical SAVE_EXPR's, that will
3245 be detected in the recursive calls below.
3246 If we are taking an invariant address of two identical objects
3247 they are necessarily equal as well. */
3248 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3249 && (TREE_CODE (arg0) == SAVE_EXPR
3250 || (flags & OEP_MATCH_SIDE_EFFECTS)
3251 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3252 return true;
3254 /* Next handle constant cases, those for which we can return 1 even
3255 if ONLY_CONST is set. */
3256 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3257 switch (TREE_CODE (arg0))
3259 case INTEGER_CST:
3260 return tree_int_cst_equal (arg0, arg1);
3262 case FIXED_CST:
3263 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3264 TREE_FIXED_CST (arg1));
3266 case REAL_CST:
3267 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3268 return true;
3270 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3272 /* If we do not distinguish between signed and unsigned zero,
3273 consider them equal. */
3274 if (real_zerop (arg0) && real_zerop (arg1))
3275 return true;
3277 return false;
3279 case VECTOR_CST:
3281 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3282 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3283 return false;
3285 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3286 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3287 return false;
3289 unsigned int count = vector_cst_encoded_nelts (arg0);
3290 for (unsigned int i = 0; i < count; ++i)
3291 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3292 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3293 return false;
3294 return true;
3297 case COMPLEX_CST:
3298 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3299 flags)
3300 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3301 flags));
3303 case STRING_CST:
3304 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3305 && ! memcmp (TREE_STRING_POINTER (arg0),
3306 TREE_STRING_POINTER (arg1),
3307 TREE_STRING_LENGTH (arg0)));
3309 case ADDR_EXPR:
3310 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3311 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3312 flags | OEP_ADDRESS_OF
3313 | OEP_MATCH_SIDE_EFFECTS);
3314 case CONSTRUCTOR:
3315 /* In GIMPLE empty constructors are allowed in initializers of
3316 aggregates. */
3317 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3318 default:
3319 break;
3322 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3323 two instances of undefined behavior will give identical results. */
3324 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3325 return false;
3327 /* Define macros to test an operand from arg0 and arg1 for equality and a
3328 variant that allows null and views null as being different from any
3329 non-null value. In the latter case, if either is null, then both
3330 must be; otherwise, do the normal comparison. */
3331 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3332 TREE_OPERAND (arg1, N), flags)
3334 #define OP_SAME_WITH_NULL(N) \
3335 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3336 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3338 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3340 case tcc_unary:
3341 /* Two conversions are equal only if signedness and modes match. */
3342 switch (TREE_CODE (arg0))
3344 CASE_CONVERT:
3345 case FIX_TRUNC_EXPR:
3346 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3347 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3348 return false;
3349 break;
3350 default:
3351 break;
3354 return OP_SAME (0);
3357 case tcc_comparison:
3358 case tcc_binary:
3359 if (OP_SAME (0) && OP_SAME (1))
3360 return true;
3362 /* For commutative ops, allow the other order. */
3363 return (commutative_tree_code (TREE_CODE (arg0))
3364 && operand_equal_p (TREE_OPERAND (arg0, 0),
3365 TREE_OPERAND (arg1, 1), flags)
3366 && operand_equal_p (TREE_OPERAND (arg0, 1),
3367 TREE_OPERAND (arg1, 0), flags));
3369 case tcc_reference:
3370 /* If either of the pointer (or reference) expressions we are
3371 dereferencing contain a side effect, these cannot be equal,
3372 but their addresses can be. */
3373 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3374 && (TREE_SIDE_EFFECTS (arg0)
3375 || TREE_SIDE_EFFECTS (arg1)))
3376 return false;
3378 switch (TREE_CODE (arg0))
3380 case INDIRECT_REF:
3381 if (!(flags & OEP_ADDRESS_OF))
3383 if (TYPE_ALIGN (TREE_TYPE (arg0))
3384 != TYPE_ALIGN (TREE_TYPE (arg1)))
3385 return false;
3386 /* Verify that the access types are compatible. */
3387 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3388 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3389 return false;
3391 flags &= ~OEP_ADDRESS_OF;
3392 return OP_SAME (0);
3394 case IMAGPART_EXPR:
3395 /* Require the same offset. */
3396 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3397 TYPE_SIZE (TREE_TYPE (arg1)),
3398 flags & ~OEP_ADDRESS_OF))
3399 return false;
3401 /* Fallthru. */
3402 case REALPART_EXPR:
3403 case VIEW_CONVERT_EXPR:
3404 return OP_SAME (0);
3406 case TARGET_MEM_REF:
3407 case MEM_REF:
3408 if (!(flags & OEP_ADDRESS_OF))
3410 /* Require equal access sizes */
3411 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3412 && (!TYPE_SIZE (TREE_TYPE (arg0))
3413 || !TYPE_SIZE (TREE_TYPE (arg1))
3414 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3415 TYPE_SIZE (TREE_TYPE (arg1)),
3416 flags)))
3417 return false;
3418 /* Verify that access happens in similar types. */
3419 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3420 return false;
3421 /* Verify that accesses are TBAA compatible. */
3422 if (!alias_ptr_types_compatible_p
3423 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3424 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3425 || (MR_DEPENDENCE_CLIQUE (arg0)
3426 != MR_DEPENDENCE_CLIQUE (arg1))
3427 || (MR_DEPENDENCE_BASE (arg0)
3428 != MR_DEPENDENCE_BASE (arg1)))
3429 return false;
3430 /* Verify that alignment is compatible. */
3431 if (TYPE_ALIGN (TREE_TYPE (arg0))
3432 != TYPE_ALIGN (TREE_TYPE (arg1)))
3433 return false;
3435 flags &= ~OEP_ADDRESS_OF;
3436 return (OP_SAME (0) && OP_SAME (1)
3437 /* TARGET_MEM_REF require equal extra operands. */
3438 && (TREE_CODE (arg0) != TARGET_MEM_REF
3439 || (OP_SAME_WITH_NULL (2)
3440 && OP_SAME_WITH_NULL (3)
3441 && OP_SAME_WITH_NULL (4))));
3443 case ARRAY_REF:
3444 case ARRAY_RANGE_REF:
3445 if (!OP_SAME (0))
3446 return false;
3447 flags &= ~OEP_ADDRESS_OF;
3448 /* First compare the array index by value if it is constant, since the
3449 indexes may have different types but the same value here. */
3450 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3451 TREE_OPERAND (arg1, 1))
3452 || OP_SAME (1))
3453 && OP_SAME_WITH_NULL (2)
3454 && OP_SAME_WITH_NULL (3)
3455 /* Compare low bound and element size as with OEP_ADDRESS_OF
3456 we have to account for the offset of the ref. */
3457 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3458 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3459 || (operand_equal_p (array_ref_low_bound
3460 (CONST_CAST_TREE (arg0)),
3461 array_ref_low_bound
3462 (CONST_CAST_TREE (arg1)), flags)
3463 && operand_equal_p (array_ref_element_size
3464 (CONST_CAST_TREE (arg0)),
3465 array_ref_element_size
3466 (CONST_CAST_TREE (arg1)),
3467 flags))));
3469 case COMPONENT_REF:
3470 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3471 may be NULL when we're called to compare MEM_EXPRs. */
3472 if (!OP_SAME_WITH_NULL (0))
3473 return false;
3475 bool compare_address = flags & OEP_ADDRESS_OF;
3477 /* Most of the time we only need to compare FIELD_DECLs for equality.
3478 However, when determining an address, look into the actual offsets.
3479 These may match for unions and unshared record types. */
3480 flags &= ~OEP_ADDRESS_OF;
3481 if (!OP_SAME (1))
3483 if (compare_address
3484 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3486 tree field0 = TREE_OPERAND (arg0, 1);
3487 tree field1 = TREE_OPERAND (arg1, 1);
3489 /* Non-FIELD_DECL operands can appear in C++ templates. */
3490 if (TREE_CODE (field0) != FIELD_DECL
3491 || TREE_CODE (field1) != FIELD_DECL
3492 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3493 DECL_FIELD_OFFSET (field1), flags)
3494 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3495 DECL_FIELD_BIT_OFFSET (field1),
3496 flags))
3497 return false;
3499 else
3500 return false;
3503 return OP_SAME_WITH_NULL (2);
3505 case BIT_FIELD_REF:
3506 if (!OP_SAME (0))
3507 return false;
3508 flags &= ~OEP_ADDRESS_OF;
3509 return OP_SAME (1) && OP_SAME (2);
3511 default:
3512 return false;
3515 case tcc_expression:
3516 switch (TREE_CODE (arg0))
3518 case ADDR_EXPR:
3519 /* Be sure we pass right ADDRESS_OF flag. */
3520 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3521 return operand_equal_p (TREE_OPERAND (arg0, 0),
3522 TREE_OPERAND (arg1, 0),
3523 flags | OEP_ADDRESS_OF);
3525 case TRUTH_NOT_EXPR:
3526 return OP_SAME (0);
3528 case TRUTH_ANDIF_EXPR:
3529 case TRUTH_ORIF_EXPR:
3530 return OP_SAME (0) && OP_SAME (1);
3532 case WIDEN_MULT_PLUS_EXPR:
3533 case WIDEN_MULT_MINUS_EXPR:
3534 if (!OP_SAME (2))
3535 return false;
3536 /* The multiplication operands are commutative. */
3537 /* FALLTHRU */
3539 case TRUTH_AND_EXPR:
3540 case TRUTH_OR_EXPR:
3541 case TRUTH_XOR_EXPR:
3542 if (OP_SAME (0) && OP_SAME (1))
3543 return true;
3545 /* Otherwise take into account this is a commutative operation. */
3546 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3547 TREE_OPERAND (arg1, 1), flags)
3548 && operand_equal_p (TREE_OPERAND (arg0, 1),
3549 TREE_OPERAND (arg1, 0), flags));
3551 case COND_EXPR:
3552 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3553 return false;
3554 flags &= ~OEP_ADDRESS_OF;
3555 return OP_SAME (0);
3557 case BIT_INSERT_EXPR:
3558 /* BIT_INSERT_EXPR has an implicit operand, the type precision of
3559 op1. Need to check to make sure they are the same. */
3560 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3561 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3562 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3563 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3564 return false;
3565 /* FALLTHRU */
3567 case VEC_COND_EXPR:
3568 case DOT_PROD_EXPR:
3569 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3571 case MODIFY_EXPR:
3572 case INIT_EXPR:
3573 case COMPOUND_EXPR:
3574 case PREDECREMENT_EXPR:
3575 case PREINCREMENT_EXPR:
3576 case POSTDECREMENT_EXPR:
3577 case POSTINCREMENT_EXPR:
3578 if (flags & OEP_LEXICOGRAPHIC)
3579 return OP_SAME (0) && OP_SAME (1);
3580 return false;
3582 case CLEANUP_POINT_EXPR:
3583 case EXPR_STMT:
3584 case SAVE_EXPR:
3585 if (flags & OEP_LEXICOGRAPHIC)
3586 return OP_SAME (0);
3587 return false;
3589 case OBJ_TYPE_REF:
3590 /* Virtual table reference. */
3591 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3592 OBJ_TYPE_REF_EXPR (arg1), flags))
3593 return false;
3594 flags &= ~OEP_ADDRESS_OF;
3595 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3596 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3597 return false;
3598 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3599 OBJ_TYPE_REF_OBJECT (arg1), flags))
3600 return false;
3601 if (virtual_method_call_p (arg0))
3603 if (!virtual_method_call_p (arg1))
3604 return false;
3605 return types_same_for_odr (obj_type_ref_class (arg0),
3606 obj_type_ref_class (arg1));
3608 return false;
3610 default:
3611 return false;
3614 case tcc_vl_exp:
3615 switch (TREE_CODE (arg0))
3617 case CALL_EXPR:
3618 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3619 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3620 /* If the two CALL_EXPRs are not both internal or both normal
3621 function calls, then they are not equal. */
3622 return false;
3623 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3625 /* If the CALL_EXPRs call different internal functions, then they
3626 are not equal. */
3627 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3628 return false;
3630 else
3632 /* If the CALL_EXPRs call different functions, then they are not
3633 equal. */
3634 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3635 flags))
3636 return false;
3639 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3641 unsigned int cef = call_expr_flags (arg0);
3642 if (flags & OEP_PURE_SAME)
3643 cef &= ECF_CONST | ECF_PURE;
3644 else
3645 cef &= ECF_CONST;
3646 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3647 return false;
3650 /* Now see if all the arguments are the same. */
3652 const_call_expr_arg_iterator iter0, iter1;
3653 const_tree a0, a1;
3654 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3655 a1 = first_const_call_expr_arg (arg1, &iter1);
3656 a0 && a1;
3657 a0 = next_const_call_expr_arg (&iter0),
3658 a1 = next_const_call_expr_arg (&iter1))
3659 if (! operand_equal_p (a0, a1, flags))
3660 return false;
3662 /* If we get here and both argument lists are exhausted
3663 then the CALL_EXPRs are equal. */
3664 return ! (a0 || a1);
3666 default:
3667 return false;
3670 case tcc_declaration:
3671 /* Consider __builtin_sqrt equal to sqrt. */
3672 if (TREE_CODE (arg0) == FUNCTION_DECL)
3673 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3674 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3675 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3676 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3678 if (DECL_P (arg0)
3679 && (flags & OEP_DECL_NAME)
3680 && (flags & OEP_LEXICOGRAPHIC))
3682 /* Consider decls with the same name equal. The caller needs
3683 to make sure they refer to the same entity (such as a function
3684 formal parameter). */
3685 tree a0name = DECL_NAME (arg0);
3686 tree a1name = DECL_NAME (arg1);
3687 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3688 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3689 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3691 return false;
3693 case tcc_exceptional:
3694 if (TREE_CODE (arg0) == CONSTRUCTOR)
3696 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3697 return false;
3699 /* In GIMPLE constructors are used only to build vectors from
3700 elements. Individual elements in the constructor must be
3701 indexed in increasing order and form an initial sequence.
3703 We make no effort to compare constructors in generic.
3704 (see sem_variable::equals in ipa-icf which can do so for
3705 constants). */
3706 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3707 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3708 return false;
3710 /* Be sure that the constructed vectors have the same representation.
3711 So far we have only tested that element precision and modes match.
3712 Vectors may be BLKmode, so also check that the number of
3713 parts matches. */
3714 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3715 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3716 return false;
3718 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3719 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3720 unsigned int len = vec_safe_length (v0);
3722 if (len != vec_safe_length (v1))
3723 return false;
3725 for (unsigned int i = 0; i < len; i++)
3727 constructor_elt *c0 = &(*v0)[i];
3728 constructor_elt *c1 = &(*v1)[i];
3730 if (!operand_equal_p (c0->value, c1->value, flags)
3731 /* In GIMPLE the indexes can be either NULL or matching i.
3732 Double check this so we won't get false
3733 positives for GENERIC. */
3734 || (c0->index
3735 && (TREE_CODE (c0->index) != INTEGER_CST
3736 || compare_tree_int (c0->index, i)))
3737 || (c1->index
3738 && (TREE_CODE (c1->index) != INTEGER_CST
3739 || compare_tree_int (c1->index, i))))
3740 return false;
3742 return true;
3744 else if (TREE_CODE (arg0) == STATEMENT_LIST
3745 && (flags & OEP_LEXICOGRAPHIC))
3747 /* Compare the STATEMENT_LISTs. */
3748 tree_stmt_iterator tsi1, tsi2;
3749 tree body1 = CONST_CAST_TREE (arg0);
3750 tree body2 = CONST_CAST_TREE (arg1);
3751 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3752 tsi_next (&tsi1), tsi_next (&tsi2))
3754 /* The lists don't have the same number of statements. */
3755 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3756 return false;
3757 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3758 return true;
3759 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3760 flags & (OEP_LEXICOGRAPHIC
3761 | OEP_NO_HASH_CHECK)))
3762 return false;
3765 return false;
3767 case tcc_statement:
3768 switch (TREE_CODE (arg0))
3770 case RETURN_EXPR:
3771 if (flags & OEP_LEXICOGRAPHIC)
3772 return OP_SAME_WITH_NULL (0);
3773 return false;
3774 case DEBUG_BEGIN_STMT:
3775 if (flags & OEP_LEXICOGRAPHIC)
3776 return true;
3777 return false;
3778 default:
3779 return false;
3782 default:
3783 return false;
3786 #undef OP_SAME
3787 #undef OP_SAME_WITH_NULL
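/* A typical usage sketch: the binary folders call
   operand_equal_p (arg0, arg1, 0) to recognize patterns such as x - x
   or x < x before rewriting them.  */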
3790 /* Generate a hash value for an expression. This can be used iteratively
3791 by passing a previous result as the HSTATE argument. */
3793 void
3794 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3795 unsigned int flags)
3797 int i;
3798 enum tree_code code;
3799 enum tree_code_class tclass;
3801 if (t == NULL_TREE || t == error_mark_node)
3803 hstate.merge_hash (0);
3804 return;
3807 STRIP_ANY_LOCATION_WRAPPER (t);
3809 if (!(flags & OEP_ADDRESS_OF))
3810 STRIP_NOPS (t);
3812 code = TREE_CODE (t);
3814 switch (code)
3816 /* Alas, constants aren't shared, so we can't rely on pointer
3817 identity. */
3818 case VOID_CST:
3819 hstate.merge_hash (0);
3820 return;
3821 case INTEGER_CST:
3822 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3823 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3824 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3825 return;
3826 case REAL_CST:
3828 unsigned int val2;
3829 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3830 val2 = rvc_zero;
3831 else
3832 val2 = real_hash (TREE_REAL_CST_PTR (t));
3833 hstate.merge_hash (val2);
3834 return;
3836 case FIXED_CST:
3838 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3839 hstate.merge_hash (val2);
3840 return;
3842 case STRING_CST:
3843 hstate.add ((const void *) TREE_STRING_POINTER (t),
3844 TREE_STRING_LENGTH (t));
3845 return;
3846 case COMPLEX_CST:
3847 hash_operand (TREE_REALPART (t), hstate, flags);
3848 hash_operand (TREE_IMAGPART (t), hstate, flags);
3849 return;
3850 case VECTOR_CST:
3852 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3853 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3854 unsigned int count = vector_cst_encoded_nelts (t);
3855 for (unsigned int i = 0; i < count; ++i)
3856 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3857 return;
3859 case SSA_NAME:
3860 /* We can just compare by pointer. */
3861 hstate.add_hwi (SSA_NAME_VERSION (t));
3862 return;
3863 case PLACEHOLDER_EXPR:
3864 /* The node itself doesn't matter. */
3865 return;
3866 case BLOCK:
3867 case OMP_CLAUSE:
3868 /* Ignore. */
3869 return;
3870 case TREE_LIST:
3871 /* A list of expressions, for a CALL_EXPR or as the elements of a
3872 VECTOR_CST. */
3873 for (; t; t = TREE_CHAIN (t))
3874 hash_operand (TREE_VALUE (t), hstate, flags);
3875 return;
3876 case CONSTRUCTOR:
3878 unsigned HOST_WIDE_INT idx;
3879 tree field, value;
3880 flags &= ~OEP_ADDRESS_OF;
3881 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3882 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3884 /* In GIMPLE the indexes can be either NULL or matching i. */
3885 if (field == NULL_TREE)
3886 field = bitsize_int (idx);
3887 hash_operand (field, hstate, flags);
3888 hash_operand (value, hstate, flags);
3890 return;
3892 case STATEMENT_LIST:
3894 tree_stmt_iterator i;
3895 for (i = tsi_start (CONST_CAST_TREE (t));
3896 !tsi_end_p (i); tsi_next (&i))
3897 hash_operand (tsi_stmt (i), hstate, flags);
3898 return;
3900 case TREE_VEC:
3901 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3902 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3903 return;
3904 case IDENTIFIER_NODE:
3905 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3906 return;
3907 case FUNCTION_DECL:
3908 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3909 Otherwise nodes that compare equal according to operand_equal_p might
3910 get different hash codes. However, don't do this for machine specific
3911 or front end builtins, since the function code is overloaded in those
3912 cases. */
3913 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3914 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3916 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3917 code = TREE_CODE (t);
3919 /* FALL THROUGH */
3920 default:
3921 if (POLY_INT_CST_P (t))
3923 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3924 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3925 return;
3927 tclass = TREE_CODE_CLASS (code);
3929 if (tclass == tcc_declaration)
3931 /* DECLs have a unique ID. */
3932 hstate.add_hwi (DECL_UID (t));
3934 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3936 /* For comparisons that can be swapped, use the lower
3937 tree code. */
3938 enum tree_code ccode = swap_tree_comparison (code);
3939 if (code < ccode)
3940 ccode = code;
3941 hstate.add_object (ccode);
3942 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3943 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3945 else if (CONVERT_EXPR_CODE_P (code))
3947 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3948 operand_equal_p. */
3949 enum tree_code ccode = NOP_EXPR;
3950 hstate.add_object (ccode);
3952 /* Don't hash the type, as that can lead to having nodes which
3953 compare equal according to operand_equal_p, but which
3954 have different hash codes. Make sure to include signedness
3955 in the hash computation. */
3956 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3957 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3959 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3960 else if (code == MEM_REF
3961 && (flags & OEP_ADDRESS_OF) != 0
3962 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3963 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3964 && integer_zerop (TREE_OPERAND (t, 1)))
3965 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3966 hstate, flags);
3967 /* Don't ICE on FE specific trees, or their arguments etc.
3968 during operand_equal_p hash verification. */
3969 else if (!IS_EXPR_CODE_CLASS (tclass))
3970 gcc_assert (flags & OEP_HASH_CHECK);
3971 else
3973 unsigned int sflags = flags;
3975 hstate.add_object (code);
3977 switch (code)
3979 case ADDR_EXPR:
3980 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3981 flags |= OEP_ADDRESS_OF;
3982 sflags = flags;
3983 break;
3985 case INDIRECT_REF:
3986 case MEM_REF:
3987 case TARGET_MEM_REF:
3988 flags &= ~OEP_ADDRESS_OF;
3989 sflags = flags;
3990 break;
3992 case COMPONENT_REF:
3993 if (sflags & OEP_ADDRESS_OF)
3995 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3996 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3997 hstate, flags & ~OEP_ADDRESS_OF);
3998 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3999 hstate, flags & ~OEP_ADDRESS_OF);
4000 return;
4002 break;
4003 case ARRAY_REF:
4004 case ARRAY_RANGE_REF:
4005 case BIT_FIELD_REF:
4006 sflags &= ~OEP_ADDRESS_OF;
4007 break;
4009 case COND_EXPR:
4010 flags &= ~OEP_ADDRESS_OF;
4011 break;
4013 case WIDEN_MULT_PLUS_EXPR:
4014 case WIDEN_MULT_MINUS_EXPR:
4016 /* The multiplication operands are commutative. */
4017 inchash::hash one, two;
4018 hash_operand (TREE_OPERAND (t, 0), one, flags);
4019 hash_operand (TREE_OPERAND (t, 1), two, flags);
4020 hstate.add_commutative (one, two);
4021 hash_operand (TREE_OPERAND (t, 2), two, flags);
4022 return;
4025 case CALL_EXPR:
4026 if (CALL_EXPR_FN (t) == NULL_TREE)
4027 hstate.add_int (CALL_EXPR_IFN (t));
4028 break;
4030 case TARGET_EXPR:
4031 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4032 Usually different TARGET_EXPRs should just use
4033 different temporaries in their slots. */
4034 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4035 return;
4037 case OBJ_TYPE_REF:
4038 /* Virtual table reference. */
4039 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4040 flags &= ~OEP_ADDRESS_OF;
4041 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4042 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4043 if (!virtual_method_call_p (t))
4044 return;
4045 if (tree c = obj_type_ref_class (t))
4047 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4048 /* We compute mangled names only when free_lang_data is run.
4049 In that case we can hash precisely. */
4050 if (TREE_CODE (c) == TYPE_DECL
4051 && DECL_ASSEMBLER_NAME_SET_P (c))
4052 hstate.add_object
4053 (IDENTIFIER_HASH_VALUE
4054 (DECL_ASSEMBLER_NAME (c)));
4056 return;
4057 default:
4058 break;
4061 /* Don't hash the type, as that can lead to having nodes which
4062 compare equal according to operand_equal_p, but which
4063 have different hash codes. */
4064 if (code == NON_LVALUE_EXPR)
4066 /* Make sure to include signedness in the hash computation. */
4067 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4068 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4071 else if (commutative_tree_code (code))
4073 /* It's a commutative expression. We want to hash it the same
4074 however it appears. We do this by first hashing both operands
4075 and then rehashing based on the order of their independent
4076 hashes. */
4077 inchash::hash one, two;
4078 hash_operand (TREE_OPERAND (t, 0), one, flags);
4079 hash_operand (TREE_OPERAND (t, 1), two, flags);
4080 hstate.add_commutative (one, two);
4082 else
4083 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4084 hash_operand (TREE_OPERAND (t, i), hstate,
4085 i == 0 ? flags : sflags);
4087 return;
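/* Illustrative sketch (not GCC code): the order-insensitive mixing done
   by hstate.add_commutative above can be modelled by hashing each
   operand separately and then combining the two hashes in a way that is
   symmetric in their order, so that "a + b" and "b + a" hash
   identically:

     unsigned
     mix_commutative (unsigned h1, unsigned h2)
     {
       unsigned lo = h1 < h2 ? h1 : h2;
       unsigned hi = h1 < h2 ? h2 : h1;
       return lo ^ (hi * 0x9e3779b9u);  // any order-symmetric mix works
     }
*/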
4091 bool
4092 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4093 unsigned int flags, bool *ret)
4095 /* When checking, and unless comparing DECL names, verify that if
4096 the outermost operand_equal_p call returns non-zero then ARG0
4097 and ARG1 have the same hash value. */
4098 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4100 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4102 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4104 inchash::hash hstate0 (0), hstate1 (0);
4105 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4106 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4107 hashval_t h0 = hstate0.end ();
4108 hashval_t h1 = hstate1.end ();
4109 gcc_assert (h0 == h1);
4111 *ret = true;
4113 else
4114 *ret = false;
4116 return true;
4119 return false;
4123 static operand_compare default_compare_instance;
4125 /* Convenience wrapper around the operand_compare class, since usually we
4126 do not need to play with the valueizer. */
4128 bool
4129 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4131 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
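/* Usage sketch (illustrative): most callers ask for plain value
   equality with FLAGS == 0, e.g.

     if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
       ... both operands are known to compute the same value ...

   while callers comparing addresses pass OEP_ADDRESS_OF, and callers
   that must avoid the checking-mode hash verification above pass
   OEP_NO_HASH_CHECK. */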
4134 namespace inchash
4137 /* Generate a hash value for an expression. This can be used iteratively
4138 by passing a previous result as the HSTATE argument.
4140 This function is intended to produce the same hash for expressions which
4141 would compare equal using operand_equal_p. */
4142 void
4143 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4145 default_compare_instance.hash_operand (t, hstate, flags);
4150 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4151 with a different signedness or a narrower precision. */
4153 static bool
4154 operand_equal_for_comparison_p (tree arg0, tree arg1)
4156 if (operand_equal_p (arg0, arg1, 0))
4157 return true;
4159 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4160 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4161 return false;
4163 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4164 and see if the inner values are the same. This removes any
4165 signedness comparison, which doesn't matter here. */
4166 tree op0 = arg0;
4167 tree op1 = arg1;
4168 STRIP_NOPS (op0);
4169 STRIP_NOPS (op1);
4170 if (operand_equal_p (op0, op1, 0))
4171 return true;
4173 /* Discard a single widening conversion from ARG1 and see if the inner
4174 value is the same as ARG0. */
4175 if (CONVERT_EXPR_P (arg1)
4176 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4177 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4178 < TYPE_PRECISION (TREE_TYPE (arg1))
4179 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4180 return true;
4182 return false;
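/* Example (illustrative) of the widening case handled above: given
   "signed char c", the trees "c" and "(int) c" are not equal under
   operand_equal_p, but operand_equal_for_comparison_p accepts the pair
   because the conversion merely widens the value. */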
4185 /* See if ARG is an expression that is either a comparison or is performing
4186 arithmetic on comparisons. The comparisons must only be comparing
4187 two different values, which will be stored in *CVAL1 and *CVAL2; if
4188 they are nonzero it means that some operands have already been found.
4189 No variables may be used anywhere else in the expression except in the
4190 comparisons.
4192 If this is true, return true. Otherwise, return false. */
4194 static bool
4195 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4197 enum tree_code code = TREE_CODE (arg);
4198 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4200 /* We can handle some of the tcc_expression cases here. */
4201 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4202 tclass = tcc_unary;
4203 else if (tclass == tcc_expression
4204 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4205 || code == COMPOUND_EXPR))
4206 tclass = tcc_binary;
4208 switch (tclass)
4210 case tcc_unary:
4211 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4213 case tcc_binary:
4214 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4215 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4217 case tcc_constant:
4218 return true;
4220 case tcc_expression:
4221 if (code == COND_EXPR)
4222 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4223 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4224 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4225 return false;
4227 case tcc_comparison:
4228 /* First see if we can handle the first operand, then the second. For
4229 the second operand, we know *CVAL1 can't be zero. It must be that
4230 one side of the comparison is each of the values; test for the
4231 case where this isn't true by failing if the two operands
4232 are the same. */
4234 if (operand_equal_p (TREE_OPERAND (arg, 0),
4235 TREE_OPERAND (arg, 1), 0))
4236 return false;
4238 if (*cval1 == 0)
4239 *cval1 = TREE_OPERAND (arg, 0);
4240 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4242 else if (*cval2 == 0)
4243 *cval2 = TREE_OPERAND (arg, 0);
4244 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4246 else
4247 return false;
4249 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4251 else if (*cval2 == 0)
4252 *cval2 = TREE_OPERAND (arg, 1);
4253 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4255 else
4256 return false;
4258 return true;
4260 default:
4261 return false;
4265 /* ARG is a tree that is known to contain just arithmetic operations and
4266 comparisons. Evaluate the operations in the tree substituting NEW0 for
4267 any occurrence of OLD0 as an operand of a comparison and likewise for
4268 NEW1 and OLD1. */
4270 static tree
4271 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4272 tree old1, tree new1)
4274 tree type = TREE_TYPE (arg);
4275 enum tree_code code = TREE_CODE (arg);
4276 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4278 /* We can handle some of the tcc_expression cases here. */
4279 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4280 tclass = tcc_unary;
4281 else if (tclass == tcc_expression
4282 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4283 tclass = tcc_binary;
4285 switch (tclass)
4287 case tcc_unary:
4288 return fold_build1_loc (loc, code, type,
4289 eval_subst (loc, TREE_OPERAND (arg, 0),
4290 old0, new0, old1, new1));
4292 case tcc_binary:
4293 return fold_build2_loc (loc, code, type,
4294 eval_subst (loc, TREE_OPERAND (arg, 0),
4295 old0, new0, old1, new1),
4296 eval_subst (loc, TREE_OPERAND (arg, 1),
4297 old0, new0, old1, new1));
4299 case tcc_expression:
4300 switch (code)
4302 case SAVE_EXPR:
4303 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4304 old1, new1);
4306 case COMPOUND_EXPR:
4307 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4308 old1, new1);
4310 case COND_EXPR:
4311 return fold_build3_loc (loc, code, type,
4312 eval_subst (loc, TREE_OPERAND (arg, 0),
4313 old0, new0, old1, new1),
4314 eval_subst (loc, TREE_OPERAND (arg, 1),
4315 old0, new0, old1, new1),
4316 eval_subst (loc, TREE_OPERAND (arg, 2),
4317 old0, new0, old1, new1));
4318 default:
4319 break;
4321 /* Fall through - ??? */
4323 case tcc_comparison:
4325 tree arg0 = TREE_OPERAND (arg, 0);
4326 tree arg1 = TREE_OPERAND (arg, 1);
4328 /* We need to check both for exact equality and tree equality. The
4329 former will be true if the operand has a side-effect. In that
4330 case, we know the operand occurred exactly once. */
4332 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4333 arg0 = new0;
4334 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4335 arg0 = new1;
4337 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4338 arg1 = new0;
4339 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4340 arg1 = new1;
4342 return fold_build2_loc (loc, code, type, arg0, arg1);
4345 default:
4346 return arg;
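/* Illustrative note on how the two helpers above cooperate in fold: for
   an expression such as "((x > y) - (y > x)) > 0", compared against a
   constant, twoval_comparison_p records CVAL1 = x and CVAL2 = y, and
   the caller can then evaluate the expression for each possible
   ordering by substituting extreme constants of the type, roughly

     eval_subst (loc, arg, x, maxval, y, minval)   -- the x > y case
     eval_subst (loc, arg, x, maxval, y, maxval)   -- the x == y case
     eval_subst (loc, arg, x, minval, y, maxval)   -- the x < y case

   and folds the whole thing to a single comparison of x and y when the
   three results allow it. */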
4350 /* Return a tree for the case when the result of an expression is RESULT
4351 converted to TYPE and OMITTED was previously an operand of the expression
4352 but is now not needed (e.g., we folded OMITTED * 0).
4354 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4355 the conversion of RESULT to TYPE. */
4357 tree
4358 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4360 tree t = fold_convert_loc (loc, type, result);
4362 /* If the resulting operand is an empty statement, just return the omitted
4363 statement cast to void. */
4364 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4365 return build1_loc (loc, NOP_EXPR, void_type_node,
4366 fold_ignored_result (omitted));
4368 if (TREE_SIDE_EFFECTS (omitted))
4369 return build2_loc (loc, COMPOUND_EXPR, type,
4370 fold_ignored_result (omitted), t);
4372 return non_lvalue_loc (loc, t);
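/* Example (illustrative): when folding "f () * 0", the call cannot
   simply be dropped, so omit_one_operand_loc keeps the side effect
   while producing the constant:

     f () * 0   ==>   (f (), 0)
*/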
4375 /* Return a tree for the case when the result of an expression is RESULT
4376 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4377 of the expression but are now not needed.
4379 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4380 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4381 evaluated before OMITTED2. Otherwise, if neither has side effects,
4382 just do the conversion of RESULT to TYPE. */
4384 tree
4385 omit_two_operands_loc (location_t loc, tree type, tree result,
4386 tree omitted1, tree omitted2)
4388 tree t = fold_convert_loc (loc, type, result);
4390 if (TREE_SIDE_EFFECTS (omitted2))
4391 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4392 if (TREE_SIDE_EFFECTS (omitted1))
4393 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4395 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4399 /* Return a simplified tree node for the truth-negation of ARG. This
4400 never alters ARG itself. We assume that ARG is an operation that
4401 returns a truth value (0 or 1).
4403 FIXME: one would think we would fold the result, but it causes
4404 problems with the dominator optimizer. */
4406 static tree
4407 fold_truth_not_expr (location_t loc, tree arg)
4409 tree type = TREE_TYPE (arg);
4410 enum tree_code code = TREE_CODE (arg);
4411 location_t loc1, loc2;
4413 /* If this is a comparison, we can simply invert it, except for
4414 floating-point non-equality comparisons, in which case we just
4415 enclose a TRUTH_NOT_EXPR around what we have. */
4417 if (TREE_CODE_CLASS (code) == tcc_comparison)
4419 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4420 if (FLOAT_TYPE_P (op_type)
4421 && flag_trapping_math
4422 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4423 && code != NE_EXPR && code != EQ_EXPR)
4424 return NULL_TREE;
4426 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4427 if (code == ERROR_MARK)
4428 return NULL_TREE;
4430 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4431 TREE_OPERAND (arg, 1));
4432 copy_warning (ret, arg);
4433 return ret;
4436 switch (code)
4438 case INTEGER_CST:
4439 return constant_boolean_node (integer_zerop (arg), type);
4441 case TRUTH_AND_EXPR:
4442 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4443 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4444 return build2_loc (loc, TRUTH_OR_EXPR, type,
4445 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4446 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4448 case TRUTH_OR_EXPR:
4449 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4450 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4451 return build2_loc (loc, TRUTH_AND_EXPR, type,
4452 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4453 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4455 case TRUTH_XOR_EXPR:
4456 /* Here we can invert either operand. We invert the first operand
4457 unless the second operand is a TRUTH_NOT_EXPR in which case our
4458 result is the XOR of the first operand with the inside of the
4459 negation of the second operand. */
4461 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4462 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4463 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4464 else
4465 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4466 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4467 TREE_OPERAND (arg, 1));
4469 case TRUTH_ANDIF_EXPR:
4470 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4471 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4472 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4473 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4474 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4476 case TRUTH_ORIF_EXPR:
4477 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4478 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4479 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4480 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4481 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4483 case TRUTH_NOT_EXPR:
4484 return TREE_OPERAND (arg, 0);
4486 case COND_EXPR:
4488 tree arg1 = TREE_OPERAND (arg, 1);
4489 tree arg2 = TREE_OPERAND (arg, 2);
4491 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4492 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4494 /* A COND_EXPR may have a throw as one operand, which
4495 then has void type. Just leave void operands
4496 as they are. */
4497 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4498 VOID_TYPE_P (TREE_TYPE (arg1))
4499 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4500 VOID_TYPE_P (TREE_TYPE (arg2))
4501 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4504 case COMPOUND_EXPR:
4505 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4506 return build2_loc (loc, COMPOUND_EXPR, type,
4507 TREE_OPERAND (arg, 0),
4508 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4510 case NON_LVALUE_EXPR:
4511 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4512 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4514 CASE_CONVERT:
4515 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4516 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4518 /* fall through */
4520 case FLOAT_EXPR:
4521 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4522 return build1_loc (loc, TREE_CODE (arg), type,
4523 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4525 case BIT_AND_EXPR:
4526 if (!integer_onep (TREE_OPERAND (arg, 1)))
4527 return NULL_TREE;
4528 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4530 case SAVE_EXPR:
4531 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4533 case CLEANUP_POINT_EXPR:
4534 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4535 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4536 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4538 default:
4539 return NULL_TREE;
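/* Illustrative source-level view of the inversions above; each line
   shows ARG and the tree built for its negation:

     a == b       ==>  a != b      (comparisons invert directly, modulo
                                    the floating-point caveat above)
     a && b       ==>  !a || !b    (De Morgan, likewise for ||)
     c ? x : y    ==>  c ? !x : !y
     (a, b)       ==>  (a, !b)
*/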
4543 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4544 assume that ARG is an operation that returns a truth value (0 or 1
4545 for scalars, 0 or -1 for vectors). Return the folded expression if
4546 folding is successful. Otherwise, return NULL_TREE. */
4548 static tree
4549 fold_invert_truthvalue (location_t loc, tree arg)
4551 tree type = TREE_TYPE (arg);
4552 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4553 ? BIT_NOT_EXPR
4554 : TRUTH_NOT_EXPR,
4555 type, arg);
4558 /* Return a simplified tree node for the truth-negation of ARG. This
4559 never alters ARG itself. We assume that ARG is an operation that
4560 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4562 tree
4563 invert_truthvalue_loc (location_t loc, tree arg)
4565 if (TREE_CODE (arg) == ERROR_MARK)
4566 return arg;
4568 tree type = TREE_TYPE (arg);
4569 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4570 ? BIT_NOT_EXPR
4571 : TRUTH_NOT_EXPR,
4572 type, arg);
4575 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4576 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4577 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4578 is the original memory reference used to preserve the alias set of
4579 the access. */
4581 static tree
4582 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4583 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4584 int unsignedp, int reversep)
4586 tree result, bftype;
4588 /* Attempt not to lose the access path if possible. */
4589 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4591 tree ninner = TREE_OPERAND (orig_inner, 0);
4592 machine_mode nmode;
4593 poly_int64 nbitsize, nbitpos;
4594 tree noffset;
4595 int nunsignedp, nreversep, nvolatilep = 0;
4596 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4597 &noffset, &nmode, &nunsignedp,
4598 &nreversep, &nvolatilep);
4599 if (base == inner
4600 && noffset == NULL_TREE
4601 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4602 && !reversep
4603 && !nreversep
4604 && !nvolatilep)
4606 inner = ninner;
4607 bitpos -= nbitpos;
4611 alias_set_type iset = get_alias_set (orig_inner);
4612 if (iset == 0 && get_alias_set (inner) != iset)
4613 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4614 build_fold_addr_expr (inner),
4615 build_int_cst (ptr_type_node, 0));
4617 if (known_eq (bitpos, 0) && !reversep)
4619 tree size = TYPE_SIZE (TREE_TYPE (inner));
4620 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4621 || POINTER_TYPE_P (TREE_TYPE (inner)))
4622 && tree_fits_shwi_p (size)
4623 && tree_to_shwi (size) == bitsize)
4624 return fold_convert_loc (loc, type, inner);
4627 bftype = type;
4628 if (TYPE_PRECISION (bftype) != bitsize
4629 || TYPE_UNSIGNED (bftype) == !unsignedp)
4630 bftype = build_nonstandard_integer_type (bitsize, 0);
4632 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4633 bitsize_int (bitsize), bitsize_int (bitpos));
4634 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4636 if (bftype != type)
4637 result = fold_convert_loc (loc, type, result);
4639 return result;
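/* Example (illustrative): a call with TYPE = unsigned char,
   BITSIZE = 8 and BITPOS = 8 yields, modulo conversions, the tree

     BIT_FIELD_REF <INNER, 8, 8>

   i.e. an unsigned 8-bit load of INNER's second byte. */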
4642 /* Optimize a bit-field compare.
4644 There are two cases: First is a compare against a constant and the
4645 second is a comparison of two items where the fields are at the same
4646 bit position relative to the start of a chunk (byte, halfword, word)
4647 large enough to contain it. In these cases we can avoid the shift
4648 implicit in bitfield extractions.
4650 For constants, we emit a compare of the shifted constant with the
4651 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4652 compared. For two fields at the same position, we do the ANDs with the
4653 similar mask and compare the result of the ANDs.
4655 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4656 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4657 are the left and right operands of the comparison, respectively.
4659 If the optimization described above can be done, we return the resulting
4660 tree. Otherwise we return zero. */
4662 static tree
4663 optimize_bit_field_compare (location_t loc, enum tree_code code,
4664 tree compare_type, tree lhs, tree rhs)
4666 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4667 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4668 tree type = TREE_TYPE (lhs);
4669 tree unsigned_type;
4670 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4671 machine_mode lmode, rmode;
4672 scalar_int_mode nmode;
4673 int lunsignedp, runsignedp;
4674 int lreversep, rreversep;
4675 int lvolatilep = 0, rvolatilep = 0;
4676 tree linner, rinner = NULL_TREE;
4677 tree mask;
4678 tree offset;
4680 /* Get all the information about the extractions being done. If the bit size
4681 is the same as the size of the underlying object, we aren't doing an
4682 extraction at all and so can do nothing. We also don't want to
4683 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4684 then will no longer be able to replace it. */
4685 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4686 &lunsignedp, &lreversep, &lvolatilep);
4687 if (linner == lhs
4688 || !known_size_p (plbitsize)
4689 || !plbitsize.is_constant (&lbitsize)
4690 || !plbitpos.is_constant (&lbitpos)
4691 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4692 || offset != 0
4693 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4694 || lvolatilep)
4695 return 0;
4697 if (const_p)
4698 rreversep = lreversep;
4699 else
4701 /* If this is not a constant, we can only do something if bit positions,
4702 sizes, signedness and storage order are the same. */
4703 rinner
4704 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4705 &runsignedp, &rreversep, &rvolatilep);
4707 if (rinner == rhs
4708 || maybe_ne (lbitpos, rbitpos)
4709 || maybe_ne (lbitsize, rbitsize)
4710 || lunsignedp != runsignedp
4711 || lreversep != rreversep
4712 || offset != 0
4713 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4714 || rvolatilep)
4715 return 0;
4718 /* Honor the C++ memory model and mimic what RTL expansion does. */
4719 poly_uint64 bitstart = 0;
4720 poly_uint64 bitend = 0;
4721 if (TREE_CODE (lhs) == COMPONENT_REF)
4723 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4724 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4725 return 0;
4728 /* See if we can find a mode to refer to this field. We should be able to,
4729 but fail if we can't. */
4730 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4731 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4732 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4733 TYPE_ALIGN (TREE_TYPE (rinner))),
4734 BITS_PER_WORD, false, &nmode))
4735 return 0;
4737 /* Set signed and unsigned types of the precision of this mode for the
4738 shifts below. */
4739 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4741 /* Compute the bit position and size for the new reference and our offset
4742 within it. If the new reference is the same size as the original, we
4743 won't optimize anything, so return zero. */
4744 nbitsize = GET_MODE_BITSIZE (nmode);
4745 nbitpos = lbitpos & ~ (nbitsize - 1);
4746 lbitpos -= nbitpos;
4747 if (nbitsize == lbitsize)
4748 return 0;
4750 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4751 lbitpos = nbitsize - lbitsize - lbitpos;
4753 /* Make the mask to be used against the extracted field. */
4754 mask = build_int_cst_type (unsigned_type, -1);
4755 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4756 mask = const_binop (RSHIFT_EXPR, mask,
4757 size_int (nbitsize - lbitsize - lbitpos));
4759 if (! const_p)
4761 if (nbitpos < 0)
4762 return 0;
4764 /* If not comparing with constant, just rework the comparison
4765 and return. */
4766 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4767 nbitsize, nbitpos, 1, lreversep);
4768 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4769 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4770 nbitsize, nbitpos, 1, rreversep);
4771 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4772 return fold_build2_loc (loc, code, compare_type, t1, t2);
4775 /* Otherwise, we are handling the constant case. See if the constant is too
4776 big for the field. Warn and return a tree for 0 (false) if so. We do
4777 this not only for its own sake, but to avoid having to test for this
4778 error case below. If we didn't, we might generate wrong code.
4780 For unsigned fields, the constant shifted right by the field length should
4781 be all zero. For signed fields, the high-order bits should agree with
4782 the sign bit. */
4784 if (lunsignedp)
4786 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4788 warning (0, "comparison is always %d due to width of bit-field",
4789 code == NE_EXPR);
4790 return constant_boolean_node (code == NE_EXPR, compare_type);
4793 else
4795 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4796 if (tem != 0 && tem != -1)
4798 warning (0, "comparison is always %d due to width of bit-field",
4799 code == NE_EXPR);
4800 return constant_boolean_node (code == NE_EXPR, compare_type);
4804 if (nbitpos < 0)
4805 return 0;
4807 /* Single-bit compares should always be against zero. */
4808 if (lbitsize == 1 && ! integer_zerop (rhs))
4810 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4811 rhs = build_int_cst (type, 0);
4814 /* Make a new bitfield reference, shift the constant over the
4815 appropriate number of bits and mask it with the computed mask
4816 (in case this was a signed field). */
4817 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4818 nbitsize, nbitpos, 1, lreversep);
4820 rhs = const_binop (BIT_AND_EXPR,
4821 const_binop (LSHIFT_EXPR,
4822 fold_convert_loc (loc, unsigned_type, rhs),
4823 size_int (lbitpos)),
4824 mask);
4826 lhs = build2_loc (loc, code, compare_type,
4827 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4828 return lhs;
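/* Worked example (illustrative) of the constant case, assuming 8-bit
   units and little-endian bit allocation:

     struct S { unsigned a : 3, b : 5; } s;

     s.b == 7   ==>   (<byte holding s.b> & 0xf8) == (7 << 3)

   i.e. the shift implicit in extracting the bit-field is folded into
   the mask and into the shifted constant. */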
4831 /* Subroutine for fold_truth_andor_1: decode a field reference.
4833 If EXP is a comparison reference, we return the innermost reference.
4835 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4836 set to the starting bit number.
4838 If the innermost field can be completely contained in a mode-sized
4839 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4841 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4842 otherwise it is not changed.
4844 *PUNSIGNEDP is set to the signedness of the field.
4846 *PREVERSEP is set to the storage order of the field.
4848 *PMASK is set to the mask used. This is either contained in a
4849 BIT_AND_EXPR or derived from the width of the field.
4851 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4853 Return 0 if this is not a component reference or is one that we can't
4854 do anything with. */
4856 static tree
4857 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4858 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4859 int *punsignedp, int *preversep, int *pvolatilep,
4860 tree *pmask, tree *pand_mask)
4862 tree exp = *exp_;
4863 tree outer_type = 0;
4864 tree and_mask = 0;
4865 tree mask, inner, offset;
4866 tree unsigned_type;
4867 unsigned int precision;
4869 /* All the optimizations using this function assume integer fields.
4870 There are problems with FP fields since the type_for_size call
4871 below can fail for, e.g., XFmode. */
4872 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4873 return NULL_TREE;
4875 /* We are interested in the bare arrangement of bits, so strip everything
4876 that doesn't affect the machine mode. However, record the type of the
4877 outermost expression if it may matter below. */
4878 if (CONVERT_EXPR_P (exp)
4879 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4880 outer_type = TREE_TYPE (exp);
4881 STRIP_NOPS (exp);
4883 if (TREE_CODE (exp) == BIT_AND_EXPR)
4885 and_mask = TREE_OPERAND (exp, 1);
4886 exp = TREE_OPERAND (exp, 0);
4887 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4888 if (TREE_CODE (and_mask) != INTEGER_CST)
4889 return NULL_TREE;
4892 poly_int64 poly_bitsize, poly_bitpos;
4893 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4894 pmode, punsignedp, preversep, pvolatilep);
4895 if ((inner == exp && and_mask == 0)
4896 || !poly_bitsize.is_constant (pbitsize)
4897 || !poly_bitpos.is_constant (pbitpos)
4898 || *pbitsize < 0
4899 || offset != 0
4900 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4901 /* Reject out-of-bound accesses (PR79731). */
4902 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4903 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4904 *pbitpos + *pbitsize) < 0))
4905 return NULL_TREE;
4907 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4908 if (unsigned_type == NULL_TREE)
4909 return NULL_TREE;
4911 *exp_ = exp;
4913 /* If the number of bits in the reference is the same as the bitsize of
4914 the outer type, then the outer type gives the signedness. Otherwise
4915 (in case of a small bitfield) the signedness is unchanged. */
4916 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4917 *punsignedp = TYPE_UNSIGNED (outer_type);
4919 /* Compute the mask to access the bitfield. */
4920 precision = TYPE_PRECISION (unsigned_type);
4922 mask = build_int_cst_type (unsigned_type, -1);
4924 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4925 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4927 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4928 if (and_mask != 0)
4929 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4930 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4932 *pmask = mask;
4933 *pand_mask = and_mask;
4934 return inner;
4937 /* Return true if MASK represents a mask of SIZE ones in the low-order
4938 bit positions and MASK's type is SIGNED. */
4940 static bool
4941 all_ones_mask_p (const_tree mask, unsigned int size)
4943 tree type = TREE_TYPE (mask);
4944 unsigned int precision = TYPE_PRECISION (type);
4946 /* If this function returns true when the type of the mask is
4947 UNSIGNED, then there will be errors. In particular see
4948 gcc.c-torture/execute/990326-1.c. There does not appear to be
4949 any documentation paper trail as to why this is so. But the
4950 pre-wide-int code worked with that restriction and it has been
4951 preserved here. */
4952 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4953 return false;
4955 return wi::mask (size, false, precision) == wi::to_wide (mask);
4958 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4959 represents the sign bit of EXP's type. If EXP represents a sign
4960 or zero extension, also test VAL against the unextended type.
4961 The return value is the (sub)expression whose sign bit is VAL,
4962 or NULL_TREE otherwise. */
4964 tree
4965 sign_bit_p (tree exp, const_tree val)
4967 int width;
4968 tree t;
4970 /* Tree EXP must have an integral type. */
4971 t = TREE_TYPE (exp);
4972 if (! INTEGRAL_TYPE_P (t))
4973 return NULL_TREE;
4975 /* Tree VAL must be an integer constant. */
4976 if (TREE_CODE (val) != INTEGER_CST
4977 || TREE_OVERFLOW (val))
4978 return NULL_TREE;
4980 width = TYPE_PRECISION (t);
4981 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4982 return exp;
4984 /* Handle extension from a narrower type. */
4985 if (TREE_CODE (exp) == NOP_EXPR
4986 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4987 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4989 return NULL_TREE;
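/* Example (illustrative): for a 32-bit "int x", sign_bit_p (x, val)
   returns x when VAL is 0x80000000, since only the sign bit is set,
   and NULL_TREE for other constants. */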
4992 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4993 operand is simple enough to be evaluated unconditionally. */
4995 static bool
4996 simple_operand_p (const_tree exp)
4998 /* Strip any conversions that don't change the machine mode. */
4999 STRIP_NOPS (exp);
5001 return (CONSTANT_CLASS_P (exp)
5002 || TREE_CODE (exp) == SSA_NAME
5003 || (DECL_P (exp)
5004 && ! TREE_ADDRESSABLE (exp)
5005 && ! TREE_THIS_VOLATILE (exp)
5006 && ! DECL_NONLOCAL (exp)
5007 /* Don't regard global variables as simple. They may be
5008 allocated in ways unknown to the compiler (shared memory,
5009 #pragma weak, etc). */
5010 && ! TREE_PUBLIC (exp)
5011 && ! DECL_EXTERNAL (exp)
5012 /* Weakrefs are not safe to read, since they can be NULL.
5013 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5014 have DECL_WEAK flag set. */
5015 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5016 /* Loading a static variable is unduly expensive, but global
5017 registers aren't expensive. */
5018 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5021 /* Determine if an operand is simple enough to be evaluated unconditionally.
5022 In addition to simple_operand_p, we assume that comparisons, conversions,
5023 and logic-not operations are simple, if their operands are simple, too. */
5025 bool
5026 simple_condition_p (tree exp)
5028 enum tree_code code;
5030 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5031 return false;
5033 while (CONVERT_EXPR_P (exp))
5034 exp = TREE_OPERAND (exp, 0);
5036 code = TREE_CODE (exp);
5038 if (TREE_CODE_CLASS (code) == tcc_comparison)
5039 return (simple_operand_p (TREE_OPERAND (exp, 0))
5040 && simple_operand_p (TREE_OPERAND (exp, 1)));
5042 if (code == TRUTH_NOT_EXPR)
5043 return simple_condition_p (TREE_OPERAND (exp, 0));
5045 return simple_operand_p (exp);
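/* Example (illustrative): "x == y" on local, non-volatile, non-weak
   scalars is a simple condition and may be evaluated unconditionally,
   whereas "*p == 0" is rejected because the load could trap, and
   "g == 0" for a global g is rejected by simple_operand_p above. */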
5049 /* The following functions are subroutines to fold_range_test and allow it to
5050 try to change a logical combination of comparisons into a range test.
5052 For example, both
5053 X == 2 || X == 3 || X == 4 || X == 5
5054 and
5055 X >= 2 && X <= 5
5056 are converted to
5057 (unsigned) (X - 2) <= 3
5059 We describe each set of comparisons as being either inside or outside
5060 a range, using a variable named like IN_P, and then describe the
5061 range with a lower and upper bound. If one of the bounds is omitted,
5062 it represents either the highest or lowest value of the type.
5064 In the comments below, we represent a range by two numbers in brackets
5065 preceded by a "+" to designate being inside that range, or a "-" to
5066 designate being outside that range, so the condition can be inverted by
5067 flipping the prefix. An omitted bound is represented by a "-". For
5068 example, "- [-, 10]" means being outside the range starting at the lowest
5069 possible value and ending at 10, in other words, being greater than 10.
5070 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5071 always false.
5073 We set up things so that the missing bounds are handled in a consistent
5074 manner so neither a missing bound nor "true" and "false" need to be
5075 handled using a special case. */
5077 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5078 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5079 and UPPER1_P are nonzero if the respective argument is an upper bound
5080 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5081 must be specified for a comparison. ARG1 will be converted to ARG0's
5082 type if both are specified. */
5084 static tree
5085 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5086 tree arg1, int upper1_p)
5088 tree tem;
5089 int result;
5090 int sgn0, sgn1;
5092 /* If neither arg represents infinity, do the normal operation.
5093 Else, if not a comparison, return infinity. Else handle the special
5094 comparison rules. Note that most of the cases below won't occur, but
5095 are handled for consistency. */
5097 if (arg0 != 0 && arg1 != 0)
5099 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5100 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5101 STRIP_NOPS (tem);
5102 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5105 if (TREE_CODE_CLASS (code) != tcc_comparison)
5106 return 0;
5108 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5109 for neither. In real mathematics, we cannot assume open-ended ranges are
5110 the same. But, this is computer arithmetic, where numbers are finite.
5111 We can therefore make the transformation of any unbounded range with
5112 the value Z, Z being greater than any representable number. This permits
5113 us to treat unbounded ranges as equal. */
5114 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5115 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5116 switch (code)
5118 case EQ_EXPR:
5119 result = sgn0 == sgn1;
5120 break;
5121 case NE_EXPR:
5122 result = sgn0 != sgn1;
5123 break;
5124 case LT_EXPR:
5125 result = sgn0 < sgn1;
5126 break;
5127 case LE_EXPR:
5128 result = sgn0 <= sgn1;
5129 break;
5130 case GT_EXPR:
5131 result = sgn0 > sgn1;
5132 break;
5133 case GE_EXPR:
5134 result = sgn0 >= sgn1;
5135 break;
5136 default:
5137 gcc_unreachable ();
5140 return constant_boolean_node (result, type);
5143 /* Helper routine for make_range. Perform one step for it, return
5144 new expression if the loop should continue or NULL_TREE if it should
5145 stop. */
5147 tree
5148 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5149 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5150 bool *strict_overflow_p)
5152 tree arg0_type = TREE_TYPE (arg0);
5153 tree n_low, n_high, low = *p_low, high = *p_high;
5154 int in_p = *p_in_p, n_in_p;
5156 switch (code)
5158 case TRUTH_NOT_EXPR:
5159 /* We can only do something if the range is testing for zero. */
5160 if (low == NULL_TREE || high == NULL_TREE
5161 || ! integer_zerop (low) || ! integer_zerop (high))
5162 return NULL_TREE;
5163 *p_in_p = ! in_p;
5164 return arg0;
5166 case EQ_EXPR: case NE_EXPR:
5167 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5168 /* We can only do something if the range is testing for zero
5169 and if the second operand is an integer constant. Note that
5170 saying something is "in" the range we make is done by
5171 complementing IN_P, since it is set in the initial case of
5172 being not equal to zero; "out" is leaving it alone. */
5173 if (low == NULL_TREE || high == NULL_TREE
5174 || ! integer_zerop (low) || ! integer_zerop (high)
5175 || TREE_CODE (arg1) != INTEGER_CST)
5176 return NULL_TREE;
5178 switch (code)
5180 case NE_EXPR: /* - [c, c] */
5181 low = high = arg1;
5182 break;
5183 case EQ_EXPR: /* + [c, c] */
5184 in_p = ! in_p, low = high = arg1;
5185 break;
5186 case GT_EXPR: /* - [-, c] */
5187 low = 0, high = arg1;
5188 break;
5189 case GE_EXPR: /* + [c, -] */
5190 in_p = ! in_p, low = arg1, high = 0;
5191 break;
5192 case LT_EXPR: /* - [c, -] */
5193 low = arg1, high = 0;
5194 break;
5195 case LE_EXPR: /* + [-, c] */
5196 in_p = ! in_p, low = 0, high = arg1;
5197 break;
5198 default:
5199 gcc_unreachable ();
5202 /* If this is an unsigned comparison, we also know that EXP is
5203 greater than or equal to zero. We base the range tests we make
5204 on that fact, so we record it here so we can parse existing
5205 range tests. We test arg0_type since often the return type
5206 of, e.g. EQ_EXPR, is boolean. */
5207 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5209 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5210 in_p, low, high, 1,
5211 build_int_cst (arg0_type, 0),
5212 NULL_TREE))
5213 return NULL_TREE;
5215 in_p = n_in_p, low = n_low, high = n_high;
5217 /* If the high bound is missing, but we have a nonzero low
5218 bound, reverse the range so it goes from zero to the low bound
5219 minus 1. */
5220 if (high == 0 && low && ! integer_zerop (low))
5222 in_p = ! in_p;
5223 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5224 build_int_cst (TREE_TYPE (low), 1), 0);
5225 low = build_int_cst (arg0_type, 0);
5229 *p_low = low;
5230 *p_high = high;
5231 *p_in_p = in_p;
5232 return arg0;
5234 case NEGATE_EXPR:
5235 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5236 low and high are non-NULL, then normalize will DTRT. */
5237 if (!TYPE_UNSIGNED (arg0_type)
5238 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5240 if (low == NULL_TREE)
5241 low = TYPE_MIN_VALUE (arg0_type);
5242 if (high == NULL_TREE)
5243 high = TYPE_MAX_VALUE (arg0_type);
5246 /* (-x) IN [a,b] -> x in [-b, -a] */
5247 n_low = range_binop (MINUS_EXPR, exp_type,
5248 build_int_cst (exp_type, 0),
5249 0, high, 1);
5250 n_high = range_binop (MINUS_EXPR, exp_type,
5251 build_int_cst (exp_type, 0),
5252 0, low, 0);
5253 if (n_high != 0 && TREE_OVERFLOW (n_high))
5254 return NULL_TREE;
5255 goto normalize;
5257 case BIT_NOT_EXPR:
5258 /* ~ X -> -X - 1 */
5259 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5260 build_int_cst (exp_type, 1));
5262 case PLUS_EXPR:
5263 case MINUS_EXPR:
5264 if (TREE_CODE (arg1) != INTEGER_CST)
5265 return NULL_TREE;
5267 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5268 move a constant to the other side. */
5269 if (!TYPE_UNSIGNED (arg0_type)
5270 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5271 return NULL_TREE;
5273 /* If EXP is signed, any overflow in the computation is undefined,
5274 so we don't worry about it so long as our computations on
5275 the bounds don't overflow. For unsigned, overflow is defined
5276 and this is exactly the right thing. */
5277 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5278 arg0_type, low, 0, arg1, 0);
5279 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5280 arg0_type, high, 1, arg1, 0);
5281 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5282 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5283 return NULL_TREE;
5285 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5286 *strict_overflow_p = true;
5288 normalize:
5289 /* Check for an unsigned range which has wrapped around the maximum
5290 value thus making n_high < n_low, and normalize it. */
5291 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5293 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5294 build_int_cst (TREE_TYPE (n_high), 1), 0);
5295 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5296 build_int_cst (TREE_TYPE (n_low), 1), 0);
5298 /* If the range is of the form +/- [ x+1, x ], we won't
5299 be able to normalize it. But then, it represents the
5300 whole range or the empty set, so make it
5301 +/- [ -, - ]. */
5302 if (tree_int_cst_equal (n_low, low)
5303 && tree_int_cst_equal (n_high, high))
5304 low = high = 0;
5305 else
5306 in_p = ! in_p;
5308 else
5309 low = n_low, high = n_high;
5311 *p_low = low;
5312 *p_high = high;
5313 *p_in_p = in_p;
5314 return arg0;
5316 CASE_CONVERT:
5317 case NON_LVALUE_EXPR:
5318 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5319 return NULL_TREE;
5321 if (! INTEGRAL_TYPE_P (arg0_type)
5322 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5323 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5324 return NULL_TREE;
5326 n_low = low, n_high = high;
5328 if (n_low != 0)
5329 n_low = fold_convert_loc (loc, arg0_type, n_low);
5331 if (n_high != 0)
5332 n_high = fold_convert_loc (loc, arg0_type, n_high);
5334 /* If we're converting arg0 from an unsigned type to exp's
5335 signed type, we will be doing the comparison as unsigned.
5336 The tests above have already verified that LOW and HIGH
5337 are both positive.
5339 So we have to ensure that we will handle large unsigned
5340 values the same way that the current signed bounds treat
5341 negative values. */
5343 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5345 tree high_positive;
5346 tree equiv_type;
5347 /* For fixed-point modes, we need to pass the saturating flag
5348 as the 2nd parameter. */
5349 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5350 equiv_type
5351 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5352 TYPE_SATURATING (arg0_type));
5353 else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5354 equiv_type = arg0_type;
5355 else
5356 equiv_type
5357 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5359 /* A range without an upper bound is, naturally, unbounded.
5360 Since convert would have cropped a very large value, use
5361 the max value for the destination type. */
5362 high_positive
5363 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5364 : TYPE_MAX_VALUE (arg0_type);
5366 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5367 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5368 fold_convert_loc (loc, arg0_type,
5369 high_positive),
5370 build_int_cst (arg0_type, 1));
5372 /* If the low bound is specified, "and" the range with the
5373 range for which the original unsigned value will be
5374 positive. */
5375 if (low != 0)
5377 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5378 1, fold_convert_loc (loc, arg0_type,
5379 integer_zero_node),
5380 high_positive))
5381 return NULL_TREE;
5383 in_p = (n_in_p == in_p);
5385 else
5387 /* Otherwise, "or" the range with the range of the input
5388 that will be interpreted as negative. */
5389 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5390 1, fold_convert_loc (loc, arg0_type,
5391 integer_zero_node),
5392 high_positive))
5393 return NULL_TREE;
5395 in_p = (in_p != n_in_p);
5399 /* Otherwise, if we are converting arg0 from a signed type to exp's
5400 unsigned type, we will do the comparison as signed. If
5401 high is non-NULL, we punt above if it doesn't fit in the signed
5402 type, so if we get through here, +[-, high] or +[low, high] are
5403 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5404 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5405 high is not, the +[low, -] range is equivalent to union of
5406 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5407 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5408 low being 0, which should be treated as [-, -]. */
5409 else if (TYPE_UNSIGNED (exp_type)
5410 && !TYPE_UNSIGNED (arg0_type)
5411 && low
5412 && !high)
5414 if (integer_zerop (low))
5415 n_low = NULL_TREE;
5416 else
5418 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5419 n_low, build_int_cst (arg0_type, -1));
5420 n_low = build_zero_cst (arg0_type);
5421 in_p = !in_p;
5425 *p_low = n_low;
5426 *p_high = n_high;
5427 *p_in_p = in_p;
5428 return arg0;
5430 default:
5431 return NULL_TREE;
5435 /* Given EXP, a logical expression, set the range it is testing into
5436 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5437 actually being tested. *PLOW and *PHIGH will be made of the same
5438 type as the returned expression. If EXP is not a comparison, we
5439 will most likely not be returning a useful value and range. Set
5440 *STRICT_OVERFLOW_P to true if the return value is only valid
5441 because signed overflow is undefined; otherwise, do not change
5442 *STRICT_OVERFLOW_P. */
5444 tree
5445 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5446 bool *strict_overflow_p)
5448 enum tree_code code;
5449 tree arg0, arg1 = NULL_TREE;
5450 tree exp_type, nexp;
5451 int in_p;
5452 tree low, high;
5453 location_t loc = EXPR_LOCATION (exp);
5455 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5456 and see if we can refine the range. Some of the cases below may not
5457 happen, but it doesn't seem worth worrying about this. We "continue"
5458 the outer loop when we've changed something; otherwise we "break"
5459 the switch, which will "break" the while. */
5461 in_p = 0;
5462 low = high = build_int_cst (TREE_TYPE (exp), 0);
5464 while (1)
5466 code = TREE_CODE (exp);
5467 exp_type = TREE_TYPE (exp);
5468 arg0 = NULL_TREE;
5470 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5472 if (TREE_OPERAND_LENGTH (exp) > 0)
5473 arg0 = TREE_OPERAND (exp, 0);
5474 if (TREE_CODE_CLASS (code) == tcc_binary
5475 || TREE_CODE_CLASS (code) == tcc_comparison
5476 || (TREE_CODE_CLASS (code) == tcc_expression
5477 && TREE_OPERAND_LENGTH (exp) > 1))
5478 arg1 = TREE_OPERAND (exp, 1);
5480 if (arg0 == NULL_TREE)
5481 break;
5483 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5484 &high, &in_p, strict_overflow_p);
5485 if (nexp == NULL_TREE)
5486 break;
5487 exp = nexp;
5490 /* If EXP is a constant, we can evaluate whether this is true or false. */
5491 if (TREE_CODE (exp) == INTEGER_CST)
5493 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5494 exp, 0, low, 0))
5495 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5496 exp, 1, high, 1)));
5497 low = high = 0;
5498 exp = 0;
5501 *pin_p = in_p, *plow = low, *phigh = high;
5502 return exp;
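/* Example (illustrative): for EXP = "(unsigned) x - 2 <= 3" the loop
   above first turns the comparison into the range "+ [0, 3]" on the
   subtraction, then the MINUS_EXPR step shifts the bounds, so the
   variable is returned with *PIN_P = 1, *PLOW = 2 and *PHIGH = 5,
   i.e. the test 2 <= x && x <= 5. */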
5505 /* Return TRUE if a [LOW, HIGH] range check can be optimized to
5506 a bitwise check, i.e. when
5507 LOW == 0xXX...X00...0
5508 HIGH == 0xXX...X11...1.
5509 Return the corresponding mask in *MASK and stem in *VALUE. */
5511 static bool
5512 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5513 tree *value)
5515 if (TREE_CODE (low) != INTEGER_CST
5516 || TREE_CODE (high) != INTEGER_CST)
5517 return false;
5519 unsigned prec = TYPE_PRECISION (type);
5520 wide_int lo = wi::to_wide (low, prec);
5521 wide_int hi = wi::to_wide (high, prec);
5523 wide_int end_mask = lo ^ hi;
5524 if ((end_mask & (end_mask + 1)) != 0
5525 || (lo & end_mask) != 0)
5526 return false;
5528 wide_int stem_mask = ~end_mask;
5529 wide_int stem = lo & stem_mask;
5530 if (stem != (hi & stem_mask))
5531 return false;
5533 *mask = wide_int_to_tree (type, stem_mask);
5534 *value = wide_int_to_tree (type, stem);
5536 return true;
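/* Worked example (illustrative): for LOW = 0x20 and HIGH = 0x3f,
   LOW ^ HIGH == 0x1f is a contiguous low mask and LOW has none of
   those bits set, so

     0x20 <= x && x <= 0x3f   ==>   (x & ~0x1f) == 0x20

   with *MASK = ~0x1f and *VALUE = 0x20. */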
5539 /* Helper routine for build_range_check and match.pd. Return the type to
5540 perform the check or NULL if it shouldn't be optimized. */
5542 tree
5543 range_check_type (tree etype)
5545 /* First make sure that arithmetic in this type is valid, then make sure
5546 that it wraps around. */
5547 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5548 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5550 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5552 tree utype, minv, maxv;
5554 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5555 for the type in question, as we rely on this here. */
5556 utype = unsigned_type_for (etype);
5557 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5558 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5559 build_int_cst (TREE_TYPE (maxv), 1), 1);
5560 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5562 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5563 minv, 1, maxv, 1)))
5564 etype = utype;
5565 else
5566 return NULL_TREE;
5568 else if (POINTER_TYPE_P (etype)
5569 || TREE_CODE (etype) == OFFSET_TYPE
5570 /* Right now all BITINT_TYPEs satisfy
5571 (unsigned) max + 1 == (unsigned) min, so no need to verify
5572 that like for INTEGER_TYPEs. */
5573 || TREE_CODE (etype) == BITINT_TYPE)
5574 etype = unsigned_type_for (etype);
5575 return etype;
5578 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5579 type, TYPE, return an expression to test if EXP is in (or out of, depending
5580 on IN_P) the range. Return 0 if the test couldn't be created. */
5582 tree
5583 build_range_check (location_t loc, tree type, tree exp, int in_p,
5584 tree low, tree high)
5586 tree etype = TREE_TYPE (exp), mask, value;
5588 /* Disable this optimization for function pointer expressions
5589 on targets that require function pointer canonicalization. */
5590 if (targetm.have_canonicalize_funcptr_for_compare ()
5591 && POINTER_TYPE_P (etype)
5592 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5593 return NULL_TREE;
5595 if (! in_p)
5597 value = build_range_check (loc, type, exp, 1, low, high);
5598 if (value != 0)
5599 return invert_truthvalue_loc (loc, value);
5601 return 0;
5604 if (low == 0 && high == 0)
5605 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5607 if (low == 0)
5608 return fold_build2_loc (loc, LE_EXPR, type, exp,
5609 fold_convert_loc (loc, etype, high));
5611 if (high == 0)
5612 return fold_build2_loc (loc, GE_EXPR, type, exp,
5613 fold_convert_loc (loc, etype, low));
5615 if (operand_equal_p (low, high, 0))
5616 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5617 fold_convert_loc (loc, etype, low));
5619 if (TREE_CODE (exp) == BIT_AND_EXPR
5620 && maskable_range_p (low, high, etype, &mask, &value))
5621 return fold_build2_loc (loc, EQ_EXPR, type,
5622 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5623 exp, mask),
5624 value);
5626 if (integer_zerop (low))
5628 if (! TYPE_UNSIGNED (etype))
5630 etype = unsigned_type_for (etype);
5631 high = fold_convert_loc (loc, etype, high);
5632 exp = fold_convert_loc (loc, etype, exp);
5634 return build_range_check (loc, type, exp, 1, 0, high);
5637 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5638 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5640 int prec = TYPE_PRECISION (etype);
5642 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5644 if (TYPE_UNSIGNED (etype))
5646 tree signed_etype = signed_type_for (etype);
5647 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5648 etype
5649 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5650 else
5651 etype = signed_etype;
5652 exp = fold_convert_loc (loc, etype, exp);
5654 return fold_build2_loc (loc, GT_EXPR, type, exp,
5655 build_int_cst (etype, 0));
5659 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
 5660 This requires wrap-around arithmetic for the type of the expression. */
5661 etype = range_check_type (etype);
5662 if (etype == NULL_TREE)
5663 return NULL_TREE;
5665 high = fold_convert_loc (loc, etype, high);
5666 low = fold_convert_loc (loc, etype, low);
5667 exp = fold_convert_loc (loc, etype, exp);
5669 value = const_binop (MINUS_EXPR, high, low);
5671 if (value != 0 && !TREE_OVERFLOW (value))
5672 return build_range_check (loc, type,
5673 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5674 1, build_int_cst (etype, 0), value);
5676 return 0;
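/* A classic instance of the final transformation above:

     ch >= '0' && ch <= '9'

   becomes, via the MINUS_EXPR and the unsigned zero-based recursion,

     (unsigned char) (ch - '0') <= 9

   trading two signed comparisons for one subtraction and one unsigned
   comparison.  */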
5679 /* Return the predecessor of VAL in its type, handling the infinite case. */
5681 static tree
5682 range_predecessor (tree val)
5684 tree type = TREE_TYPE (val);
5686 if (INTEGRAL_TYPE_P (type)
5687 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5688 return 0;
5689 else
5690 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5691 build_int_cst (TREE_TYPE (val), 1), 0);
5694 /* Return the successor of VAL in its type, handling the infinite case. */
5696 static tree
5697 range_successor (tree val)
5699 tree type = TREE_TYPE (val);
5701 if (INTEGRAL_TYPE_P (type)
5702 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5703 return 0;
5704 else
5705 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5706 build_int_cst (TREE_TYPE (val), 1), 0);
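/* E.g. range_successor on (unsigned char) 9 yields 10, while on
   (unsigned char) 255 it yields 0 (NULL_TREE): the null result is how
   merge_ranges below detects that a bound cannot move past the end of
   the type.  */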
5709 /* Given two ranges, see if we can merge them into one. Return 1 if we
5710 can, 0 if we can't. Set the output range into the specified parameters. */
5712 bool
5713 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5714 tree high0, int in1_p, tree low1, tree high1)
5716 bool no_overlap;
5717 int subset;
5718 int temp;
5719 tree tem;
5720 int in_p;
5721 tree low, high;
5722 int lowequal = ((low0 == 0 && low1 == 0)
5723 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5724 low0, 0, low1, 0)));
5725 int highequal = ((high0 == 0 && high1 == 0)
5726 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5727 high0, 1, high1, 1)));
5729 /* Make range 0 be the range that starts first, or ends last if they
5730 start at the same value. Swap them if it isn't. */
5731 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5732 low0, 0, low1, 0))
5733 || (lowequal
5734 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5735 high1, 1, high0, 1))))
5737 temp = in0_p, in0_p = in1_p, in1_p = temp;
5738 tem = low0, low0 = low1, low1 = tem;
5739 tem = high0, high0 = high1, high1 = tem;
 5742 /* If the second range is != high1 where high1 is the maximum value of
 5743 its type, try first merging with the < high1 range. */
5744 if (low1
5745 && high1
5746 && TREE_CODE (low1) == INTEGER_CST
5747 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5748 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5749 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5750 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5751 && operand_equal_p (low1, high1, 0))
5753 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5754 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5755 !in1_p, NULL_TREE, range_predecessor (low1)))
5756 return true;
 5757 /* Similarly for the second range != low1 where low1 is the minimum value
 5758 of its type, try first merging with the > low1 range. */
5759 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5760 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5761 !in1_p, range_successor (low1), NULL_TREE))
5762 return true;
5765 /* Now flag two cases, whether the ranges are disjoint or whether the
5766 second range is totally subsumed in the first. Note that the tests
5767 below are simplified by the ones above. */
5768 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5769 high0, 1, low1, 0));
5770 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5771 high1, 1, high0, 1));
5773 /* We now have four cases, depending on whether we are including or
5774 excluding the two ranges. */
5775 if (in0_p && in1_p)
5777 /* If they don't overlap, the result is false. If the second range
5778 is a subset it is the result. Otherwise, the range is from the start
5779 of the second to the end of the first. */
5780 if (no_overlap)
5781 in_p = 0, low = high = 0;
5782 else if (subset)
5783 in_p = 1, low = low1, high = high1;
5784 else
5785 in_p = 1, low = low1, high = high0;
5788 else if (in0_p && ! in1_p)
5790 /* If they don't overlap, the result is the first range. If they are
5791 equal, the result is false. If the second range is a subset of the
5792 first, and the ranges begin at the same place, we go from just after
5793 the end of the second range to the end of the first. If the second
5794 range is not a subset of the first, or if it is a subset and both
5795 ranges end at the same place, the range starts at the start of the
5796 first range and ends just before the second range.
5797 Otherwise, we can't describe this as a single range. */
5798 if (no_overlap)
5799 in_p = 1, low = low0, high = high0;
5800 else if (lowequal && highequal)
5801 in_p = 0, low = high = 0;
5802 else if (subset && lowequal)
5804 low = range_successor (high1);
5805 high = high0;
5806 in_p = 1;
5807 if (low == 0)
5809 /* We are in the weird situation where high0 > high1 but
5810 high1 has no successor. Punt. */
5811 return 0;
5814 else if (! subset || highequal)
5816 low = low0;
5817 high = range_predecessor (low1);
5818 in_p = 1;
5819 if (high == 0)
5821 /* low0 < low1 but low1 has no predecessor. Punt. */
5822 return 0;
5825 else
5826 return 0;
5829 else if (! in0_p && in1_p)
5831 /* If they don't overlap, the result is the second range. If the second
5832 is a subset of the first, the result is false. Otherwise,
5833 the range starts just after the first range and ends at the
5834 end of the second. */
5835 if (no_overlap)
5836 in_p = 1, low = low1, high = high1;
5837 else if (subset || highequal)
5838 in_p = 0, low = high = 0;
5839 else
5841 low = range_successor (high0);
5842 high = high1;
5843 in_p = 1;
5844 if (low == 0)
5846 /* high1 > high0 but high0 has no successor. Punt. */
5847 return 0;
5852 else
5854 /* The case where we are excluding both ranges. Here the complex case
5855 is if they don't overlap. In that case, the only time we have a
5856 range is if they are adjacent. If the second is a subset of the
5857 first, the result is the first. Otherwise, the range to exclude
5858 starts at the beginning of the first range and ends at the end of the
5859 second. */
5860 if (no_overlap)
5862 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5863 range_successor (high0),
5864 1, low1, 0)))
5865 in_p = 0, low = low0, high = high1;
5866 else
5868 /* Canonicalize - [min, x] into - [-, x]. */
5869 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5870 switch (TREE_CODE (TREE_TYPE (low0)))
5872 case ENUMERAL_TYPE:
5873 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5874 GET_MODE_BITSIZE
5875 (TYPE_MODE (TREE_TYPE (low0)))))
5876 break;
5877 /* FALLTHROUGH */
5878 case INTEGER_TYPE:
5879 if (tree_int_cst_equal (low0,
5880 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5881 low0 = 0;
5882 break;
5883 case POINTER_TYPE:
5884 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5885 && integer_zerop (low0))
5886 low0 = 0;
5887 break;
5888 default:
5889 break;
5892 /* Canonicalize - [x, max] into - [x, -]. */
5893 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5894 switch (TREE_CODE (TREE_TYPE (high1)))
5896 case ENUMERAL_TYPE:
5897 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5898 GET_MODE_BITSIZE
5899 (TYPE_MODE (TREE_TYPE (high1)))))
5900 break;
5901 /* FALLTHROUGH */
5902 case INTEGER_TYPE:
5903 if (tree_int_cst_equal (high1,
5904 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5905 high1 = 0;
5906 break;
5907 case POINTER_TYPE:
5908 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5909 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5910 high1, 1,
5911 build_int_cst (TREE_TYPE (high1), 1),
5912 1)))
5913 high1 = 0;
5914 break;
5915 default:
5916 break;
 5919 /* The ranges might also be adjacent between the maximum and
5920 minimum values of the given type. For
5921 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5922 return + [x + 1, y - 1]. */
5923 if (low0 == 0 && high1 == 0)
5925 low = range_successor (high0);
5926 high = range_predecessor (low1);
5927 if (low == 0 || high == 0)
5928 return 0;
5930 in_p = 1;
5932 else
5933 return 0;
5936 else if (subset)
5937 in_p = 0, low = low0, high = high0;
5938 else
5939 in_p = 0, low = low0, high = high1;
5942 *pin_p = in_p, *plow = low, *phigh = high;
5943 return 1;
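/* Two worked instances of the cases above.  Merging + [0, 10] with
   + [5, 20] (both included, overlapping, neither a subset) yields the
   intersection + [5, 10].  Merging - [0, 10] with - [11, 20] (both
   excluded, adjacent) yields the single excluded range - [0, 20].  */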
5947 /* Subroutine of fold, looking inside expressions of the form
5948 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
 5949 are the three operands of the COND_EXPR. This function is
 5950 also used to optimize A op B ? C : A, by reversing the
5951 comparison first.
5953 Return a folded expression whose code is not a COND_EXPR
5954 anymore, or NULL_TREE if no folding opportunity is found. */
5956 static tree
5957 fold_cond_expr_with_comparison (location_t loc, tree type,
5958 enum tree_code comp_code,
5959 tree arg00, tree arg01, tree arg1, tree arg2)
5961 tree arg1_type = TREE_TYPE (arg1);
5962 tree tem;
5964 STRIP_NOPS (arg1);
5965 STRIP_NOPS (arg2);
5967 /* If we have A op 0 ? A : -A, consider applying the following
5968 transformations:
5970 A == 0? A : -A same as -A
5971 A != 0? A : -A same as A
5972 A >= 0? A : -A same as abs (A)
5973 A > 0? A : -A same as abs (A)
5974 A <= 0? A : -A same as -abs (A)
5975 A < 0? A : -A same as -abs (A)
5977 None of these transformations work for modes with signed
5978 zeros. If A is +/-0, the first two transformations will
5979 change the sign of the result (from +0 to -0, or vice
5980 versa). The last four will fix the sign of the result,
5981 even though the original expressions could be positive or
5982 negative, depending on the sign of A.
5984 Note that all these transformations are correct if A is
5985 NaN, since the two alternatives (A and -A) are also NaNs. */
5986 if (!HONOR_SIGNED_ZEROS (type)
5987 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5988 ? real_zerop (arg01)
5989 : integer_zerop (arg01))
5990 && ((TREE_CODE (arg2) == NEGATE_EXPR
5991 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5992 /* In the case that A is of the form X-Y, '-A' (arg2) may
5993 have already been folded to Y-X, check for that. */
5994 || (TREE_CODE (arg1) == MINUS_EXPR
5995 && TREE_CODE (arg2) == MINUS_EXPR
5996 && operand_equal_p (TREE_OPERAND (arg1, 0),
5997 TREE_OPERAND (arg2, 1), 0)
5998 && operand_equal_p (TREE_OPERAND (arg1, 1),
5999 TREE_OPERAND (arg2, 0), 0))))
6000 switch (comp_code)
6002 case EQ_EXPR:
6003 case UNEQ_EXPR:
6004 tem = fold_convert_loc (loc, arg1_type, arg1);
6005 return fold_convert_loc (loc, type, negate_expr (tem));
6006 case NE_EXPR:
6007 case LTGT_EXPR:
6008 return fold_convert_loc (loc, type, arg1);
6009 case UNGE_EXPR:
6010 case UNGT_EXPR:
6011 if (flag_trapping_math)
6012 break;
6013 /* Fall through. */
6014 case GE_EXPR:
6015 case GT_EXPR:
6016 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6017 break;
6018 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6019 return fold_convert_loc (loc, type, tem);
6020 case UNLE_EXPR:
6021 case UNLT_EXPR:
6022 if (flag_trapping_math)
6023 break;
6024 /* FALLTHRU */
6025 case LE_EXPR:
6026 case LT_EXPR:
6027 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6028 break;
6029 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6030 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6032 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
6033 is not, invokes UB both in abs and in the negation of it.
6034 So, use ABSU_EXPR instead. */
6035 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6036 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6037 tem = negate_expr (tem);
6038 return fold_convert_loc (loc, type, tem);
6040 else
6042 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6043 return negate_expr (fold_convert_loc (loc, type, tem));
6045 default:
6046 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6047 break;
6050 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6051 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6052 both transformations are correct when A is NaN: A != 0
6053 is then true, and A == 0 is false. */
6055 if (!HONOR_SIGNED_ZEROS (type)
6056 && integer_zerop (arg01) && integer_zerop (arg2))
6058 if (comp_code == NE_EXPR)
6059 return fold_convert_loc (loc, type, arg1);
6060 else if (comp_code == EQ_EXPR)
6061 return build_zero_cst (type);
6064 /* Try some transformations of A op B ? A : B.
6066 A == B? A : B same as B
6067 A != B? A : B same as A
6068 A >= B? A : B same as max (A, B)
6069 A > B? A : B same as max (B, A)
6070 A <= B? A : B same as min (A, B)
6071 A < B? A : B same as min (B, A)
6073 As above, these transformations don't work in the presence
6074 of signed zeros. For example, if A and B are zeros of
6075 opposite sign, the first two transformations will change
6076 the sign of the result. In the last four, the original
6077 expressions give different results for (A=+0, B=-0) and
6078 (A=-0, B=+0), but the transformed expressions do not.
6080 The first two transformations are correct if either A or B
6081 is a NaN. In the first transformation, the condition will
6082 be false, and B will indeed be chosen. In the case of the
6083 second transformation, the condition A != B will be true,
6084 and A will be chosen.
6086 The conversions to max() and min() are not correct if B is
6087 a number and A is not. The conditions in the original
6088 expressions will be false, so all four give B. The min()
6089 and max() versions would give a NaN instead. */
6090 if (!HONOR_SIGNED_ZEROS (type)
6091 && operand_equal_for_comparison_p (arg01, arg2)
6092 /* Avoid these transformations if the COND_EXPR may be used
6093 as an lvalue in the C++ front-end. PR c++/19199. */
6094 && (in_gimple_form
6095 || VECTOR_TYPE_P (type)
6096 || (! lang_GNU_CXX ()
6097 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6098 || ! maybe_lvalue_p (arg1)
6099 || ! maybe_lvalue_p (arg2)))
6101 tree comp_op0 = arg00;
6102 tree comp_op1 = arg01;
6103 tree comp_type = TREE_TYPE (comp_op0);
6105 switch (comp_code)
6107 case EQ_EXPR:
6108 return fold_convert_loc (loc, type, arg2);
6109 case NE_EXPR:
6110 return fold_convert_loc (loc, type, arg1);
6111 case LE_EXPR:
6112 case LT_EXPR:
6113 case UNLE_EXPR:
6114 case UNLT_EXPR:
6115 /* In C++ a ?: expression can be an lvalue, so put the
6116 operand which will be used if they are equal first
6117 so that we can convert this back to the
6118 corresponding COND_EXPR. */
6119 if (!HONOR_NANS (arg1))
6121 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6122 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6123 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6124 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6125 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6126 comp_op1, comp_op0);
6127 return fold_convert_loc (loc, type, tem);
6129 break;
6130 case GE_EXPR:
6131 case GT_EXPR:
6132 case UNGE_EXPR:
6133 case UNGT_EXPR:
6134 if (!HONOR_NANS (arg1))
6136 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6137 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6138 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6139 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6140 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6141 comp_op1, comp_op0);
6142 return fold_convert_loc (loc, type, tem);
6144 break;
6145 case UNEQ_EXPR:
6146 if (!HONOR_NANS (arg1))
6147 return fold_convert_loc (loc, type, arg2);
6148 break;
6149 case LTGT_EXPR:
6150 if (!HONOR_NANS (arg1))
6151 return fold_convert_loc (loc, type, arg1);
6152 break;
6153 default:
6154 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6155 break;
6159 return NULL_TREE;
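/* For illustration, with signed x, a and b (and signed zeros not
   honored where that matters):

     x >= 0 ? x : -x    folds to   ABS_EXPR <x>
     x <= 0 ? x : -x    folds to   the negation of ABSU_EXPR <x>,
                                   so INT_MIN stays well defined
     a < b ? a : b      folds to   MIN_EXPR <b, a>
     a >= b ? a : b     folds to   MAX_EXPR <a, b>  */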
6164 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6165 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6166 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6167 false) >= 2)
6168 #endif
6170 /* EXP is some logical combination of boolean tests. See if we can
6171 merge it into some range test. Return the new tree if so. */
6173 static tree
6174 fold_range_test (location_t loc, enum tree_code code, tree type,
6175 tree op0, tree op1)
6177 int or_op = (code == TRUTH_ORIF_EXPR
6178 || code == TRUTH_OR_EXPR);
6179 int in0_p, in1_p, in_p;
6180 tree low0, low1, low, high0, high1, high;
6181 bool strict_overflow_p = false;
6182 tree tem, lhs, rhs;
6183 const char * const warnmsg = G_("assuming signed overflow does not occur "
6184 "when simplifying range test");
6186 if (!INTEGRAL_TYPE_P (type))
6187 return 0;
6189 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6190 /* If op0 is known true or false and this is a short-circuiting
6191 operation we must not merge with op1 since that makes side-effects
6192 unconditional. So special-case this. */
6193 if (!lhs
6194 && ((code == TRUTH_ORIF_EXPR && in0_p)
6195 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6196 return op0;
6197 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6199 /* If this is an OR operation, invert both sides; we will invert
6200 again at the end. */
6201 if (or_op)
6202 in0_p = ! in0_p, in1_p = ! in1_p;
6204 /* If both expressions are the same, if we can merge the ranges, and we
6205 can build the range test, return it or it inverted. If one of the
6206 ranges is always true or always false, consider it to be the same
6207 expression as the other. */
6208 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6209 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6210 in1_p, low1, high1)
6211 && (tem = (build_range_check (loc, type,
6212 lhs != 0 ? lhs
6213 : rhs != 0 ? rhs : integer_zero_node,
6214 in_p, low, high))) != 0)
6216 if (strict_overflow_p)
6217 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6218 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6221 /* On machines where the branch cost is expensive, if this is a
6222 short-circuited branch and the underlying object on both sides
6223 is the same, make a non-short-circuit operation. */
6224 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6225 if (param_logical_op_non_short_circuit != -1)
6226 logical_op_non_short_circuit
6227 = param_logical_op_non_short_circuit;
6228 if (logical_op_non_short_circuit
6229 && !sanitize_coverage_p ()
6230 && lhs != 0 && rhs != 0
6231 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6232 && operand_equal_p (lhs, rhs, 0))
6234 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6235 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6236 which cases we can't do this. */
6237 if (simple_operand_p (lhs))
6238 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6239 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6240 type, op0, op1);
6242 else if (!lang_hooks.decls.global_bindings_p ()
6243 && !CONTAINS_PLACEHOLDER_P (lhs))
6245 tree common = save_expr (lhs);
6247 if ((lhs = build_range_check (loc, type, common,
6248 or_op ? ! in0_p : in0_p,
6249 low0, high0)) != 0
6250 && (rhs = build_range_check (loc, type, common,
6251 or_op ? ! in1_p : in1_p,
6252 low1, high1)) != 0)
6254 if (strict_overflow_p)
6255 fold_overflow_warning (warnmsg,
6256 WARN_STRICT_OVERFLOW_COMPARISON);
6257 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6258 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6259 type, lhs, rhs);
6264 return 0;
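/* For illustration: for code = TRUTH_ORIF_EXPR with op0 = (x == 3)
   and op1 = (x == 4), make_range yields the singleton ranges [3, 3]
   and [4, 4]; after the inversion for OR, merge_ranges combines them
   into the one range [3, 4] and build_range_check emits

     (unsigned) (x - 3) <= 1

   replacing two compare-and-branch tests with one.  */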
6267 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6268 bit value. Arrange things so the extra bits will be set to zero if and
 6269 only if C is sign-extended to its full width. If MASK is nonzero,
6270 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6272 static tree
6273 unextend (tree c, int p, int unsignedp, tree mask)
6275 tree type = TREE_TYPE (c);
6276 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6277 tree temp;
6279 if (p == modesize || unsignedp)
6280 return c;
6282 /* We work by getting just the sign bit into the low-order bit, then
6283 into the high-order bit, then sign-extend. We then XOR that value
6284 with C. */
6285 temp = build_int_cst (TREE_TYPE (c),
6286 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6288 /* We must use a signed type in order to get an arithmetic right shift.
6289 However, we must also avoid introducing accidental overflows, so that
6290 a subsequent call to integer_zerop will work. Hence we must
6291 do the type conversion here. At this point, the constant is either
6292 zero or one, and the conversion to a signed type can never overflow.
6293 We could get an overflow if this conversion is done anywhere else. */
6294 if (TYPE_UNSIGNED (type))
6295 temp = fold_convert (signed_type_for (type), temp);
6297 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6298 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6299 if (mask != 0)
6300 temp = const_binop (BIT_AND_EXPR, temp,
6301 fold_convert (TREE_TYPE (c), mask));
6302 /* If necessary, convert the type back to match the type of C. */
6303 if (TYPE_UNSIGNED (type))
6304 temp = fold_convert (type, temp);
6306 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
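/* For illustration, with P = 8 in a 32-bit mode and !UNSIGNEDP: the
   8-bit sign bit is replicated into the upper bits and XORed away, so

     unextend (0xffffffff, 8, 0, NULL)  yields  0x000000ff
     unextend (0x000000ff, 8, 0, NULL)  yields  0xffffffff

   i.e. the extra 24 bits come out zero exactly when C arrived
   sign-extended (-1 in the first line above).  */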
6309 /* For an expression that has the form
 6310 (A && B) || ~B
 6311 or
 6312 (A || B) && ~B,
 6313 we can drop one of the inner expressions and simplify to
 6314 A || ~B
 6315 or
 6316 A && ~B
6317 LOC is the location of the resulting expression. OP is the inner
6318 logical operation; the left-hand side in the examples above, while CMPOP
6319 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6320 removing a condition that guards another, as in
6321 (A != NULL && A->...) || A == NULL
6322 which we must not transform. If RHS_ONLY is true, only eliminate the
6323 right-most operand of the inner logical operation. */
6325 static tree
6326 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6327 bool rhs_only)
6329 tree type = TREE_TYPE (cmpop);
6330 enum tree_code code = TREE_CODE (cmpop);
6331 enum tree_code truthop_code = TREE_CODE (op);
6332 tree lhs = TREE_OPERAND (op, 0);
6333 tree rhs = TREE_OPERAND (op, 1);
6334 tree orig_lhs = lhs, orig_rhs = rhs;
6335 enum tree_code rhs_code = TREE_CODE (rhs);
6336 enum tree_code lhs_code = TREE_CODE (lhs);
6337 enum tree_code inv_code;
6339 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6340 return NULL_TREE;
6342 if (TREE_CODE_CLASS (code) != tcc_comparison)
6343 return NULL_TREE;
6345 if (rhs_code == truthop_code)
6347 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6348 if (newrhs != NULL_TREE)
6350 rhs = newrhs;
6351 rhs_code = TREE_CODE (rhs);
6354 if (lhs_code == truthop_code && !rhs_only)
6356 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6357 if (newlhs != NULL_TREE)
6359 lhs = newlhs;
6360 lhs_code = TREE_CODE (lhs);
6364 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6365 if (inv_code == rhs_code
6366 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6367 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6368 return lhs;
6369 if (!rhs_only && inv_code == lhs_code
6370 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6371 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6372 return rhs;
6373 if (rhs != orig_rhs || lhs != orig_lhs)
6374 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6375 lhs, rhs);
6376 return NULL_TREE;
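/* For illustration: with OP = (p < q || r == 0) and CMPOP = (r != 0),
   the inverse of NE_EXPR is EQ_EXPR and matches the right arm, so the
   arm is dropped and p < q is returned; the caller then rebuilds

     (p < q || r == 0) && r != 0   as   (p < q) && r != 0

   RHS_ONLY exists for cases like the pointer test quoted above, where
   the left arm must stay put because it guards the dereference.  */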
6379 /* Find ways of folding logical expressions of LHS and RHS:
6380 Try to merge two comparisons to the same innermost item.
6381 Look for range tests like "ch >= '0' && ch <= '9'".
6382 Look for combinations of simple terms on machines with expensive branches
6383 and evaluate the RHS unconditionally.
6385 For example, if we have p->a == 2 && p->b == 4 and we can make an
6386 object large enough to span both A and B, we can do this with a comparison
 6387 against the object ANDed with a mask.
6389 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6390 operations to do this with one comparison.
 6392 We check for both normal comparisons and the BIT_AND_EXPRs made by this
 6393 function and the one above.
6395 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6396 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6398 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6399 two operands.
6401 We return the simplified tree or 0 if no optimization is possible. */
6403 static tree
6404 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6405 tree lhs, tree rhs)
6407 /* If this is the "or" of two comparisons, we can do something if
6408 the comparisons are NE_EXPR. If this is the "and", we can do something
6409 if the comparisons are EQ_EXPR. I.e.,
6410 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6412 WANTED_CODE is this operation code. For single bit fields, we can
6413 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6414 comparison for one-bit fields. */
6416 enum tree_code wanted_code;
6417 enum tree_code lcode, rcode;
6418 tree ll_arg, lr_arg, rl_arg, rr_arg;
6419 tree ll_inner, lr_inner, rl_inner, rr_inner;
6420 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6421 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6422 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6423 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6424 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6425 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6426 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6427 scalar_int_mode lnmode, rnmode;
6428 tree ll_mask, lr_mask, rl_mask, rr_mask;
6429 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6430 tree l_const, r_const;
6431 tree lntype, rntype, result;
6432 HOST_WIDE_INT first_bit, end_bit;
6433 int volatilep;
6435 /* Start by getting the comparison codes. Fail if anything is volatile.
6436 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6437 it were surrounded with a NE_EXPR. */
6439 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6440 return 0;
6442 lcode = TREE_CODE (lhs);
6443 rcode = TREE_CODE (rhs);
6445 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6447 lhs = build2 (NE_EXPR, truth_type, lhs,
6448 build_int_cst (TREE_TYPE (lhs), 0));
6449 lcode = NE_EXPR;
6452 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6454 rhs = build2 (NE_EXPR, truth_type, rhs,
6455 build_int_cst (TREE_TYPE (rhs), 0));
6456 rcode = NE_EXPR;
6459 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6460 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6461 return 0;
6463 ll_arg = TREE_OPERAND (lhs, 0);
6464 lr_arg = TREE_OPERAND (lhs, 1);
6465 rl_arg = TREE_OPERAND (rhs, 0);
6466 rr_arg = TREE_OPERAND (rhs, 1);
6468 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6469 if (simple_operand_p (ll_arg)
6470 && simple_operand_p (lr_arg))
6472 if (operand_equal_p (ll_arg, rl_arg, 0)
6473 && operand_equal_p (lr_arg, rr_arg, 0))
6475 result = combine_comparisons (loc, code, lcode, rcode,
6476 truth_type, ll_arg, lr_arg);
6477 if (result)
6478 return result;
6480 else if (operand_equal_p (ll_arg, rr_arg, 0)
6481 && operand_equal_p (lr_arg, rl_arg, 0))
6483 result = combine_comparisons (loc, code, lcode,
6484 swap_tree_comparison (rcode),
6485 truth_type, ll_arg, lr_arg);
6486 if (result)
6487 return result;
6491 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6492 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6494 /* If the RHS can be evaluated unconditionally and its operands are
6495 simple, it wins to evaluate the RHS unconditionally on machines
6496 with expensive branches. In this case, this isn't a comparison
6497 that can be merged. */
6499 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6500 false) >= 2
6501 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6502 && simple_operand_p (rl_arg)
6503 && simple_operand_p (rr_arg))
6505 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6506 if (code == TRUTH_OR_EXPR
6507 && lcode == NE_EXPR && integer_zerop (lr_arg)
6508 && rcode == NE_EXPR && integer_zerop (rr_arg)
6509 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6510 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6511 return build2_loc (loc, NE_EXPR, truth_type,
6512 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6513 ll_arg, rl_arg),
6514 build_int_cst (TREE_TYPE (ll_arg), 0));
6516 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6517 if (code == TRUTH_AND_EXPR
6518 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6519 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6520 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6521 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6522 return build2_loc (loc, EQ_EXPR, truth_type,
6523 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6524 ll_arg, rl_arg),
6525 build_int_cst (TREE_TYPE (ll_arg), 0));
6528 /* See if the comparisons can be merged. Then get all the parameters for
6529 each side. */
6531 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6532 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6533 return 0;
6535 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6536 volatilep = 0;
6537 ll_inner = decode_field_reference (loc, &ll_arg,
6538 &ll_bitsize, &ll_bitpos, &ll_mode,
6539 &ll_unsignedp, &ll_reversep, &volatilep,
6540 &ll_mask, &ll_and_mask);
6541 lr_inner = decode_field_reference (loc, &lr_arg,
6542 &lr_bitsize, &lr_bitpos, &lr_mode,
6543 &lr_unsignedp, &lr_reversep, &volatilep,
6544 &lr_mask, &lr_and_mask);
6545 rl_inner = decode_field_reference (loc, &rl_arg,
6546 &rl_bitsize, &rl_bitpos, &rl_mode,
6547 &rl_unsignedp, &rl_reversep, &volatilep,
6548 &rl_mask, &rl_and_mask);
6549 rr_inner = decode_field_reference (loc, &rr_arg,
6550 &rr_bitsize, &rr_bitpos, &rr_mode,
6551 &rr_unsignedp, &rr_reversep, &volatilep,
6552 &rr_mask, &rr_and_mask);
 6554 /* The inner operation on the lhs of each comparison must be the
 6555 same if we are to be able to do anything.
6556 Then see if we have constants. If not, the same must be true for
6557 the rhs's. */
6558 if (volatilep
6559 || ll_reversep != rl_reversep
6560 || ll_inner == 0 || rl_inner == 0
6561 || ! operand_equal_p (ll_inner, rl_inner, 0))
6562 return 0;
6564 if (TREE_CODE (lr_arg) == INTEGER_CST
6565 && TREE_CODE (rr_arg) == INTEGER_CST)
6567 l_const = lr_arg, r_const = rr_arg;
6568 lr_reversep = ll_reversep;
6570 else if (lr_reversep != rr_reversep
6571 || lr_inner == 0 || rr_inner == 0
6572 || ! operand_equal_p (lr_inner, rr_inner, 0))
6573 return 0;
6574 else
6575 l_const = r_const = 0;
6577 /* If either comparison code is not correct for our logical operation,
6578 fail. However, we can convert a one-bit comparison against zero into
6579 the opposite comparison against that bit being set in the field. */
6581 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6582 if (lcode != wanted_code)
6584 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6586 /* Make the left operand unsigned, since we are only interested
6587 in the value of one bit. Otherwise we are doing the wrong
6588 thing below. */
6589 ll_unsignedp = 1;
6590 l_const = ll_mask;
6592 else
6593 return 0;
6596 /* This is analogous to the code for l_const above. */
6597 if (rcode != wanted_code)
6599 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6601 rl_unsignedp = 1;
6602 r_const = rl_mask;
6604 else
6605 return 0;
6608 /* See if we can find a mode that contains both fields being compared on
6609 the left. If we can't, fail. Otherwise, update all constants and masks
6610 to be relative to a field of that size. */
6611 first_bit = MIN (ll_bitpos, rl_bitpos);
6612 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6613 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6614 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6615 volatilep, &lnmode))
6616 return 0;
6618 lnbitsize = GET_MODE_BITSIZE (lnmode);
6619 lnbitpos = first_bit & ~ (lnbitsize - 1);
6620 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6621 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6623 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6625 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6626 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6629 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6630 size_int (xll_bitpos));
6631 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6632 size_int (xrl_bitpos));
6633 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6634 return 0;
6636 if (l_const)
6638 l_const = fold_convert_loc (loc, lntype, l_const);
6639 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6640 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6641 if (l_const == NULL_TREE)
6642 return 0;
6643 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6644 fold_build1_loc (loc, BIT_NOT_EXPR,
6645 lntype, ll_mask))))
6647 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6649 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6652 if (r_const)
6654 r_const = fold_convert_loc (loc, lntype, r_const);
6655 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6656 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6657 if (r_const == NULL_TREE)
6658 return 0;
6659 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6660 fold_build1_loc (loc, BIT_NOT_EXPR,
6661 lntype, rl_mask))))
6663 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6665 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
 6669 /* If the right sides are not constant, do the same for them. Also,
6670 disallow this optimization if a size, signedness or storage order
6671 mismatch occurs between the left and right sides. */
6672 if (l_const == 0)
6674 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6675 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6676 || ll_reversep != lr_reversep
6677 /* Make sure the two fields on the right
6678 correspond to the left without being swapped. */
6679 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6680 return 0;
6682 first_bit = MIN (lr_bitpos, rr_bitpos);
6683 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6684 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6685 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6686 volatilep, &rnmode))
6687 return 0;
6689 rnbitsize = GET_MODE_BITSIZE (rnmode);
6690 rnbitpos = first_bit & ~ (rnbitsize - 1);
6691 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6692 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6694 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6696 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6697 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6700 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6701 rntype, lr_mask),
6702 size_int (xlr_bitpos));
6703 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6704 rntype, rr_mask),
6705 size_int (xrr_bitpos));
6706 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6707 return 0;
6709 /* Make a mask that corresponds to both fields being compared.
6710 Do this for both items being compared. If the operands are the
6711 same size and the bits being compared are in the same position
6712 then we can do this by masking both and comparing the masked
6713 results. */
6714 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6715 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6716 if (lnbitsize == rnbitsize
6717 && xll_bitpos == xlr_bitpos
6718 && lnbitpos >= 0
6719 && rnbitpos >= 0)
6721 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6722 lntype, lnbitsize, lnbitpos,
6723 ll_unsignedp || rl_unsignedp, ll_reversep);
6724 if (! all_ones_mask_p (ll_mask, lnbitsize))
6725 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6727 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6728 rntype, rnbitsize, rnbitpos,
6729 lr_unsignedp || rr_unsignedp, lr_reversep);
6730 if (! all_ones_mask_p (lr_mask, rnbitsize))
6731 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6733 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6736 /* There is still another way we can do something: If both pairs of
6737 fields being compared are adjacent, we may be able to make a wider
6738 field containing them both.
6740 Note that we still must mask the lhs/rhs expressions. Furthermore,
6741 the mask must be shifted to account for the shift done by
6742 make_bit_field_ref. */
6743 if (((ll_bitsize + ll_bitpos == rl_bitpos
6744 && lr_bitsize + lr_bitpos == rr_bitpos)
6745 || (ll_bitpos == rl_bitpos + rl_bitsize
6746 && lr_bitpos == rr_bitpos + rr_bitsize))
6747 && ll_bitpos >= 0
6748 && rl_bitpos >= 0
6749 && lr_bitpos >= 0
6750 && rr_bitpos >= 0)
6752 tree type;
6754 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6755 ll_bitsize + rl_bitsize,
6756 MIN (ll_bitpos, rl_bitpos),
6757 ll_unsignedp, ll_reversep);
6758 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6759 lr_bitsize + rr_bitsize,
6760 MIN (lr_bitpos, rr_bitpos),
6761 lr_unsignedp, lr_reversep);
6763 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6764 size_int (MIN (xll_bitpos, xrl_bitpos)));
6765 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6766 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6767 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6768 return 0;
6770 /* Convert to the smaller type before masking out unwanted bits. */
6771 type = lntype;
6772 if (lntype != rntype)
6774 if (lnbitsize > rnbitsize)
6776 lhs = fold_convert_loc (loc, rntype, lhs);
6777 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6778 type = rntype;
6780 else if (lnbitsize < rnbitsize)
6782 rhs = fold_convert_loc (loc, lntype, rhs);
6783 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6784 type = lntype;
6788 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6789 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6791 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6792 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6794 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6797 return 0;
6800 /* Handle the case of comparisons with constants. If there is something in
6801 common between the masks, those bits of the constants must be the same.
6802 If not, the condition is always false. Test for this to avoid generating
6803 incorrect code below. */
6804 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6805 if (! integer_zerop (result)
6806 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6807 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6809 if (wanted_code == NE_EXPR)
6811 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6812 return constant_boolean_node (true, truth_type);
6814 else
6816 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6817 return constant_boolean_node (false, truth_type);
6821 if (lnbitpos < 0)
6822 return 0;
6824 /* Construct the expression we will return. First get the component
6825 reference we will make. Unless the mask is all ones the width of
6826 that field, perform the mask operation. Then compare with the
6827 merged constant. */
6828 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6829 lntype, lnbitsize, lnbitpos,
6830 ll_unsignedp || rl_unsignedp, ll_reversep);
6832 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6833 if (! all_ones_mask_p (ll_mask, lnbitsize))
6834 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6836 return build2_loc (loc, wanted_code, truth_type, result,
6837 const_binop (BIT_IOR_EXPR, l_const, r_const));
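/* A conceptual example (field layout is target-dependent): for

     struct S { unsigned char a, b; };

   the test s->a == 2 && s->b == 4 is folded into a single 16-bit
   load-and-compare, on a little-endian target roughly

     *(unsigned short *) &s->a == 0x0402

   which is what the make_bit_field_ref/mask machinery above builds at
   the tree level.  */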
6840 /* T is an integer expression that is being multiplied, divided, or taken a
6841 modulus (CODE says which and what kind of divide or modulus) by a
6842 constant C. See if we can eliminate that operation by folding it with
6843 other operations already in T. WIDE_TYPE, if non-null, is a type that
6844 should be used for the computation if wider than our type.
6846 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6847 (X * 2) + (Y * 4). We must, however, be assured that either the original
6848 expression would not overflow or that overflow is undefined for the type
6849 in the language in question.
6851 If we return a non-null expression, it is an equivalent form of the
6852 original computation, but need not be in the original type.
 6854 We set *STRICT_OVERFLOW_P to true if the return value depends on
6855 signed overflow being undefined. Otherwise we do not change
6856 *STRICT_OVERFLOW_P. */
6858 static tree
6859 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6860 bool *strict_overflow_p)
6862 /* To avoid exponential search depth, refuse to allow recursion past
6863 three levels. Beyond that (1) it's highly unlikely that we'll find
6864 something interesting and (2) we've probably processed it before
6865 when we built the inner expression. */
6867 static int depth;
6868 tree ret;
6870 if (depth > 3)
6871 return NULL;
6873 depth++;
6874 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6875 depth--;
6877 return ret;
6880 static tree
6881 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6882 bool *strict_overflow_p)
6884 tree type = TREE_TYPE (t);
6885 enum tree_code tcode = TREE_CODE (t);
6886 tree ctype = type;
6887 if (wide_type)
6889 if (TREE_CODE (type) == BITINT_TYPE
6890 || TREE_CODE (wide_type) == BITINT_TYPE)
6892 if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6893 ctype = wide_type;
6895 else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6896 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6897 ctype = wide_type;
6899 tree t1, t2;
6900 bool same_p = tcode == code;
6901 tree op0 = NULL_TREE, op1 = NULL_TREE;
6902 bool sub_strict_overflow_p;
6904 /* Don't deal with constants of zero here; they confuse the code below. */
6905 if (integer_zerop (c))
6906 return NULL_TREE;
6908 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6909 op0 = TREE_OPERAND (t, 0);
6911 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6912 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6914 /* Note that we need not handle conditional operations here since fold
6915 already handles those cases. So just do arithmetic here. */
6916 switch (tcode)
6918 case INTEGER_CST:
6919 /* For a constant, we can always simplify if we are a multiply
6920 or (for divide and modulus) if it is a multiple of our constant. */
6921 if (code == MULT_EXPR
6922 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6923 TYPE_SIGN (type)))
6925 tree tem = const_binop (code, fold_convert (ctype, t),
6926 fold_convert (ctype, c));
6927 /* If the multiplication overflowed, we lost information on it.
6928 See PR68142 and PR69845. */
6929 if (TREE_OVERFLOW (tem))
6930 return NULL_TREE;
6931 return tem;
6933 break;
6935 CASE_CONVERT: case NON_LVALUE_EXPR:
6936 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6937 break;
6938 /* If op0 is an expression ... */
6939 if ((COMPARISON_CLASS_P (op0)
6940 || UNARY_CLASS_P (op0)
6941 || BINARY_CLASS_P (op0)
6942 || VL_EXP_CLASS_P (op0)
6943 || EXPRESSION_CLASS_P (op0))
6944 /* ... and has wrapping overflow, and its type is smaller
6945 than ctype, then we cannot pass through as widening. */
6946 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6947 && (TYPE_PRECISION (ctype)
6948 > TYPE_PRECISION (TREE_TYPE (op0))))
6949 /* ... or this is a truncation (t is narrower than op0),
6950 then we cannot pass through this narrowing. */
6951 || (TYPE_PRECISION (type)
6952 < TYPE_PRECISION (TREE_TYPE (op0)))
6953 /* ... or signedness changes for division or modulus,
6954 then we cannot pass through this conversion. */
6955 || (code != MULT_EXPR
6956 && (TYPE_UNSIGNED (ctype)
6957 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6958 /* ... or has undefined overflow while the converted to
6959 type has not, we cannot do the operation in the inner type
6960 as that would introduce undefined overflow. */
6961 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6962 && !TYPE_OVERFLOW_UNDEFINED (type))))
6963 break;
6965 /* Pass the constant down and see if we can make a simplification. If
6966 we can, replace this expression with the inner simplification for
6967 possible later conversion to our or some other type. */
6968 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6969 && TREE_CODE (t2) == INTEGER_CST
6970 && !TREE_OVERFLOW (t2)
6971 && (t1 = extract_muldiv (op0, t2, code,
6972 code == MULT_EXPR ? ctype : NULL_TREE,
6973 strict_overflow_p)) != 0)
6974 return t1;
6975 break;
6977 case ABS_EXPR:
6978 /* If widening the type changes it from signed to unsigned, then we
6979 must avoid building ABS_EXPR itself as unsigned. */
6980 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6982 tree cstype = (*signed_type_for) (ctype);
6983 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6984 != 0)
6986 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6987 return fold_convert (ctype, t1);
6989 break;
6991 /* If the constant is negative, we cannot simplify this. */
6992 if (tree_int_cst_sgn (c) == -1)
6993 break;
6994 /* FALLTHROUGH */
6995 case NEGATE_EXPR:
6996 /* For division and modulus, type can't be unsigned, as e.g.
6997 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6998 For signed types, even with wrapping overflow, this is fine. */
6999 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
7000 break;
7001 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
7002 != 0)
7003 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
7004 break;
7006 case MIN_EXPR: case MAX_EXPR:
7007 /* If widening the type changes the signedness, then we can't perform
7008 this optimization as that changes the result. */
7009 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
7010 break;
7012 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
7013 sub_strict_overflow_p = false;
7014 if ((t1 = extract_muldiv (op0, c, code, wide_type,
7015 &sub_strict_overflow_p)) != 0
7016 && (t2 = extract_muldiv (op1, c, code, wide_type,
7017 &sub_strict_overflow_p)) != 0)
7019 if (tree_int_cst_sgn (c) < 0)
7020 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
7021 if (sub_strict_overflow_p)
7022 *strict_overflow_p = true;
7023 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7024 fold_convert (ctype, t2));
7026 break;
7028 case LSHIFT_EXPR: case RSHIFT_EXPR:
7029 /* If the second operand is constant, this is a multiplication
7030 or floor division, by a power of two, so we can treat it that
7031 way unless the multiplier or divisor overflows. Signed
7032 left-shift overflow is implementation-defined rather than
7033 undefined in C90, so do not convert signed left shift into
7034 multiplication. */
7035 if (TREE_CODE (op1) == INTEGER_CST
7036 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7037 /* const_binop may not detect overflow correctly,
7038 so check for it explicitly here. */
7039 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7040 wi::to_wide (op1))
7041 && (t1 = fold_convert (ctype,
7042 const_binop (LSHIFT_EXPR, size_one_node,
7043 op1))) != 0
7044 && !TREE_OVERFLOW (t1))
7045 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7046 ? MULT_EXPR : FLOOR_DIV_EXPR,
7047 ctype,
7048 fold_convert (ctype, op0),
7049 t1),
7050 c, code, wide_type, strict_overflow_p);
7051 break;
7053 case PLUS_EXPR: case MINUS_EXPR:
7054 /* See if we can eliminate the operation on both sides. If we can, we
7055 can return a new PLUS or MINUS. If we can't, the only remaining
7056 cases where we can do anything are if the second operand is a
7057 constant. */
7058 sub_strict_overflow_p = false;
7059 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7060 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7061 if (t1 != 0 && t2 != 0
7062 && TYPE_OVERFLOW_WRAPS (ctype)
7063 && (code == MULT_EXPR
7064 /* If not multiplication, we can only do this if both operands
7065 are divisible by c. */
7066 || (multiple_of_p (ctype, op0, c)
7067 && multiple_of_p (ctype, op1, c))))
7069 if (sub_strict_overflow_p)
7070 *strict_overflow_p = true;
7071 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7072 fold_convert (ctype, t2));
7075 /* If this was a subtraction, negate OP1 and set it to be an addition.
7076 This simplifies the logic below. */
7077 if (tcode == MINUS_EXPR)
7079 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7080 /* If OP1 was not easily negatable, the constant may be OP0. */
7081 if (TREE_CODE (op0) == INTEGER_CST)
7083 std::swap (op0, op1);
7084 std::swap (t1, t2);
7088 if (TREE_CODE (op1) != INTEGER_CST)
7089 break;
 7091 /* If either OP1 or C is negative, this optimization is not safe for
7092 some of the division and remainder types while for others we need
7093 to change the code. */
7094 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7096 if (code == CEIL_DIV_EXPR)
7097 code = FLOOR_DIV_EXPR;
7098 else if (code == FLOOR_DIV_EXPR)
7099 code = CEIL_DIV_EXPR;
7100 else if (code != MULT_EXPR
7101 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7102 break;
7105 /* If it's a multiply or a division/modulus operation of a multiple
7106 of our constant, do the operation and verify it doesn't overflow. */
7107 if (code == MULT_EXPR
7108 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7109 TYPE_SIGN (type)))
7111 op1 = const_binop (code, fold_convert (ctype, op1),
7112 fold_convert (ctype, c));
7113 /* We allow the constant to overflow with wrapping semantics. */
7114 if (op1 == 0
7115 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7116 break;
7118 else
7119 break;
7121 /* If we have an unsigned type, we cannot widen the operation since it
7122 will change the result if the original computation overflowed. */
7123 if (TYPE_UNSIGNED (ctype) && ctype != type)
7124 break;
7126 /* The last case is if we are a multiply. In that case, we can
7127 apply the distributive law to commute the multiply and addition
7128 if the multiplication of the constants doesn't overflow
7129 and overflow is defined. With undefined overflow
7130 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7131 But fold_plusminus_mult_expr would factor back any power-of-two
7132 value so do not distribute in the first place in this case. */
7133 if (code == MULT_EXPR
7134 && TYPE_OVERFLOW_WRAPS (ctype)
7135 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7136 return fold_build2 (tcode, ctype,
7137 fold_build2 (code, ctype,
7138 fold_convert (ctype, op0),
7139 fold_convert (ctype, c)),
7140 op1);
7142 break;
7144 case MULT_EXPR:
7145 /* We have a special case here if we are doing something like
7146 (C * 8) % 4 since we know that's zero. */
7147 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7148 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7149 /* If the multiplication can overflow we cannot optimize this. */
7150 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7151 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7152 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7153 TYPE_SIGN (type)))
7155 *strict_overflow_p = true;
7156 return omit_one_operand (type, integer_zero_node, op0);
7159 /* ... fall through ... */
7161 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7162 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7163 /* If we can extract our operation from the LHS, do so and return a
7164 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7165 do something only if the second operand is a constant. */
7166 if (same_p
7167 && TYPE_OVERFLOW_WRAPS (ctype)
7168 && (t1 = extract_muldiv (op0, c, code, wide_type,
7169 strict_overflow_p)) != 0)
7170 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7171 fold_convert (ctype, op1));
7172 else if (tcode == MULT_EXPR && code == MULT_EXPR
7173 && TYPE_OVERFLOW_WRAPS (ctype)
7174 && (t1 = extract_muldiv (op1, c, code, wide_type,
7175 strict_overflow_p)) != 0)
7176 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7177 fold_convert (ctype, t1));
7178 else if (TREE_CODE (op1) != INTEGER_CST)
7179 return 0;
7181 /* If these are the same operation types, we can associate them
7182 assuming no overflow. */
7183 if (tcode == code)
7185 bool overflow_p = false;
7186 wi::overflow_type overflow_mul;
7187 signop sign = TYPE_SIGN (ctype);
7188 unsigned prec = TYPE_PRECISION (ctype);
7189 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7190 wi::to_wide (c, prec),
7191 sign, &overflow_mul);
7192 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7193 if (overflow_mul
7194 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7195 overflow_p = true;
7196 if (!overflow_p)
7197 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7198 wide_int_to_tree (ctype, mul));
7201 /* If these operations "cancel" each other, we have the main
7202 optimizations of this pass, which occur when either constant is a
 7203 multiple of the other, in which case we replace this with an
 7204 operation of either CODE or TCODE.
7206 If we have an unsigned type, we cannot do this since it will change
7207 the result if the original computation overflowed. */
7208 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7209 && !TYPE_OVERFLOW_SANITIZED (ctype)
7210 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7211 || (tcode == MULT_EXPR
7212 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7213 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7214 && code != MULT_EXPR)))
7216 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7217 TYPE_SIGN (type)))
7219 *strict_overflow_p = true;
7220 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7221 fold_convert (ctype,
7222 const_binop (TRUNC_DIV_EXPR,
7223 op1, c)));
7225 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7226 TYPE_SIGN (type)))
7228 *strict_overflow_p = true;
7229 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7230 fold_convert (ctype,
7231 const_binop (TRUNC_DIV_EXPR,
7232 c, op1)));
7235 break;
7237 default:
7238 break;
7241 return 0;
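/* For illustration of the "cancel" case above: for signed x, where
   overflow is undefined,

     (x * 4) / 2    folds to    x * 2

   since op1 = 4 is a multiple of c = 2; *STRICT_OVERFLOW_P is set
   because the two forms differ if x * 4 were to wrap.  */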
7244 /* Return a node which has the indicated constant VALUE (either 0 or
7245 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7246 and is of the indicated TYPE. */
7248 tree
7249 constant_boolean_node (bool value, tree type)
7251 if (type == integer_type_node)
7252 return value ? integer_one_node : integer_zero_node;
7253 else if (type == boolean_type_node)
7254 return value ? boolean_true_node : boolean_false_node;
7255 else if (VECTOR_TYPE_P (type))
7256 return build_vector_from_val (type,
7257 build_int_cst (TREE_TYPE (type),
7258 value ? -1 : 0));
7259 else
7260 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7264 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7265 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7266 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7267 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7268 COND is the first argument to CODE; otherwise (as in the example
7269 given here), it is the second argument. TYPE is the type of the
7270 original expression. Return NULL_TREE if no simplification is
7271 possible. */
7273 static tree
7274 fold_binary_op_with_conditional_arg (location_t loc,
7275 enum tree_code code,
7276 tree type, tree op0, tree op1,
7277 tree cond, tree arg, int cond_first_p)
7279 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7280 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7281 tree test, true_value, false_value;
7282 tree lhs = NULL_TREE;
7283 tree rhs = NULL_TREE;
7284 enum tree_code cond_code = COND_EXPR;
7286 /* Do not move possibly trapping operations into the conditional as this
7287 pessimizes code and causes gimplification issues when applied late. */
7288 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7289 ANY_INTEGRAL_TYPE_P (type)
7290 && TYPE_OVERFLOW_TRAPS (type), op1))
7291 return NULL_TREE;
7293 if (TREE_CODE (cond) == COND_EXPR
7294 || TREE_CODE (cond) == VEC_COND_EXPR)
7296 test = TREE_OPERAND (cond, 0);
7297 true_value = TREE_OPERAND (cond, 1);
7298 false_value = TREE_OPERAND (cond, 2);
7299 /* If this operand is an expression that throws, it does not
7300 make sense to try to perform a logical or arithmetic operation
7301 involving it. */
7302 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7303 lhs = true_value;
7304 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7305 rhs = false_value;
7307 else if (!(TREE_CODE (type) != VECTOR_TYPE
7308 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7310 tree testtype = TREE_TYPE (cond);
7311 test = cond;
7312 true_value = constant_boolean_node (true, testtype);
7313 false_value = constant_boolean_node (false, testtype);
7315 else
7316 /* Detect the case of mixing vector and scalar types - bail out. */
7317 return NULL_TREE;
7319 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7320 cond_code = VEC_COND_EXPR;
7322 /* This transformation is only worthwhile if we don't have to wrap ARG
7323 in a SAVE_EXPR and the operation can be simplified without recursing
7324 on at least one of the branches once it's pushed inside the COND_EXPR. */
7325 if (!TREE_CONSTANT (arg)
7326 && (TREE_SIDE_EFFECTS (arg)
7327 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7328 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7329 return NULL_TREE;
7331 arg = fold_convert_loc (loc, arg_type, arg);
7332 if (lhs == 0)
7334 true_value = fold_convert_loc (loc, cond_type, true_value);
7335 if (cond_first_p)
7336 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7337 else
7338 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7340 if (rhs == 0)
7342 false_value = fold_convert_loc (loc, cond_type, false_value);
7343 if (cond_first_p)
7344 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7345 else
7346 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7349 /* Check that we have simplified at least one of the branches. */
7350 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7351 return NULL_TREE;
7353 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
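/* Editorial worked example (not part of the GCC sources): with
   COND = (x < y), ARG = a, CODE = PLUS_EXPR and COND_FIRST_P == 0,
   the function above builds

       a + (x < y)  -->  (x < y) ? (a + 1) : (a + 0)

   and keeps the result only if at least one branch simplified to a
   constant, per the TREE_CONSTANT checks before the final
   fold_build3_loc.  */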
7357 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7359 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7360 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7361 if ARG - ZERO_ARG is the same as ARG.
7363 If ARG is NULL, check for any value of type TYPE.
7365 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7366 and finite. The problematic cases are when X is zero, and its mode
7367 has signed zeros. In the case of rounding towards -infinity,
7368 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7369 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7371 bool
7372 fold_real_zero_addition_p (const_tree type, const_tree arg,
7373 const_tree zero_arg, int negate)
7375 if (!real_zerop (zero_arg))
7376 return false;
7378 /* Don't allow the fold with -fsignaling-nans. */
7379 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7380 return false;
7382 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7383 if (!HONOR_SIGNED_ZEROS (type))
7384 return true;
7386 /* There is no case that is safe for all rounding modes. */
7387 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7388 return false;
7390 /* In a vector or complex, we would need to check the sign of all zeros. */
7391 if (TREE_CODE (zero_arg) == VECTOR_CST)
7392 zero_arg = uniform_vector_p (zero_arg);
7393 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7394 return false;
7396 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7397 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7398 negate = !negate;
7400 /* The mode has signed zeros, and we have to honor their sign.
7401 In this situation, there are only two cases we can return true for.
7402 (i) X - 0 is the same as X with default rounding.
7403 (ii) X + 0 is X when X can't possibly be -0.0. */
7404 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
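/* Editorial examples (not part of the GCC sources), assuming a float
   TYPE that honors signed zeros, with default rounding:

     x + 0.0     cannot fold to x  (x == -0.0 would yield +0.0)
     x - 0.0     folds to x        (case (i) above)
     x + (-0.0)  folds to x        (rewritten as x - 0.0 by the
                                    sign flip above)  */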
7407 /* Subroutine of match.pd that optimizes comparisons of a division by
7408 a nonzero integer constant against an integer constant, i.e.
7409 X/C1 op C2.
7411 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7412 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7414 enum tree_code
7415 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7416 tree *hi, bool *neg_overflow)
7418 tree prod, tmp, type = TREE_TYPE (c1);
7419 signop sign = TYPE_SIGN (type);
7420 wi::overflow_type overflow;
7422 /* We have to do this the hard way to detect unsigned overflow.
7423 prod = int_const_binop (MULT_EXPR, c1, c2); */
7424 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7425 prod = force_fit_type (type, val, -1, overflow);
7426 *neg_overflow = false;
7428 if (sign == UNSIGNED)
7430 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7431 *lo = prod;
7433 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7434 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7435 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7437 else if (tree_int_cst_sgn (c1) >= 0)
7439 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7440 switch (tree_int_cst_sgn (c2))
7442 case -1:
7443 *neg_overflow = true;
7444 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7445 *hi = prod;
7446 break;
7448 case 0:
7449 *lo = fold_negate_const (tmp, type);
7450 *hi = tmp;
7451 break;
7453 case 1:
7454 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7455 *lo = prod;
7456 break;
7458 default:
7459 gcc_unreachable ();
7462 else
7464 /* A negative divisor reverses the relational operators. */
7465 code = swap_tree_comparison (code);
7467 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7468 switch (tree_int_cst_sgn (c2))
7470 case -1:
7471 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7472 *lo = prod;
7473 break;
7475 case 0:
7476 *hi = fold_negate_const (tmp, type);
7477 *lo = tmp;
7478 break;
7480 case 1:
7481 *neg_overflow = true;
7482 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7483 *hi = prod;
7484 break;
7486 default:
7487 gcc_unreachable ();
7491 if (code != EQ_EXPR && code != NE_EXPR)
7492 return code;
7494 if (TREE_OVERFLOW (*lo)
7495 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7496 *lo = NULL_TREE;
7497 if (TREE_OVERFLOW (*hi)
7498 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7499 *hi = NULL_TREE;
7501 return code;
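/* Editorial worked example (not part of the GCC sources): for unsigned
   X, "X / 4 == 2" holds exactly for X in [8, 11], so fold_div_compare
   sets *lo = 8 and *hi = 11 and the caller can emit a range test
   instead of a division.  A negative C1 is handled by first swapping
   the comparison code, since dividing by a negative constant reverses
   the ordering.  */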
7504 /* Test whether it is preferable to swap two operands, ARG0 and
7505 ARG1, for example because ARG0 is an integer constant and ARG1
7506 isn't. */
7508 bool
7509 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7511 if (CONSTANT_CLASS_P (arg1))
7512 return false;
7513 if (CONSTANT_CLASS_P (arg0))
7514 return true;
7516 STRIP_NOPS (arg0);
7517 STRIP_NOPS (arg1);
7519 if (TREE_CONSTANT (arg1))
7520 return false;
7521 if (TREE_CONSTANT (arg0))
7522 return true;
7524 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7525 for commutative and comparison operators. Ensuring a canonical
7526 form allows the optimizers to find additional redundancies without
7527 having to explicitly check for both orderings. */
7528 if (TREE_CODE (arg0) == SSA_NAME
7529 && TREE_CODE (arg1) == SSA_NAME
7530 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7531 return true;
7533 /* Put SSA_NAMEs last. */
7534 if (TREE_CODE (arg1) == SSA_NAME)
7535 return false;
7536 if (TREE_CODE (arg0) == SSA_NAME)
7537 return true;
7539 /* Put variables last. */
7540 if (DECL_P (arg1))
7541 return false;
7542 if (DECL_P (arg0))
7543 return true;
7545 return false;
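/* Editorial examples (not part of the GCC sources) of the canonical
   order this predicate induces for commutative operands:

     5 + x    -->  x + 5     (constants last)
     _5 + _2  -->  _2 + _5   (lower SSA_NAME version first)  */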
7549 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7550 means A >= Y && A != MAX, but in this case we know that
7551 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7553 static tree
7554 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7556 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7558 if (TREE_CODE (bound) == LT_EXPR)
7559 a = TREE_OPERAND (bound, 0);
7560 else if (TREE_CODE (bound) == GT_EXPR)
7561 a = TREE_OPERAND (bound, 1);
7562 else
7563 return NULL_TREE;
7565 typea = TREE_TYPE (a);
7566 if (!INTEGRAL_TYPE_P (typea)
7567 && !POINTER_TYPE_P (typea))
7568 return NULL_TREE;
7570 if (TREE_CODE (ineq) == LT_EXPR)
7572 a1 = TREE_OPERAND (ineq, 1);
7573 y = TREE_OPERAND (ineq, 0);
7575 else if (TREE_CODE (ineq) == GT_EXPR)
7577 a1 = TREE_OPERAND (ineq, 0);
7578 y = TREE_OPERAND (ineq, 1);
7580 else
7581 return NULL_TREE;
7583 if (TREE_TYPE (a1) != typea)
7584 return NULL_TREE;
7586 if (POINTER_TYPE_P (typea))
7588 /* Convert the pointer types into integer before taking the difference. */
7589 tree ta = fold_convert_loc (loc, ssizetype, a);
7590 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7591 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7593 else
7594 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7596 if (!diff || !integer_onep (diff))
7597 return NULL_TREE;
7599 return fold_build2_loc (loc, GE_EXPR, type, a, y);
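/* Editorial worked example (not part of the GCC sources): with
   BOUND = "a < x" and INEQ = "y < a + 1", the difference (a + 1) - a
   folds to 1, so the function above returns "a >= y"; combined with
   the bound a < x this is the promised non-sharp form.  */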
7602 /* Fold a sum or difference of at least one multiplication.
7603 Returns the folded tree or NULL if no simplification could be made. */
7605 static tree
7606 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7607 tree arg0, tree arg1)
7609 tree arg00, arg01, arg10, arg11;
7610 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7612 /* (A * C) +- (B * C) -> (A+-B) * C.
7613 (A * C) +- A -> A * (C+-1).
7614 We are most concerned about the case where C is a constant,
7615 but other combinations show up during loop reduction. Since
7616 it is not difficult, try all four possibilities. */
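/* Editorial examples (not part of the GCC sources) of the shapes
   handled here:

       (a * c) + (b * c)  -->  (a + b) * c
       (a * c) - a        -->  a * (c - 1)      (a is treated as a * 1)
       i * 4 + j * 2      -->  (i * 2 + j) * 2  (common power-of-two
                                                 factor)  */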
7618 if (TREE_CODE (arg0) == MULT_EXPR)
7620 arg00 = TREE_OPERAND (arg0, 0);
7621 arg01 = TREE_OPERAND (arg0, 1);
7623 else if (TREE_CODE (arg0) == INTEGER_CST)
7625 arg00 = build_one_cst (type);
7626 arg01 = arg0;
7628 else
7630 /* We cannot generate constant 1 for fract. */
7631 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7632 return NULL_TREE;
7633 arg00 = arg0;
7634 arg01 = build_one_cst (type);
7636 if (TREE_CODE (arg1) == MULT_EXPR)
7638 arg10 = TREE_OPERAND (arg1, 0);
7639 arg11 = TREE_OPERAND (arg1, 1);
7641 else if (TREE_CODE (arg1) == INTEGER_CST)
7643 arg10 = build_one_cst (type);
7644 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7645 the purpose of this canonicalization. */
7646 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7647 && negate_expr_p (arg1)
7648 && code == PLUS_EXPR)
7650 arg11 = negate_expr (arg1);
7651 code = MINUS_EXPR;
7653 else
7654 arg11 = arg1;
7656 else
7658 /* We cannot generate constant 1 for fract. */
7659 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7660 return NULL_TREE;
7661 arg10 = arg1;
7662 arg11 = build_one_cst (type);
7664 same = NULL_TREE;
7666 /* Prefer factoring a common non-constant. */
7667 if (operand_equal_p (arg00, arg10, 0))
7668 same = arg00, alt0 = arg01, alt1 = arg11;
7669 else if (operand_equal_p (arg01, arg11, 0))
7670 same = arg01, alt0 = arg00, alt1 = arg10;
7671 else if (operand_equal_p (arg00, arg11, 0))
7672 same = arg00, alt0 = arg01, alt1 = arg10;
7673 else if (operand_equal_p (arg01, arg10, 0))
7674 same = arg01, alt0 = arg00, alt1 = arg11;
7676 /* No identical multiplicands; see if we can find a common
7677 power-of-two factor in non-power-of-two multiplies. This
7678 can help in multi-dimensional array access. */
7679 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7681 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7682 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7683 HOST_WIDE_INT tmp;
7684 bool swap = false;
7685 tree maybe_same;
7687 /* Move min of absolute values to int11. */
7688 if (absu_hwi (int01) < absu_hwi (int11))
7690 tmp = int01, int01 = int11, int11 = tmp;
7691 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7692 maybe_same = arg01;
7693 swap = true;
7695 else
7696 maybe_same = arg11;
7698 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7699 if (factor > 1
7700 && pow2p_hwi (factor)
7701 && (int01 & (factor - 1)) == 0
7702 /* The remainder should not be a constant, otherwise we
7703 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7704 increase the number of multiplications needed. */
7705 && TREE_CODE (arg10) != INTEGER_CST)
7707 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7708 build_int_cst (TREE_TYPE (arg00),
7709 int01 / int11));
7710 alt1 = arg10;
7711 same = maybe_same;
7712 if (swap)
7713 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7717 if (!same)
7718 return NULL_TREE;
7720 if (! ANY_INTEGRAL_TYPE_P (type)
7721 || TYPE_OVERFLOW_WRAPS (type)
7722 /* We are neither factoring zero nor minus one. */
7723 || TREE_CODE (same) == INTEGER_CST)
7724 return fold_build2_loc (loc, MULT_EXPR, type,
7725 fold_build2_loc (loc, code, type,
7726 fold_convert_loc (loc, type, alt0),
7727 fold_convert_loc (loc, type, alt1)),
7728 fold_convert_loc (loc, type, same));
7730 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7731 same may be minus one and thus the multiplication may overflow. Perform
7732 the sum operation in an unsigned type. */
7733 tree utype = unsigned_type_for (type);
7734 tree tem = fold_build2_loc (loc, code, utype,
7735 fold_convert_loc (loc, utype, alt0),
7736 fold_convert_loc (loc, utype, alt1));
7737 /* If the sum evaluated to a constant that is not -INF, the
7738 multiplication cannot overflow. */
7739 if (TREE_CODE (tem) == INTEGER_CST
7740 && (wi::to_wide (tem)
7741 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7742 return fold_build2_loc (loc, MULT_EXPR, type,
7743 fold_convert (type, tem), same);
7745 /* Do not resort to unsigned multiplication because
7746 we lose the no-overflow property of the expression. */
7747 return NULL_TREE;
7750 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7751 specified by EXPR into the buffer PTR of length LEN bytes.
7752 Return the number of bytes placed in the buffer, or zero
7753 upon failure. */
7755 static int
7756 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7758 tree type = TREE_TYPE (expr);
7759 int total_bytes;
7760 if (TREE_CODE (type) == BITINT_TYPE)
7762 struct bitint_info info;
7763 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7764 gcc_assert (ok);
7765 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7766 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7768 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7769 /* More work will be needed here when adding _BitInt support for PDP
7770 endian if the limb is smaller than a word, or if the _BitInt limb
7771 ordering doesn't match the target endianness. */
7772 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7773 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7774 || (GET_MODE_SIZE (limb_mode)
7775 >= UNITS_PER_WORD)));
7777 else
7778 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7780 else
7781 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7782 int byte, offset, word, words;
7783 unsigned char value;
7785 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7786 return 0;
7787 if (off == -1)
7788 off = 0;
7790 if (ptr == NULL)
7791 /* Dry run. */
7792 return MIN (len, total_bytes - off);
7794 words = total_bytes / UNITS_PER_WORD;
7796 for (byte = 0; byte < total_bytes; byte++)
7798 int bitpos = byte * BITS_PER_UNIT;
7799 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7800 number of bytes. */
7801 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7803 if (total_bytes > UNITS_PER_WORD)
7805 word = byte / UNITS_PER_WORD;
7806 if (WORDS_BIG_ENDIAN)
7807 word = (words - 1) - word;
7808 offset = word * UNITS_PER_WORD;
7809 if (BYTES_BIG_ENDIAN)
7810 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7811 else
7812 offset += byte % UNITS_PER_WORD;
7814 else
7815 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7816 if (offset >= off && offset - off < len)
7817 ptr[offset - off] = value;
7819 return MIN (len, total_bytes - off);
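/* Editorial example (not part of the GCC sources): encoding the 32-bit
   constant 0x01020304 stores, depending on target byte order,

     little endian:  ptr[0..3] = { 0x04, 0x03, 0x02, 0x01 }
     big endian:     ptr[0..3] = { 0x01, 0x02, 0x03, 0x04 }

   With PTR == NULL the call is a dry run that just computes the byte
   count.  */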
7823 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7824 specified by EXPR into the buffer PTR of length LEN bytes.
7825 Return the number of bytes placed in the buffer, or zero
7826 upon failure. */
7828 static int
7829 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7831 tree type = TREE_TYPE (expr);
7832 scalar_mode mode = SCALAR_TYPE_MODE (type);
7833 int total_bytes = GET_MODE_SIZE (mode);
7834 FIXED_VALUE_TYPE value;
7835 tree i_value, i_type;
7837 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7838 return 0;
7840 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7842 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7843 return 0;
7845 value = TREE_FIXED_CST (expr);
7846 i_value = double_int_to_tree (i_type, value.data);
7848 return native_encode_int (i_value, ptr, len, off);
7852 /* Subroutine of native_encode_expr. Encode the REAL_CST
7853 specified by EXPR into the buffer PTR of length LEN bytes.
7854 Return the number of bytes placed in the buffer, or zero
7855 upon failure. */
7857 static int
7858 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7860 tree type = TREE_TYPE (expr);
7861 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7862 int byte, offset, word, words, bitpos;
7863 unsigned char value;
7865 /* There are always 32 bits in each long, no matter the size of
7866 the host's long. We handle floating point representations with
7867 up to 192 bits. */
7868 long tmp[6];
7870 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7871 return 0;
7872 if (off == -1)
7873 off = 0;
7875 if (ptr == NULL)
7876 /* Dry run. */
7877 return MIN (len, total_bytes - off);
7879 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7881 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7883 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7884 bitpos += BITS_PER_UNIT)
7886 byte = (bitpos / BITS_PER_UNIT) & 3;
7887 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7889 if (UNITS_PER_WORD < 4)
7891 word = byte / UNITS_PER_WORD;
7892 if (WORDS_BIG_ENDIAN)
7893 word = (words - 1) - word;
7894 offset = word * UNITS_PER_WORD;
7895 if (BYTES_BIG_ENDIAN)
7896 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7897 else
7898 offset += byte % UNITS_PER_WORD;
7900 else
7902 offset = byte;
7903 if (BYTES_BIG_ENDIAN)
7905 /* Reverse bytes within each long, or within the entire float
7906 if it's smaller than a long (for HFmode). */
7907 offset = MIN (3, total_bytes - 1) - offset;
7908 gcc_assert (offset >= 0);
7911 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7912 if (offset >= off
7913 && offset - off < len)
7914 ptr[offset - off] = value;
7916 return MIN (len, total_bytes - off);
7919 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7920 specified by EXPR into the buffer PTR of length LEN bytes.
7921 Return the number of bytes placed in the buffer, or zero
7922 upon failure. */
7924 static int
7925 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7927 int rsize, isize;
7928 tree part;
7930 part = TREE_REALPART (expr);
7931 rsize = native_encode_expr (part, ptr, len, off);
7932 if (off == -1 && rsize == 0)
7933 return 0;
7934 part = TREE_IMAGPART (expr);
7935 if (off != -1)
7936 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7937 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7938 len - rsize, off);
7939 if (off == -1 && isize != rsize)
7940 return 0;
7941 return rsize + isize;
7944 /* Like native_encode_vector, but only encode the first COUNT elements.
7945 The other arguments are as for native_encode_vector. */
7947 static int
7948 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7949 int off, unsigned HOST_WIDE_INT count)
7951 tree itype = TREE_TYPE (TREE_TYPE (expr));
7952 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7953 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7955 /* This is the only case in which elements can be smaller than a byte.
7956 Element 0 is always in the lsb of the containing byte. */
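/* Editorial example (not part of the GCC sources): with 1-bit
   elements, element I lands in bit I % 8 of byte I / 8, so the
   8-element mask { 1, 1, 0, 0, 1, 0, 0, 0 } encodes as the single
   byte 0x13.  */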
7957 unsigned int elt_bits = TYPE_PRECISION (itype);
7958 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7959 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7960 return 0;
7962 if (off == -1)
7963 off = 0;
7965 /* Zero the buffer and then set bits later where necessary. */
7966 int extract_bytes = MIN (len, total_bytes - off);
7967 if (ptr)
7968 memset (ptr, 0, extract_bytes);
7970 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7971 unsigned int first_elt = off * elts_per_byte;
7972 unsigned int extract_elts = extract_bytes * elts_per_byte;
7973 for (unsigned int i = 0; i < extract_elts; ++i)
7975 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7976 if (TREE_CODE (elt) != INTEGER_CST)
7977 return 0;
7979 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7981 unsigned int bit = i * elt_bits;
7982 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7985 return extract_bytes;
7988 int offset = 0;
7989 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7990 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7992 if (off >= size)
7994 off -= size;
7995 continue;
7997 tree elem = VECTOR_CST_ELT (expr, i);
7998 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7999 len - offset, off);
8000 if ((off == -1 && res != size) || res == 0)
8001 return 0;
8002 offset += res;
8003 if (offset >= len)
8004 return (off == -1 && i < count - 1) ? 0 : offset;
8005 if (off != -1)
8006 off = 0;
8008 return offset;
8011 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
8012 specified by EXPR into the buffer PTR of length LEN bytes.
8013 Return the number of bytes placed in the buffer, or zero
8014 upon failure. */
8016 static int
8017 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8019 unsigned HOST_WIDE_INT count;
8020 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
8021 return 0;
8022 return native_encode_vector_part (expr, ptr, len, off, count);
8026 /* Subroutine of native_encode_expr. Encode the STRING_CST
8027 specified by EXPR into the buffer PTR of length LEN bytes.
8028 Return the number of bytes placed in the buffer, or zero
8029 upon failure. */
8031 static int
8032 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8034 tree type = TREE_TYPE (expr);
8036 /* Wide-char strings are encoded in target byte order, so natively
8037 encoding them is trivial. */
8038 if (BITS_PER_UNIT != CHAR_BIT
8039 || TREE_CODE (type) != ARRAY_TYPE
8040 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8041 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8042 return 0;
8044 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8045 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8046 return 0;
8047 if (off == -1)
8048 off = 0;
8049 len = MIN (total_bytes - off, len);
8050 if (ptr == NULL)
8051 /* Dry run. */;
8052 else
8054 int written = 0;
8055 if (off < TREE_STRING_LENGTH (expr))
8057 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8058 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8060 memset (ptr + written, 0, len - written);
8062 return len;
8066 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8067 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8068 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8069 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8070 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8071 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8072 Return the number of bytes placed in the buffer, or zero upon failure. */
8074 int
8075 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8077 /* We don't support starting at a negative offset, and -1 is special. */
8078 if (off < -1)
8079 return 0;
8081 switch (TREE_CODE (expr))
8083 case INTEGER_CST:
8084 return native_encode_int (expr, ptr, len, off);
8086 case REAL_CST:
8087 return native_encode_real (expr, ptr, len, off);
8089 case FIXED_CST:
8090 return native_encode_fixed (expr, ptr, len, off);
8092 case COMPLEX_CST:
8093 return native_encode_complex (expr, ptr, len, off);
8095 case VECTOR_CST:
8096 return native_encode_vector (expr, ptr, len, off);
8098 case STRING_CST:
8099 return native_encode_string (expr, ptr, len, off);
8101 default:
8102 return 0;
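#if 0
/* Editorial usage sketch, not part of the GCC sources: round-trip a
   constant through its target byte representation.  */
static tree
example_round_trip (tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof buf, -1);
  if (len == 0)
    return NULL_TREE;	/* Unsupported constant or buffer too small.  */
  return native_interpret_expr (TREE_TYPE (cst), buf, len);
}
#endif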
8106 /* Try to find a type whose byte size is at least FIELDSIZE bytes and
8107 at most LEN bytes, with underlying mode precision/size a multiple
8108 of BITS_PER_UNIT. As native_{interpret,encode}_int work in terms of
8109 machine modes, we can't just use build_nonstandard_integer_type. */
8111 tree
8112 find_bitfield_repr_type (int fieldsize, int len)
8114 machine_mode mode;
8115 for (int pass = 0; pass < 2; pass++)
8117 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8118 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8119 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8120 && known_eq (GET_MODE_PRECISION (mode),
8121 GET_MODE_BITSIZE (mode))
8122 && known_le (GET_MODE_SIZE (mode), len))
8124 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8125 if (ret && TYPE_MODE (ret) == mode)
8126 return ret;
8130 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8131 if (int_n_enabled_p[i]
8132 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8133 && int_n_trees[i].unsigned_type)
8135 tree ret = int_n_trees[i].unsigned_type;
8136 mode = TYPE_MODE (ret);
8137 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8138 && known_eq (GET_MODE_PRECISION (mode),
8139 GET_MODE_BITSIZE (mode))
8140 && known_le (GET_MODE_SIZE (mode), len))
8141 return ret;
8144 return NULL_TREE;
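/* Editorial example (not part of the GCC sources): on a typical target
   with 8-bit units and a 32-bit SImode integer type, FIELDSIZE == 3
   and LEN >= 4 yield the 4-byte unsigned type, the narrowest mode
   whose size covers the field.  */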
8147 /* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8148 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8149 must be non-NULL and OFF zero), then in addition to filling the
8150 bytes pointed to by PTR with the value, also clear any bits pointed
8151 to by MASK that are known to be initialized; the remaining mask bits
8152 are kept as-is, e.g. for uninitialized padding bits or fields. */
8154 int
8155 native_encode_initializer (tree init, unsigned char *ptr, int len,
8156 int off, unsigned char *mask)
8158 int r;
8160 /* We don't support starting at a negative offset, and -1 is special. */
8161 if (off < -1 || init == NULL_TREE)
8162 return 0;
8164 gcc_assert (mask == NULL || (off == 0 && ptr));
8166 STRIP_NOPS (init);
8167 switch (TREE_CODE (init))
8169 case VIEW_CONVERT_EXPR:
8170 case NON_LVALUE_EXPR:
8171 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8172 mask);
8173 default:
8174 r = native_encode_expr (init, ptr, len, off);
8175 if (mask)
8176 memset (mask, 0, r);
8177 return r;
8178 case CONSTRUCTOR:
8179 tree type = TREE_TYPE (init);
8180 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8181 if (total_bytes < 0)
8182 return 0;
8183 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8184 return 0;
8185 int o = off == -1 ? 0 : off;
8186 if (TREE_CODE (type) == ARRAY_TYPE)
8188 tree min_index;
8189 unsigned HOST_WIDE_INT cnt;
8190 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8191 constructor_elt *ce;
8193 if (!TYPE_DOMAIN (type)
8194 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8195 return 0;
8197 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8198 if (fieldsize <= 0)
8199 return 0;
8201 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8202 if (ptr)
8203 memset (ptr, '\0', MIN (total_bytes - off, len));
8205 for (cnt = 0; ; cnt++)
8207 tree val = NULL_TREE, index = NULL_TREE;
8208 HOST_WIDE_INT pos = curpos, count = 0;
8209 bool full = false;
8210 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8212 val = ce->value;
8213 index = ce->index;
8215 else if (mask == NULL
8216 || CONSTRUCTOR_NO_CLEARING (init)
8217 || curpos >= total_bytes)
8218 break;
8219 else
8220 pos = total_bytes;
8222 if (index && TREE_CODE (index) == RANGE_EXPR)
8224 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8225 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8226 return 0;
8227 offset_int wpos
8228 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8229 - wi::to_offset (min_index),
8230 TYPE_PRECISION (sizetype));
8231 wpos *= fieldsize;
8232 if (!wi::fits_shwi_p (pos))
8233 return 0;
8234 pos = wpos.to_shwi ();
8235 offset_int wcount
8236 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8237 - wi::to_offset (TREE_OPERAND (index, 0)),
8238 TYPE_PRECISION (sizetype));
8239 if (!wi::fits_shwi_p (wcount))
8240 return 0;
8241 count = wcount.to_shwi ();
8243 else if (index)
8245 if (TREE_CODE (index) != INTEGER_CST)
8246 return 0;
8247 offset_int wpos
8248 = wi::sext (wi::to_offset (index)
8249 - wi::to_offset (min_index),
8250 TYPE_PRECISION (sizetype));
8251 wpos *= fieldsize;
8252 if (!wi::fits_shwi_p (wpos))
8253 return 0;
8254 pos = wpos.to_shwi ();
8257 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8259 if (valueinit == -1)
8261 tree zero = build_zero_cst (TREE_TYPE (type));
8262 r = native_encode_initializer (zero, ptr + curpos,
8263 fieldsize, 0,
8264 mask + curpos);
8265 if (TREE_CODE (zero) == CONSTRUCTOR)
8266 ggc_free (zero);
8267 if (!r)
8268 return 0;
8269 valueinit = curpos;
8270 curpos += fieldsize;
8272 while (curpos != pos)
8274 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8275 memcpy (mask + curpos, mask + valueinit, fieldsize);
8276 curpos += fieldsize;
8280 curpos = pos;
8281 if (val)
8284 if (off == -1
8285 || (curpos >= off
8286 && (curpos + fieldsize
8287 <= (HOST_WIDE_INT) off + len)))
8289 if (full)
8291 if (ptr)
8292 memcpy (ptr + (curpos - o), ptr + (pos - o),
8293 fieldsize);
8294 if (mask)
8295 memcpy (mask + curpos, mask + pos, fieldsize);
8297 else if (!native_encode_initializer (val,
8299 ? ptr + curpos - o
8300 : NULL,
8301 fieldsize,
8302 off == -1 ? -1
8303 : 0,
8304 mask
8305 ? mask + curpos
8306 : NULL))
8307 return 0;
8308 else
8310 full = true;
8311 pos = curpos;
8314 else if (curpos + fieldsize > off
8315 && curpos < (HOST_WIDE_INT) off + len)
8317 /* Partial overlap. */
8318 unsigned char *p = NULL;
8319 int no = 0;
8320 int l;
8321 gcc_assert (mask == NULL);
8322 if (curpos >= off)
8324 if (ptr)
8325 p = ptr + curpos - off;
8326 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8327 fieldsize);
8329 else
8331 p = ptr;
8332 no = off - curpos;
8333 l = len;
8335 if (!native_encode_initializer (val, p, l, no, NULL))
8336 return 0;
8338 curpos += fieldsize;
8340 while (count-- != 0);
8342 return MIN (total_bytes - off, len);
8344 else if (TREE_CODE (type) == RECORD_TYPE
8345 || TREE_CODE (type) == UNION_TYPE)
8347 unsigned HOST_WIDE_INT cnt;
8348 constructor_elt *ce;
8349 tree fld_base = TYPE_FIELDS (type);
8350 tree to_free = NULL_TREE;
8352 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8353 if (ptr != NULL)
8354 memset (ptr, '\0', MIN (total_bytes - o, len));
8355 for (cnt = 0; ; cnt++)
8357 tree val = NULL_TREE, field = NULL_TREE;
8358 HOST_WIDE_INT pos = 0, fieldsize;
8359 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8361 if (to_free)
8363 ggc_free (to_free);
8364 to_free = NULL_TREE;
8367 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8369 val = ce->value;
8370 field = ce->index;
8371 if (field == NULL_TREE)
8372 return 0;
8374 pos = int_byte_position (field);
8375 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8376 continue;
8378 else if (mask == NULL
8379 || CONSTRUCTOR_NO_CLEARING (init))
8380 break;
8381 else
8382 pos = total_bytes;
8384 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8386 tree fld;
8387 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8389 if (TREE_CODE (fld) != FIELD_DECL)
8390 continue;
8391 if (fld == field)
8392 break;
8393 if (DECL_PADDING_P (fld))
8394 continue;
8395 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8396 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8397 return 0;
8398 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8399 continue;
8400 break;
8402 if (fld == NULL_TREE)
8404 if (ce == NULL)
8405 break;
8406 return 0;
8408 fld_base = DECL_CHAIN (fld);
8409 if (fld != field)
8411 cnt--;
8412 field = fld;
8413 pos = int_byte_position (field);
8414 val = build_zero_cst (TREE_TYPE (fld));
8415 if (TREE_CODE (val) == CONSTRUCTOR)
8416 to_free = val;
8420 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8421 && TYPE_DOMAIN (TREE_TYPE (field))
8422 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8424 if (mask || off != -1)
8425 return 0;
8426 if (val == NULL_TREE)
8427 continue;
8428 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8429 return 0;
8430 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8431 if (fieldsize < 0
8432 || (int) fieldsize != fieldsize
8433 || (pos + fieldsize) > INT_MAX)
8434 return 0;
8435 if (pos + fieldsize > total_bytes)
8437 if (ptr != NULL && total_bytes < len)
8438 memset (ptr + total_bytes, '\0',
8439 MIN (pos + fieldsize, len) - total_bytes);
8440 total_bytes = pos + fieldsize;
8443 else
8445 if (DECL_SIZE_UNIT (field) == NULL_TREE
8446 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8447 return 0;
8448 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8450 if (fieldsize == 0)
8451 continue;
8453 /* Prepare to deal with integral bit-fields and filter out other
8454 bit-fields that do not start and end on a byte boundary. */
8455 if (DECL_BIT_FIELD (field))
8457 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8458 return 0;
8459 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8460 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8462 bpos %= BITS_PER_UNIT;
8463 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8464 epos = fieldsize % BITS_PER_UNIT;
8465 fieldsize += BITS_PER_UNIT - 1;
8466 fieldsize /= BITS_PER_UNIT;
8468 else if (bpos % BITS_PER_UNIT
8469 || DECL_SIZE (field) == NULL_TREE
8470 || !tree_fits_shwi_p (DECL_SIZE (field))
8471 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8472 return 0;
8475 if (off != -1 && pos + fieldsize <= off)
8476 continue;
8478 if (val == NULL_TREE)
8479 continue;
8481 if (DECL_BIT_FIELD (field)
8482 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8484 /* FIXME: Handle PDP endian. */
8485 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8486 return 0;
8488 if (TREE_CODE (val) != INTEGER_CST)
8489 return 0;
8491 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8492 tree repr_type = NULL_TREE;
8493 HOST_WIDE_INT rpos = 0;
8494 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8496 rpos = int_byte_position (repr);
8497 repr_type = TREE_TYPE (repr);
8499 else
8501 repr_type = find_bitfield_repr_type (fieldsize, len);
8502 if (repr_type == NULL_TREE)
8503 return 0;
8504 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8505 gcc_assert (repr_size > 0 && repr_size <= len);
8506 if (pos + repr_size <= o + len)
8507 rpos = pos;
8508 else
8510 rpos = o + len - repr_size;
8511 gcc_assert (rpos <= pos);
8515 if (rpos > pos)
8516 return 0;
8517 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8518 int diff = (TYPE_PRECISION (repr_type)
8519 - TYPE_PRECISION (TREE_TYPE (field)));
8520 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8521 if (!BYTES_BIG_ENDIAN)
8522 w = wi::lshift (w, bitoff);
8523 else
8524 w = wi::lshift (w, diff - bitoff);
8525 val = wide_int_to_tree (repr_type, w);
8527 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8528 / BITS_PER_UNIT + 1];
8529 int l = native_encode_int (val, buf, sizeof buf, 0);
8530 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8531 return 0;
8533 if (ptr == NULL)
8534 continue;
8536 /* If the bitfield does not start at byte boundary, handle
8537 the partial byte at the start. */
8538 if (bpos
8539 && (off == -1 || (pos >= off && len >= 1)))
8541 if (!BYTES_BIG_ENDIAN)
8543 int msk = (1 << bpos) - 1;
8544 buf[pos - rpos] &= ~msk;
8545 buf[pos - rpos] |= ptr[pos - o] & msk;
8546 if (mask)
8548 if (fieldsize > 1 || epos == 0)
8549 mask[pos] &= msk;
8550 else
8551 mask[pos] &= (msk | ~((1 << epos) - 1));
8554 else
8556 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8557 buf[pos - rpos] &= msk;
8558 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8559 if (mask)
8561 if (fieldsize > 1 || epos == 0)
8562 mask[pos] &= ~msk;
8563 else
8564 mask[pos] &= (~msk
8565 | ((1 << (BITS_PER_UNIT - epos))
8566 - 1));
8570 /* If the bitfield does not end at byte boundary, handle
8571 the partial byte at the end. */
8572 if (epos
8573 && (off == -1
8574 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8576 if (!BYTES_BIG_ENDIAN)
8578 int msk = (1 << epos) - 1;
8579 buf[pos - rpos + fieldsize - 1] &= msk;
8580 buf[pos - rpos + fieldsize - 1]
8581 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8582 if (mask && (fieldsize > 1 || bpos == 0))
8583 mask[pos + fieldsize - 1] &= ~msk;
8585 else
8587 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8588 buf[pos - rpos + fieldsize - 1] &= ~msk;
8589 buf[pos - rpos + fieldsize - 1]
8590 |= ptr[pos + fieldsize - 1 - o] & msk;
8591 if (mask && (fieldsize > 1 || bpos == 0))
8592 mask[pos + fieldsize - 1] &= msk;
8595 if (off == -1
8596 || (pos >= off
8597 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8599 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8600 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8601 memset (mask + pos + (bpos != 0), 0,
8602 fieldsize - (bpos != 0) - (epos != 0));
8604 else
8606 /* Partial overlap. */
8607 HOST_WIDE_INT fsz = fieldsize;
8608 gcc_assert (mask == NULL);
8609 if (pos < off)
8611 fsz -= (off - pos);
8612 pos = off;
8614 if (pos + fsz > (HOST_WIDE_INT) off + len)
8615 fsz = (HOST_WIDE_INT) off + len - pos;
8616 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8618 continue;
8621 if (off == -1
8622 || (pos >= off
8623 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8625 int fldsize = fieldsize;
8626 if (off == -1)
8628 tree fld = DECL_CHAIN (field);
8629 while (fld)
8631 if (TREE_CODE (fld) == FIELD_DECL)
8632 break;
8633 fld = DECL_CHAIN (fld);
8635 if (fld == NULL_TREE)
8636 fldsize = len - pos;
8638 r = native_encode_initializer (val, ptr ? ptr + pos - o
8639 : NULL,
8640 fldsize,
8641 off == -1 ? -1 : 0,
8642 mask ? mask + pos : NULL);
8643 if (!r)
8644 return 0;
8645 if (off == -1
8646 && fldsize != fieldsize
8647 && r > fieldsize
8648 && pos + r > total_bytes)
8649 total_bytes = pos + r;
8651 else
8653 /* Partial overlap. */
8654 unsigned char *p = NULL;
8655 int no = 0;
8656 int l;
8657 gcc_assert (mask == NULL);
8658 if (pos >= off)
8660 if (ptr)
8661 p = ptr + pos - off;
8662 l = MIN ((HOST_WIDE_INT) off + len - pos,
8663 fieldsize);
8665 else
8667 p = ptr;
8668 no = off - pos;
8669 l = len;
8671 if (!native_encode_initializer (val, p, l, no, NULL))
8672 return 0;
8675 return MIN (total_bytes - off, len);
8677 return 0;
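#if 0
/* Editorial usage sketch, not part of the GCC sources: encode a
   CONSTRUCTOR and learn which bytes are well defined.  Mask bits that
   remain set correspond to padding or uninitialized fields.  */
static void
example_encode_with_mask (tree ctor, unsigned char *buf, int size)
{
  unsigned char *mask = XNEWVEC (unsigned char, size);
  memset (mask, 0xff, size);	/* Start with everything "unknown".  */
  if (native_encode_initializer (ctor, buf, size, 0, mask))
    {
      /* Bits still set in mask[] were not written by the
	 initializer.  */
    }
  XDELETEVEC (mask);
}
#endif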
8682 /* Subroutine of native_interpret_expr. Interpret the contents of
8683 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8684 If the buffer cannot be interpreted, return NULL_TREE. */
8686 static tree
8687 native_interpret_int (tree type, const unsigned char *ptr, int len)
8689 int total_bytes;
8690 if (TREE_CODE (type) == BITINT_TYPE)
8692 struct bitint_info info;
8693 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8694 gcc_assert (ok);
8695 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8696 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8698 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8699 /* More work will be needed here when adding _BitInt support for PDP
8700 endian if the limb is smaller than a word, or if the _BitInt limb
8701 ordering doesn't match the target endianness. */
8702 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8703 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8704 || (GET_MODE_SIZE (limb_mode)
8705 >= UNITS_PER_WORD)));
8707 else
8708 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8710 else
8711 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8713 if (total_bytes > len
8714 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8715 return NULL_TREE;
8717 wide_int result = wi::from_buffer (ptr, total_bytes);
8719 return wide_int_to_tree (type, result);
8723 /* Subroutine of native_interpret_expr. Interpret the contents of
8724 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8725 If the buffer cannot be interpreted, return NULL_TREE. */
8727 static tree
8728 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8730 scalar_mode mode = SCALAR_TYPE_MODE (type);
8731 int total_bytes = GET_MODE_SIZE (mode);
8732 double_int result;
8733 FIXED_VALUE_TYPE fixed_value;
8735 if (total_bytes > len
8736 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8737 return NULL_TREE;
8739 result = double_int::from_buffer (ptr, total_bytes);
8740 fixed_value = fixed_from_double_int (result, mode);
8742 return build_fixed (type, fixed_value);
8746 /* Subroutine of native_interpret_expr. Interpret the contents of
8747 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8748 If the buffer cannot be interpreted, return NULL_TREE. */
8750 tree
8751 native_interpret_real (tree type, const unsigned char *ptr, int len)
8753 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8754 int total_bytes = GET_MODE_SIZE (mode);
8755 unsigned char value;
8756 /* There are always 32 bits in each long, no matter the size of
8757 the host's long. We handle floating point representations with
8758 up to 192 bits. */
8759 REAL_VALUE_TYPE r;
8760 long tmp[6];
8762 if (total_bytes > len || total_bytes > 24)
8763 return NULL_TREE;
8764 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8766 memset (tmp, 0, sizeof (tmp));
8767 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8768 bitpos += BITS_PER_UNIT)
8770 /* Both OFFSET and BYTE index within a long;
8771 bitpos indexes the whole float. */
8772 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8773 if (UNITS_PER_WORD < 4)
8775 int word = byte / UNITS_PER_WORD;
8776 if (WORDS_BIG_ENDIAN)
8777 word = (words - 1) - word;
8778 offset = word * UNITS_PER_WORD;
8779 if (BYTES_BIG_ENDIAN)
8780 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8781 else
8782 offset += byte % UNITS_PER_WORD;
8784 else
8786 offset = byte;
8787 if (BYTES_BIG_ENDIAN)
8789 /* Reverse bytes within each long, or within the entire float
8790 if it's smaller than a long (for HFmode). */
8791 offset = MIN (3, total_bytes - 1) - offset;
8792 gcc_assert (offset >= 0);
8795 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8797 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8800 real_from_target (&r, tmp, mode);
8801 return build_real (type, r);
8805 /* Subroutine of native_interpret_expr. Interpret the contents of
8806 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8807 If the buffer cannot be interpreted, return NULL_TREE. */
8809 static tree
8810 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8812 tree etype, rpart, ipart;
8813 int size;
8815 etype = TREE_TYPE (type);
8816 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8817 if (size * 2 > len)
8818 return NULL_TREE;
8819 rpart = native_interpret_expr (etype, ptr, size);
8820 if (!rpart)
8821 return NULL_TREE;
8822 ipart = native_interpret_expr (etype, ptr+size, size);
8823 if (!ipart)
8824 return NULL_TREE;
8825 return build_complex (type, rpart, ipart);
8828 /* Read a vector of type TYPE from the target memory image given by BYTES,
8829 which contains LEN bytes. The vector is known to be encodable using
8830 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8832 Return the vector on success, otherwise return null. */
8834 static tree
8835 native_interpret_vector_part (tree type, const unsigned char *bytes,
8836 unsigned int len, unsigned int npatterns,
8837 unsigned int nelts_per_pattern)
8839 tree elt_type = TREE_TYPE (type);
8840 if (VECTOR_BOOLEAN_TYPE_P (type)
8841 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8843 /* This is the only case in which elements can be smaller than a byte.
8844 Element 0 is always in the lsb of the containing byte. */
8845 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8846 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8847 return NULL_TREE;
8849 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8850 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8852 unsigned int bit_index = i * elt_bits;
8853 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8854 unsigned int lsb = bit_index % BITS_PER_UNIT;
8855 builder.quick_push (bytes[byte_index] & (1 << lsb)
8856 ? build_all_ones_cst (elt_type)
8857 : build_zero_cst (elt_type));
8859 return builder.build ();
8862 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8863 if (elt_bytes * npatterns * nelts_per_pattern > len)
8864 return NULL_TREE;
8866 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8867 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8869 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8870 if (!elt)
8871 return NULL_TREE;
8872 builder.quick_push (elt);
8873 bytes += elt_bytes;
8875 return builder.build ();
8878 /* Subroutine of native_interpret_expr. Interpret the contents of
8879 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8880 If the buffer cannot be interpreted, return NULL_TREE. */
8882 static tree
8883 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8885 unsigned HOST_WIDE_INT size;
8887 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8888 || size > len)
8889 return NULL_TREE;
8891 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8892 return native_interpret_vector_part (type, ptr, len, count, 1);
8896 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8897 the buffer PTR of length LEN as a constant of type TYPE. For
8898 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8899 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8900 return NULL_TREE. */
8902 tree
8903 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8905 switch (TREE_CODE (type))
8907 case INTEGER_TYPE:
8908 case ENUMERAL_TYPE:
8909 case BOOLEAN_TYPE:
8910 case POINTER_TYPE:
8911 case REFERENCE_TYPE:
8912 case OFFSET_TYPE:
8913 case BITINT_TYPE:
8914 return native_interpret_int (type, ptr, len);
8916 case REAL_TYPE:
8917 if (tree ret = native_interpret_real (type, ptr, len))
8919 /* For floating point values in composite modes, punt if this
8920 folding doesn't preserve bit representation. As the mode doesn't
8921 have fixed precision while GCC pretends it does, there could be
8922 valid values that GCC can't really represent accurately.
8923 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8924 bit combinations which GCC doesn't preserve. */
8925 unsigned char buf[24 * 2];
8926 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8927 int total_bytes = GET_MODE_SIZE (mode);
8928 memcpy (buf + 24, ptr, total_bytes);
8929 clear_type_padding_in_mask (type, buf + 24);
8930 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8931 || memcmp (buf + 24, buf, total_bytes) != 0)
8932 return NULL_TREE;
8933 return ret;
8935 return NULL_TREE;
8937 case FIXED_POINT_TYPE:
8938 return native_interpret_fixed (type, ptr, len);
8940 case COMPLEX_TYPE:
8941 return native_interpret_complex (type, ptr, len);
8943 case VECTOR_TYPE:
8944 return native_interpret_vector (type, ptr, len);
8946 default:
8947 return NULL_TREE;
8951 /* Returns true if we can interpret the contents of a native encoding
8952 as TYPE. */
8954 bool
8955 can_native_interpret_type_p (tree type)
8957 switch (TREE_CODE (type))
8959 case INTEGER_TYPE:
8960 case ENUMERAL_TYPE:
8961 case BOOLEAN_TYPE:
8962 case POINTER_TYPE:
8963 case REFERENCE_TYPE:
8964 case FIXED_POINT_TYPE:
8965 case REAL_TYPE:
8966 case COMPLEX_TYPE:
8967 case VECTOR_TYPE:
8968 case OFFSET_TYPE:
8969 return true;
8970 default:
8971 return false;
8975 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8976 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8978 tree
8979 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8980 int len)
8982 vec<constructor_elt, va_gc> *elts = NULL;
8983 if (TREE_CODE (type) == ARRAY_TYPE)
8985 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8986 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8987 return NULL_TREE;
8989 HOST_WIDE_INT cnt = 0;
8990 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8992 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8993 return NULL_TREE;
8994 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8996 if (eltsz == 0)
8997 cnt = 0;
8998 HOST_WIDE_INT pos = 0;
8999 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
9001 tree v = NULL_TREE;
9002 if (pos >= len || pos + eltsz > len)
9003 return NULL_TREE;
9004 if (can_native_interpret_type_p (TREE_TYPE (type)))
9006 v = native_interpret_expr (TREE_TYPE (type),
9007 ptr + off + pos, eltsz);
9008 if (v == NULL_TREE)
9009 return NULL_TREE;
9011 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9012 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9013 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
9014 eltsz);
9015 if (v == NULL_TREE)
9016 return NULL_TREE;
9017 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9019 return build_constructor (type, elts);
9021 if (TREE_CODE (type) != RECORD_TYPE)
9022 return NULL_TREE;
9023 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9025 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
9026 || is_empty_type (TREE_TYPE (field)))
9027 continue;
9028 tree fld = field;
9029 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9030 int diff = 0;
9031 tree v = NULL_TREE;
9032 if (DECL_BIT_FIELD (field))
9034 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9035 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9037 poly_int64 bitoffset;
9038 poly_uint64 field_offset, fld_offset;
9039 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9040 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9041 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9042 else
9043 bitoffset = 0;
9044 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9045 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9046 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9047 - TYPE_PRECISION (TREE_TYPE (field)));
9048 if (!bitoffset.is_constant (&bitoff)
9049 || bitoff < 0
9050 || bitoff > diff)
9051 return NULL_TREE;
9053 else
9055 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9056 return NULL_TREE;
9057 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9058 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9059 bpos %= BITS_PER_UNIT;
9060 fieldsize += bpos;
9061 fieldsize += BITS_PER_UNIT - 1;
9062 fieldsize /= BITS_PER_UNIT;
9063 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9064 if (repr_type == NULL_TREE)
9065 return NULL_TREE;
9066 sz = int_size_in_bytes (repr_type);
9067 if (sz < 0 || sz > len)
9068 return NULL_TREE;
9069 pos = int_byte_position (field);
9070 if (pos < 0 || pos > len || pos + fieldsize > len)
9071 return NULL_TREE;
9072 HOST_WIDE_INT rpos;
9073 if (pos + sz <= len)
9074 rpos = pos;
9075 else
9077 rpos = len - sz;
9078 gcc_assert (rpos <= pos);
9080 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9081 pos = rpos;
9082 diff = (TYPE_PRECISION (repr_type)
9083 - TYPE_PRECISION (TREE_TYPE (field)));
9084 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9085 if (v == NULL_TREE)
9086 return NULL_TREE;
9087 fld = NULL_TREE;
9091 if (fld)
9093 sz = int_size_in_bytes (TREE_TYPE (fld));
9094 if (sz < 0 || sz > len)
9095 return NULL_TREE;
9096 tree byte_pos = byte_position (fld);
9097 if (!tree_fits_shwi_p (byte_pos))
9098 return NULL_TREE;
9099 pos = tree_to_shwi (byte_pos);
9100 if (pos < 0 || pos > len || pos + sz > len)
9101 return NULL_TREE;
9103 if (fld == NULL_TREE)
9104 /* Already handled above. */;
9105 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9107 v = native_interpret_expr (TREE_TYPE (fld),
9108 ptr + off + pos, sz);
9109 if (v == NULL_TREE)
9110 return NULL_TREE;
9112 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9113 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9114 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9115 if (v == NULL_TREE)
9116 return NULL_TREE;
9117 if (fld != field)
9119 if (TREE_CODE (v) != INTEGER_CST)
9120 return NULL_TREE;
9122 /* FIXME: Figure out how to handle PDP endian bitfields. */
9123 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9124 return NULL_TREE;
9125 if (!BYTES_BIG_ENDIAN)
9126 v = wide_int_to_tree (TREE_TYPE (field),
9127 wi::lrshift (wi::to_wide (v), bitoff));
9128 else
9129 v = wide_int_to_tree (TREE_TYPE (field),
9130 wi::lrshift (wi::to_wide (v),
9131 diff - bitoff));
9133 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9135 return build_constructor (type, elts);
9138 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9139 or extracted constant positions and/or sizes aren't byte aligned. */
9141 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9142 bits between adjacent elements. AMNT should be within
9143 [0, BITS_PER_UNIT).
9144 Example, AMNT = 2:
9145 00011111|11100000 << 2 = 01111111|10000000
9146 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9148 void
9149 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9150 unsigned int amnt)
9152 if (amnt == 0)
9153 return;
9155 unsigned char carry_over = 0U;
9156 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9157 unsigned char clear_mask = (~0U) << amnt;
9159 for (unsigned int i = 0; i < sz; i++)
9161 unsigned prev_carry_over = carry_over;
9162 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9164 ptr[i] <<= amnt;
9165 if (i != 0)
9167 ptr[i] &= clear_mask;
9168 ptr[i] |= prev_carry_over;
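#if 0
/* Editorial usage sketch, not part of the GCC sources: the two-byte
   little-endian buffer { 0xe0, 0x1f } holds 0x1fe0; shifting left by
   2 bits gives 0x7f80, i.e. { 0x80, 0x7f }, matching the example in
   the comment above.  */
static void
example_shift_left (void)
{
  unsigned char buf[2] = { 0xe0, 0x1f };
  shift_bytes_in_array_left (buf, 2, 2);
  gcc_assert (buf[0] == 0x80 && buf[1] == 0x7f);
}
#endif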
9173 /* Like shift_bytes_in_array_left but for big-endian.
9174 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9175 bits between adjacent elements. AMNT should be within
9176 [0, BITS_PER_UNIT).
9177 Example, AMNT = 2:
9178 00011111|11100000 >> 2 = 00000111|11111000
9179 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9181 void
9182 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9183 unsigned int amnt)
9185 if (amnt == 0)
9186 return;
9188 unsigned char carry_over = 0U;
9189 unsigned char carry_mask = ~(~0U << amnt);
9191 for (unsigned int i = 0; i < sz; i++)
9193 unsigned prev_carry_over = carry_over;
9194 carry_over = ptr[i] & carry_mask;
9196 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9197 ptr[i] >>= amnt;
9198 ptr[i] |= prev_carry_over;
9202 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9203 directly on the VECTOR_CST encoding, in a way that works for variable-
9204 length vectors. Return the resulting VECTOR_CST on success or null
9205 on failure. */
9207 static tree
9208 fold_view_convert_vector_encoding (tree type, tree expr)
9210 tree expr_type = TREE_TYPE (expr);
9211 poly_uint64 type_bits, expr_bits;
9212 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9213 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9214 return NULL_TREE;
9216 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9217 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9218 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9219 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9221 /* We can only preserve the semantics of a stepped pattern if the new
9222 vector element is an integer of the same size. */
9223 if (VECTOR_CST_STEPPED_P (expr)
9224 && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits)
9225 return NULL_TREE;
9227 /* The number of bits needed to encode one element from every pattern
9228 of the original vector. */
9229 unsigned int expr_sequence_bits
9230 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9232 /* The number of bits needed to encode one element from every pattern
9233 of the result. */
9234 unsigned int type_sequence_bits
9235 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9237 /* Don't try to read more bytes than are available, which can happen
9238 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9239 The general VIEW_CONVERT handling can cope with that case, so there's
9240 no point complicating things here. */
9241 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9242 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9243 BITS_PER_UNIT);
9244 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9245 if (known_gt (buffer_bits, expr_bits))
9246 return NULL_TREE;
9248 /* Get enough bytes of EXPR to form the new encoding. */
9249 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9250 buffer.quick_grow (buffer_bytes);
9251 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9252 buffer_bits / expr_elt_bits)
9253 != (int) buffer_bytes)
9254 return NULL_TREE;
9256 /* Reencode the bytes as TYPE. */
9257 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9258 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9259 type_npatterns, nelts_per_pattern);
9262 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9263 TYPE at compile-time. If we're unable to perform the conversion
9264 return NULL_TREE. */
9266 static tree
9267 fold_view_convert_expr (tree type, tree expr)
9269 /* We support up to 512-bit values (for V8DFmode). */
9270 unsigned char buffer[64];
9271 int len;
9273 /* Check that the host and target are sane. */
9274 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9275 return NULL_TREE;
9277 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9278 if (tree res = fold_view_convert_vector_encoding (type, expr))
9279 return res;
9281 len = native_encode_expr (expr, buffer, sizeof (buffer));
9282 if (len == 0)
9283 return NULL_TREE;
9285 return native_interpret_expr (type, buffer, len);
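/* Sketch of the effect (compiled out; assumes IEEE-754 single precision,
   8-bit bytes and a 32-bit integer_type_node): view-converting the
   REAL_CST 1.0f reinterprets its bytes, so the fold yields the
   INTEGER_CST 0x3f800000.  */
#if 0
  tree f = build_real (float_type_node, dconst1);
  tree i = fold_view_convert_expr (integer_type_node, f);
  /* i is the INTEGER_CST 0x3f800000, the bit pattern of 1.0f.  */
#endif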
9288 /* Build an expression for the address of T. Folds away INDIRECT_REF
9289 to avoid confusing the gimplify process. */
9291 tree
9292 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9294 /* The size of the object is not relevant when talking about its address. */
9295 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9296 t = TREE_OPERAND (t, 0);
9298 if (INDIRECT_REF_P (t))
9300 t = TREE_OPERAND (t, 0);
9302 if (TREE_TYPE (t) != ptrtype)
9303 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9305 else if (TREE_CODE (t) == MEM_REF
9306 && integer_zerop (TREE_OPERAND (t, 1)))
9308 t = TREE_OPERAND (t, 0);
9310 if (TREE_TYPE (t) != ptrtype)
9311 t = fold_convert_loc (loc, ptrtype, t);
9313 else if (TREE_CODE (t) == MEM_REF
9314 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9315 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9316 TREE_OPERAND (t, 0),
9317 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9318 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9320 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9322 if (TREE_TYPE (t) != ptrtype)
9323 t = fold_convert_loc (loc, ptrtype, t);
9325 else
9326 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9328 return t;
9331 /* Build an expression for the address of T. */
9333 tree
9334 build_fold_addr_expr_loc (location_t loc, tree t)
9336 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9338 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
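/* Illustrative sketch (compiled out) of the INDIRECT_REF folding above,
   for a hypothetical pointer-valued tree P: taking the address of *P
   folds back to P (possibly via a conversion) rather than building
   ADDR_EXPR <INDIRECT_REF <P>>.  */
#if 0
  tree deref = build_fold_indirect_ref_loc (loc, p);   /* *p */
  tree addr = build_fold_addr_expr_loc (loc, deref);   /* folds to p */
#endif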
9341 /* Fold a unary expression of code CODE and type TYPE with operand
9342 OP0. Return the folded expression if folding is successful.
9343 Otherwise, return NULL_TREE. */
9345 tree
9346 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9348 tree tem;
9349 tree arg0;
9350 enum tree_code_class kind = TREE_CODE_CLASS (code);
9352 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9353 && TREE_CODE_LENGTH (code) == 1);
9355 arg0 = op0;
9356 if (arg0)
9358 if (CONVERT_EXPR_CODE_P (code)
9359 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9361 /* Don't use STRIP_NOPS, because signedness of argument type
9362 matters. */
9363 STRIP_SIGN_NOPS (arg0);
9365 else
9367 /* Strip any conversions that don't change the mode. This
9368 is safe for every expression, except for a comparison
9369 expression because its signedness is derived from its
9370 operands.
9372 Note that this is done as an internal manipulation within
9373 the constant folder, in order to find the simplest
9374 representation of the arguments so that their form can be
9375 studied. In any case, the appropriate type conversions
9376 should be put back in the tree that will get out of the
9377 constant folder. */
9378 STRIP_NOPS (arg0);
9381 if (CONSTANT_CLASS_P (arg0))
9383 tree tem = const_unop (code, type, arg0);
9384 if (tem)
9386 if (TREE_TYPE (tem) != type)
9387 tem = fold_convert_loc (loc, type, tem);
9388 return tem;
9393 tem = generic_simplify (loc, code, type, op0);
9394 if (tem)
9395 return tem;
9397 if (TREE_CODE_CLASS (code) == tcc_unary)
9399 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9400 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9401 fold_build1_loc (loc, code, type,
9402 fold_convert_loc (loc, TREE_TYPE (op0),
9403 TREE_OPERAND (arg0, 1))));
9404 else if (TREE_CODE (arg0) == COND_EXPR)
9406 tree arg01 = TREE_OPERAND (arg0, 1);
9407 tree arg02 = TREE_OPERAND (arg0, 2);
9408 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9409 arg01 = fold_build1_loc (loc, code, type,
9410 fold_convert_loc (loc,
9411 TREE_TYPE (op0), arg01));
9412 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9413 arg02 = fold_build1_loc (loc, code, type,
9414 fold_convert_loc (loc,
9415 TREE_TYPE (op0), arg02));
9416 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9417 arg01, arg02);
9419 /* If this was a conversion, and all we did was to move it
9420 inside the COND_EXPR, bring it back out. But leave it if
9421 it is a conversion from integer to integer and the
9422 result precision is no wider than a word since such a
9423 conversion is cheap and may be optimized away by combine,
9424 while it couldn't if it were outside the COND_EXPR. Then return
9425 so we don't get into an infinite recursion loop taking the
9426 conversion out and then back in. */
9428 if ((CONVERT_EXPR_CODE_P (code)
9429 || code == NON_LVALUE_EXPR)
9430 && TREE_CODE (tem) == COND_EXPR
9431 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9432 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9433 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9434 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9435 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9436 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9437 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9438 && (INTEGRAL_TYPE_P
9439 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9440 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9441 || flag_syntax_only))
9442 tem = build1_loc (loc, code, type,
9443 build3 (COND_EXPR,
9444 TREE_TYPE (TREE_OPERAND
9445 (TREE_OPERAND (tem, 1), 0)),
9446 TREE_OPERAND (tem, 0),
9447 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9448 TREE_OPERAND (TREE_OPERAND (tem, 2),
9449 0)));
9450 return tem;
9454 switch (code)
9456 case NON_LVALUE_EXPR:
9457 if (!maybe_lvalue_p (op0))
9458 return fold_convert_loc (loc, type, op0);
9459 return NULL_TREE;
9461 CASE_CONVERT:
9462 case FLOAT_EXPR:
9463 case FIX_TRUNC_EXPR:
9464 if (COMPARISON_CLASS_P (op0))
9466 /* If we have (type) (a CMP b) and type is an integral type, return
9467 new expression involving the new type. Canonicalize
9468 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9469 non-integral type.
9470 Do not fold the result as that would not simplify further; also,
9471 folding again results in recursion. */
9472 if (TREE_CODE (type) == BOOLEAN_TYPE)
9473 return build2_loc (loc, TREE_CODE (op0), type,
9474 TREE_OPERAND (op0, 0),
9475 TREE_OPERAND (op0, 1));
9476 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9477 && TREE_CODE (type) != VECTOR_TYPE)
9478 return build3_loc (loc, COND_EXPR, type, op0,
9479 constant_boolean_node (true, type),
9480 constant_boolean_node (false, type));
9483 /* Handle (T *)&A.B.C for A being of type T and B and C
9484 living at offset zero. This occurs frequently in
9485 C++ upcasting and then accessing the base. */
9486 if (TREE_CODE (op0) == ADDR_EXPR
9487 && POINTER_TYPE_P (type)
9488 && handled_component_p (TREE_OPERAND (op0, 0)))
9490 poly_int64 bitsize, bitpos;
9491 tree offset;
9492 machine_mode mode;
9493 int unsignedp, reversep, volatilep;
9494 tree base
9495 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9496 &offset, &mode, &unsignedp, &reversep,
9497 &volatilep);
9498 /* If the reference was to a (constant) zero offset, we can use
9499 the address of the base if it has the same base type
9500 as the result type and the pointer type is unqualified. */
9501 if (!offset
9502 && known_eq (bitpos, 0)
9503 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9504 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9505 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9506 return fold_convert_loc (loc, type,
9507 build_fold_addr_expr_loc (loc, base));
9510 if (TREE_CODE (op0) == MODIFY_EXPR
9511 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9512 /* Detect assigning a bitfield. */
9513 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9514 && DECL_BIT_FIELD
9515 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9517 /* Don't leave an assignment inside a conversion
9518 unless assigning a bitfield. */
9519 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9520 /* First do the assignment, then return converted constant. */
9521 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9522 suppress_warning (tem /* What warning? */);
9523 TREE_USED (tem) = 1;
9524 return tem;
9527 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9528 constants (if x has signed type, the sign bit cannot be set
9529 in c). This folds extension into the BIT_AND_EXPR.
9530 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9531 very likely don't have maximal range for their precision and this
9532 transformation effectively doesn't preserve non-maximal ranges. */
9533 if (TREE_CODE (type) == INTEGER_TYPE
9534 && TREE_CODE (op0) == BIT_AND_EXPR
9535 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9537 tree and_expr = op0;
9538 tree and0 = TREE_OPERAND (and_expr, 0);
9539 tree and1 = TREE_OPERAND (and_expr, 1);
9540 int change = 0;
9542 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9543 || (TYPE_PRECISION (type)
9544 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9545 change = 1;
9546 else if (TYPE_PRECISION (TREE_TYPE (and1))
9547 <= HOST_BITS_PER_WIDE_INT
9548 && tree_fits_uhwi_p (and1))
9550 unsigned HOST_WIDE_INT cst;
9552 cst = tree_to_uhwi (and1);
9553 cst &= HOST_WIDE_INT_M1U
9554 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9555 change = (cst == 0);
9556 if (change
9557 && !flag_syntax_only
9558 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9559 == ZERO_EXTEND))
9561 tree uns = unsigned_type_for (TREE_TYPE (and0));
9562 and0 = fold_convert_loc (loc, uns, and0);
9563 and1 = fold_convert_loc (loc, uns, and1);
9566 if (change)
9568 tem = force_fit_type (type, wi::to_widest (and1), 0,
9569 TREE_OVERFLOW (and1));
9570 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9571 fold_convert_loc (loc, type, and0), tem);
9575 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9576 cast (T1)X will fold away. We assume that this happens when X itself
9577 is a cast. */
9578 if (POINTER_TYPE_P (type)
9579 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9580 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9582 tree arg00 = TREE_OPERAND (arg0, 0);
9583 tree arg01 = TREE_OPERAND (arg0, 1);
9585 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9586 when the pointed type needs higher alignment than
9587 the p+ first operand's pointed type. */
9588 if (!in_gimple_form
9589 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9590 && (min_align_of_type (TREE_TYPE (type))
9591 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9592 return NULL_TREE;
9594 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9595 when type is a reference type and arg00's type is not,
9596 because arg00 could be validly nullptr and if arg01 doesn't return,
9597 we don't want false positive binding of reference to nullptr. */
9598 if (TREE_CODE (type) == REFERENCE_TYPE
9599 && !in_gimple_form
9600 && sanitize_flags_p (SANITIZE_NULL)
9601 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9602 return NULL_TREE;
9604 arg00 = fold_convert_loc (loc, type, arg00);
9605 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9608 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9609 of the same precision, and X is an integer type not narrower than
9610 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9611 if (INTEGRAL_TYPE_P (type)
9612 && TREE_CODE (op0) == BIT_NOT_EXPR
9613 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9614 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9615 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9617 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9618 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9619 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9620 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9621 fold_convert_loc (loc, type, tem));
9624 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9625 type of X and Y (integer types only). */
9626 if (INTEGRAL_TYPE_P (type)
9627 && TREE_CODE (op0) == MULT_EXPR
9628 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9629 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9630 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9631 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9633 /* Be careful not to introduce new overflows. */
9634 tree mult_type;
9635 if (TYPE_OVERFLOW_WRAPS (type))
9636 mult_type = type;
9637 else
9638 mult_type = unsigned_type_for (type);
9640 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9642 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9643 fold_convert_loc (loc, mult_type,
9644 TREE_OPERAND (op0, 0)),
9645 fold_convert_loc (loc, mult_type,
9646 TREE_OPERAND (op0, 1)));
9647 return fold_convert_loc (loc, type, tem);
9651 return NULL_TREE;
9653 case VIEW_CONVERT_EXPR:
9654 if (TREE_CODE (op0) == MEM_REF)
9656 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9657 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9658 tem = fold_build2_loc (loc, MEM_REF, type,
9659 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9660 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9661 return tem;
9664 return NULL_TREE;
9666 case NEGATE_EXPR:
9667 tem = fold_negate_expr (loc, arg0);
9668 if (tem)
9669 return fold_convert_loc (loc, type, tem);
9670 return NULL_TREE;
9672 case ABS_EXPR:
9673 /* Convert fabs((double)float) into (double)fabsf(float). */
9674 if (TREE_CODE (arg0) == NOP_EXPR
9675 && TREE_CODE (type) == REAL_TYPE)
9677 tree targ0 = strip_float_extensions (arg0);
9678 if (targ0 != arg0)
9679 return fold_convert_loc (loc, type,
9680 fold_build1_loc (loc, ABS_EXPR,
9681 TREE_TYPE (targ0),
9682 targ0));
9684 return NULL_TREE;
9686 case BIT_NOT_EXPR:
9687 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9688 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9689 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9690 fold_convert_loc (loc, type,
9691 TREE_OPERAND (arg0, 0)))))
9692 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9693 fold_convert_loc (loc, type,
9694 TREE_OPERAND (arg0, 1)));
9695 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9696 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9697 fold_convert_loc (loc, type,
9698 TREE_OPERAND (arg0, 1)))))
9699 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9700 fold_convert_loc (loc, type,
9701 TREE_OPERAND (arg0, 0)), tem);
9703 return NULL_TREE;
9705 case TRUTH_NOT_EXPR:
9706 /* Note that the operand of this must be an int
9707 and its values must be 0 or 1.
9708 ("true" is a fixed value perhaps depending on the language,
9709 but we don't handle values other than 1 correctly yet.) */
9710 tem = fold_truth_not_expr (loc, arg0);
9711 if (!tem)
9712 return NULL_TREE;
9713 return fold_convert_loc (loc, type, tem);
9715 case INDIRECT_REF:
9716 /* Fold *&X to X if X is an lvalue. */
9717 if (TREE_CODE (op0) == ADDR_EXPR)
9719 tree op00 = TREE_OPERAND (op0, 0);
9720 if ((VAR_P (op00)
9721 || TREE_CODE (op00) == PARM_DECL
9722 || TREE_CODE (op00) == RESULT_DECL)
9723 && !TREE_READONLY (op00))
9724 return op00;
9726 return NULL_TREE;
9728 default:
9729 return NULL_TREE;
9730 } /* switch (code) */
9734 /* If the operation was a conversion do _not_ mark a resulting constant
9735 with TREE_OVERFLOW if the original constant was not. These conversions
9736 have implementation defined behavior and retaining the TREE_OVERFLOW
9737 flag here would confuse later passes such as VRP. */
9738 tree
9739 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9740 tree type, tree op0)
9742 tree res = fold_unary_loc (loc, code, type, op0);
9743 if (res
9744 && TREE_CODE (res) == INTEGER_CST
9745 && TREE_CODE (op0) == INTEGER_CST
9746 && CONVERT_EXPR_CODE_P (code))
9747 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9749 return res;
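/* Sketch (compiled out) of why the wrapper matters, assuming an 8-bit
   signed char: narrowing 300 would flag the folded INTEGER_CST as
   overflowed, but OP0 itself was clean, so the wrapper copies OP0's
   clear flag instead.  */
#if 0
  tree op0 = build_int_cst (integer_type_node, 300);
  tree res = fold_unary_ignore_overflow_loc (loc, NOP_EXPR,
					     signed_char_type_node, op0);
  /* res is the INTEGER_CST 44 with TREE_OVERFLOW (res) clear.  */
#endif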
9752 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9753 operands OP0 and OP1. LOC is the location of the resulting expression.
9754 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9755 Return the folded expression if folding is successful. Otherwise,
9756 return NULL_TREE. */
9757 static tree
9758 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9759 tree arg0, tree arg1, tree op0, tree op1)
9761 tree tem;
9763 /* We only do these simplifications if we are optimizing. */
9764 if (!optimize)
9765 return NULL_TREE;
9767 /* Check for things like (A || B) && (A || C). We can convert this
9768 to A || (B && C). Note that either operator can be any of the four
9769 truth and/or operations and the transformation will still be
9770 valid. Also note that we only care about order for the
9771 ANDIF and ORIF operators. If B contains side effects, this
9772 might change the truth-value of A. */
9773 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9774 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9775 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9776 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9777 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9778 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9780 tree a00 = TREE_OPERAND (arg0, 0);
9781 tree a01 = TREE_OPERAND (arg0, 1);
9782 tree a10 = TREE_OPERAND (arg1, 0);
9783 tree a11 = TREE_OPERAND (arg1, 1);
9784 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9785 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9786 && (code == TRUTH_AND_EXPR
9787 || code == TRUTH_OR_EXPR));
9789 if (operand_equal_p (a00, a10, 0))
9790 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9791 fold_build2_loc (loc, code, type, a01, a11));
9792 else if (commutative && operand_equal_p (a00, a11, 0))
9793 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9794 fold_build2_loc (loc, code, type, a01, a10));
9795 else if (commutative && operand_equal_p (a01, a10, 0))
9796 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9797 fold_build2_loc (loc, code, type, a00, a11));
9799 /* This case is tricky because we must either have commutative
9800 operators or else A10 must not have side-effects. */
9802 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9803 && operand_equal_p (a01, a11, 0))
9804 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9805 fold_build2_loc (loc, code, type, a00, a10),
9806 a01);
9809 /* See if we can build a range comparison. */
9810 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9811 return tem;
9813 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9814 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9816 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9817 if (tem)
9818 return fold_build2_loc (loc, code, type, tem, arg1);
9821 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9822 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9824 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9825 if (tem)
9826 return fold_build2_loc (loc, code, type, arg0, tem);
9829 /* Check for the possibility of merging component references. If our
9830 lhs is another similar operation, try to merge its rhs with our
9831 rhs. Then try to merge our lhs and rhs. */
9832 if (TREE_CODE (arg0) == code
9833 && (tem = fold_truth_andor_1 (loc, code, type,
9834 TREE_OPERAND (arg0, 1), arg1)) != 0)
9835 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9837 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9838 return tem;
9840 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9841 if (param_logical_op_non_short_circuit != -1)
9842 logical_op_non_short_circuit
9843 = param_logical_op_non_short_circuit;
9844 if (logical_op_non_short_circuit
9845 && !sanitize_coverage_p ()
9846 && (code == TRUTH_AND_EXPR
9847 || code == TRUTH_ANDIF_EXPR
9848 || code == TRUTH_OR_EXPR
9849 || code == TRUTH_ORIF_EXPR))
9851 enum tree_code ncode, icode;
9853 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9854 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9855 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9857 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9858 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9859 We don't want to pack more than two leaves into a non-IF AND/OR
9860 expression.
9861 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9862 isn't equal to IF-CODE, then we don't want to add the right-hand
9863 operand.
9864 If the inner right-hand side of the left-hand operand has
9865 side-effects, or isn't simple, then we can't add to it, as otherwise we might destroy the if-sequence. */
9866 if (TREE_CODE (arg0) == icode
9867 && simple_condition_p (arg1)
9868 /* Needed for sequence points to handle trapping and
9869 side-effects. */
9870 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9872 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9873 arg1);
9874 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9875 tem);
9877 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9878 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9879 else if (TREE_CODE (arg1) == icode
9880 && simple_condition_p (arg0)
9881 /* Needed for sequence points to handle trapping and
9882 side-effects. */
9883 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9885 tem = fold_build2_loc (loc, ncode, type,
9886 arg0, TREE_OPERAND (arg1, 0));
9887 return fold_build2_loc (loc, icode, type, tem,
9888 TREE_OPERAND (arg1, 1));
9890 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9891 into (A OR B).
9892 For sequence point consistency, we need to check for trapping
9893 and side-effects. */
9894 else if (code == icode && simple_condition_p (arg0)
9895 && simple_condition_p (arg1))
9896 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9899 return NULL_TREE;
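/* Illustrative sketch (compiled out), with hypothetical side-effect-free
   boolean operands A, B and C and optimization enabled: folding the
   outer TRUTH_ANDIF_EXPR of (A || B) and (A || C) yields the factored
   form A || (B && C) via the transformation described above.  */
#if 0
  tree lhs = build2 (TRUTH_ORIF_EXPR, boolean_type_node, a, b);
  tree rhs = build2 (TRUTH_ORIF_EXPR, boolean_type_node, a, c);
  tree t = fold_build2_loc (loc, TRUTH_ANDIF_EXPR, boolean_type_node,
			    lhs, rhs);
  /* t is TRUTH_ORIF_EXPR <a, TRUTH_ANDIF_EXPR <b, c>>.  */
#endif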
9902 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9903 by changing CODE to reduce the magnitude of constants involved in
9904 ARG0 of the comparison.
9905 Returns a canonicalized comparison tree if a simplification was
9906 possible, otherwise returns NULL_TREE.
9907 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9908 valid if signed overflow is undefined. */
9910 static tree
9911 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9912 tree arg0, tree arg1,
9913 bool *strict_overflow_p)
9915 enum tree_code code0 = TREE_CODE (arg0);
9916 tree t, cst0 = NULL_TREE;
9917 int sgn0;
9919 /* Match A +- CST code arg1. We can change this only if overflow
9920 is undefined. */
9921 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9923 /* In principle pointers also have undefined overflow behavior,
9924 but that causes problems elsewhere. */
9925 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9926 && (code0 == MINUS_EXPR
9927 || code0 == PLUS_EXPR)
9928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9929 return NULL_TREE;
9931 /* Identify the constant in arg0 and its sign. */
9932 cst0 = TREE_OPERAND (arg0, 1);
9933 sgn0 = tree_int_cst_sgn (cst0);
9935 /* Overflowed constants and zero will cause problems. */
9936 if (integer_zerop (cst0)
9937 || TREE_OVERFLOW (cst0))
9938 return NULL_TREE;
9940 /* See if we can reduce the magnitude of the constant in
9941 arg0 by changing the comparison code. */
9942 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9943 if (code == LT_EXPR
9944 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9945 code = LE_EXPR;
9946 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9947 else if (code == GT_EXPR
9948 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9949 code = GE_EXPR;
9950 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9951 else if (code == LE_EXPR
9952 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9953 code = LT_EXPR;
9954 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9955 else if (code == GE_EXPR
9956 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9957 code = GT_EXPR;
9958 else
9959 return NULL_TREE;
9960 *strict_overflow_p = true;
9962 /* Now build the constant reduced in magnitude. But not if that
9963 would produce one outside of its type's range. */
9964 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9965 && ((sgn0 == 1
9966 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9967 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9968 || (sgn0 == -1
9969 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9970 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9971 return NULL_TREE;
9973 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9974 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9975 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9976 t = fold_convert (TREE_TYPE (arg1), t);
9978 return fold_build2_loc (loc, code, type, t, arg1);
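/* Worked example of the rewrite above for signed X, Y with undefined
   overflow: X - 10 < Y matches the LT_EXPR/MINUS_EXPR case with
   sgn0 == 1, so the code becomes LE_EXPR and the constant shrinks by
   one, giving X - 9 <= Y; *STRICT_OVERFLOW_P is set since this is only
   valid when X - 10 cannot wrap.  */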
9981 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9982 overflow further. Try to decrease the magnitude of constants involved
9983 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9984 and put sole constants at the second argument position.
9985 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9987 static tree
9988 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9989 tree arg0, tree arg1)
9991 tree t;
9992 bool strict_overflow_p;
9993 const char * const warnmsg = G_("assuming signed overflow does not occur "
9994 "when reducing constant in comparison");
9996 /* Try canonicalization by simplifying arg0. */
9997 strict_overflow_p = false;
9998 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9999 &strict_overflow_p);
10000 if (t)
10002 if (strict_overflow_p)
10003 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10004 return t;
10007 /* Try canonicalization by simplifying arg1 using the swapped
10008 comparison. */
10009 code = swap_tree_comparison (code);
10010 strict_overflow_p = false;
10011 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
10012 &strict_overflow_p);
10013 if (t && strict_overflow_p)
10014 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10015 return t;
10018 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
10019 space. This is used to avoid issuing overflow warnings for
10020 expressions like &p->x which cannot wrap. */
10022 static bool
10023 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10025 if (!POINTER_TYPE_P (TREE_TYPE (base)))
10026 return true;
10028 if (maybe_lt (bitpos, 0))
10029 return true;
10031 poly_wide_int wi_offset;
10032 int precision = TYPE_PRECISION (TREE_TYPE (base));
10033 if (offset == NULL_TREE)
10034 wi_offset = wi::zero (precision);
10035 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10036 return true;
10037 else
10038 wi_offset = wi::to_poly_wide (offset);
10040 wi::overflow_type overflow;
10041 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10042 precision);
10043 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10044 if (overflow)
10045 return true;
10047 poly_uint64 total_hwi, size;
10048 if (!total.to_uhwi (&total_hwi)
10049 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10050 &size)
10051 || known_eq (size, 0U))
10052 return true;
10054 if (known_le (total_hwi, size))
10055 return false;
10057 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10058 array. */
10059 if (TREE_CODE (base) == ADDR_EXPR
10060 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10061 &size)
10062 && maybe_ne (size, 0U)
10063 && known_le (total_hwi, size))
10064 return false;
10066 return true;
10069 /* Return a positive integer when the symbol DECL is known to have
10070 a nonzero address, zero when it's known not to (e.g., it's a weak
10071 symbol), and a negative integer when the symbol is not yet in the
10072 symbol table and so whether or not its address is zero is unknown.
10073 For function-local objects, always return a positive integer. */
10074 static int
10075 maybe_nonzero_address (tree decl)
10077 /* Normally, don't do anything for variables and functions before symtab is
10078 built; it is quite possible that DECL will be declared weak later.
10079 But if folding_initializer, we need a constant answer now, so create
10080 the symtab entry and prevent later weak declaration. */
10081 if (DECL_P (decl) && decl_in_symtab_p (decl))
10082 if (struct symtab_node *symbol
10083 = (folding_initializer
10084 ? symtab_node::get_create (decl)
10085 : symtab_node::get (decl)))
10086 return symbol->nonzero_address ();
10088 /* Function local objects are never NULL. */
10089 if (DECL_P (decl)
10090 && (DECL_CONTEXT (decl)
10091 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10092 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10093 return 1;
10095 return -1;
10098 /* Subroutine of fold_binary. This routine performs all of the
10099 transformations that are common to the equality/inequality
10100 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10101 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10102 fold_binary should call fold_binary. Fold a comparison with
10103 tree code CODE and type TYPE with operands OP0 and OP1. Return
10104 the folded comparison or NULL_TREE. */
10106 static tree
10107 fold_comparison (location_t loc, enum tree_code code, tree type,
10108 tree op0, tree op1)
10110 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10111 tree arg0, arg1, tem;
10113 arg0 = op0;
10114 arg1 = op1;
10116 STRIP_SIGN_NOPS (arg0);
10117 STRIP_SIGN_NOPS (arg1);
10119 /* For comparisons of pointers we can decompose it to a compile time
10120 comparison of the base objects and the offsets into the object.
10121 This requires at least one operand being an ADDR_EXPR or a
10122 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10123 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10124 && (TREE_CODE (arg0) == ADDR_EXPR
10125 || TREE_CODE (arg1) == ADDR_EXPR
10126 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10127 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10129 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10130 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10131 machine_mode mode;
10132 int volatilep, reversep, unsignedp;
10133 bool indirect_base0 = false, indirect_base1 = false;
10135 /* Get base and offset for the access. Strip ADDR_EXPR for
10136 get_inner_reference, but put it back by stripping INDIRECT_REF
10137 off the base object if possible. indirect_baseN will be true
10138 if baseN is not an address but refers to the object itself. */
10139 base0 = arg0;
10140 if (TREE_CODE (arg0) == ADDR_EXPR)
10142 base0
10143 = get_inner_reference (TREE_OPERAND (arg0, 0),
10144 &bitsize, &bitpos0, &offset0, &mode,
10145 &unsignedp, &reversep, &volatilep);
10146 if (INDIRECT_REF_P (base0))
10147 base0 = TREE_OPERAND (base0, 0);
10148 else
10149 indirect_base0 = true;
10151 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10153 base0 = TREE_OPERAND (arg0, 0);
10154 STRIP_SIGN_NOPS (base0);
10155 if (TREE_CODE (base0) == ADDR_EXPR)
10157 base0
10158 = get_inner_reference (TREE_OPERAND (base0, 0),
10159 &bitsize, &bitpos0, &offset0, &mode,
10160 &unsignedp, &reversep, &volatilep);
10161 if (INDIRECT_REF_P (base0))
10162 base0 = TREE_OPERAND (base0, 0);
10163 else
10164 indirect_base0 = true;
10166 if (offset0 == NULL_TREE || integer_zerop (offset0))
10167 offset0 = TREE_OPERAND (arg0, 1);
10168 else
10169 offset0 = size_binop (PLUS_EXPR, offset0,
10170 TREE_OPERAND (arg0, 1));
10171 if (poly_int_tree_p (offset0))
10173 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10174 TYPE_PRECISION (sizetype));
10175 tem <<= LOG2_BITS_PER_UNIT;
10176 tem += bitpos0;
10177 if (tem.to_shwi (&bitpos0))
10178 offset0 = NULL_TREE;
10182 base1 = arg1;
10183 if (TREE_CODE (arg1) == ADDR_EXPR)
10185 base1
10186 = get_inner_reference (TREE_OPERAND (arg1, 0),
10187 &bitsize, &bitpos1, &offset1, &mode,
10188 &unsignedp, &reversep, &volatilep);
10189 if (INDIRECT_REF_P (base1))
10190 base1 = TREE_OPERAND (base1, 0);
10191 else
10192 indirect_base1 = true;
10194 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10196 base1 = TREE_OPERAND (arg1, 0);
10197 STRIP_SIGN_NOPS (base1);
10198 if (TREE_CODE (base1) == ADDR_EXPR)
10200 base1
10201 = get_inner_reference (TREE_OPERAND (base1, 0),
10202 &bitsize, &bitpos1, &offset1, &mode,
10203 &unsignedp, &reversep, &volatilep);
10204 if (INDIRECT_REF_P (base1))
10205 base1 = TREE_OPERAND (base1, 0);
10206 else
10207 indirect_base1 = true;
10209 if (offset1 == NULL_TREE || integer_zerop (offset1))
10210 offset1 = TREE_OPERAND (arg1, 1);
10211 else
10212 offset1 = size_binop (PLUS_EXPR, offset1,
10213 TREE_OPERAND (arg1, 1));
10214 if (poly_int_tree_p (offset1))
10216 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10217 TYPE_PRECISION (sizetype));
10218 tem <<= LOG2_BITS_PER_UNIT;
10219 tem += bitpos1;
10220 if (tem.to_shwi (&bitpos1))
10221 offset1 = NULL_TREE;
10225 /* If we have equivalent bases we might be able to simplify. */
10226 if (indirect_base0 == indirect_base1
10227 && operand_equal_p (base0, base1,
10228 indirect_base0 ? OEP_ADDRESS_OF : 0))
10230 /* We can fold this expression to a constant if the non-constant
10231 offset parts are equal. */
10232 if ((offset0 == offset1
10233 || (offset0 && offset1
10234 && operand_equal_p (offset0, offset1, 0)))
10235 && (equality_code
10236 || (indirect_base0
10237 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10238 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10240 if (!equality_code
10241 && maybe_ne (bitpos0, bitpos1)
10242 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10243 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10244 fold_overflow_warning (("assuming pointer wraparound does not "
10245 "occur when comparing P +- C1 with "
10246 "P +- C2"),
10247 WARN_STRICT_OVERFLOW_CONDITIONAL);
10249 switch (code)
10251 case EQ_EXPR:
10252 if (known_eq (bitpos0, bitpos1))
10253 return constant_boolean_node (true, type);
10254 if (known_ne (bitpos0, bitpos1))
10255 return constant_boolean_node (false, type);
10256 break;
10257 case NE_EXPR:
10258 if (known_ne (bitpos0, bitpos1))
10259 return constant_boolean_node (true, type);
10260 if (known_eq (bitpos0, bitpos1))
10261 return constant_boolean_node (false, type);
10262 break;
10263 case LT_EXPR:
10264 if (known_lt (bitpos0, bitpos1))
10265 return constant_boolean_node (true, type);
10266 if (known_ge (bitpos0, bitpos1))
10267 return constant_boolean_node (false, type);
10268 break;
10269 case LE_EXPR:
10270 if (known_le (bitpos0, bitpos1))
10271 return constant_boolean_node (true, type);
10272 if (known_gt (bitpos0, bitpos1))
10273 return constant_boolean_node (false, type);
10274 break;
10275 case GE_EXPR:
10276 if (known_ge (bitpos0, bitpos1))
10277 return constant_boolean_node (true, type);
10278 if (known_lt (bitpos0, bitpos1))
10279 return constant_boolean_node (false, type);
10280 break;
10281 case GT_EXPR:
10282 if (known_gt (bitpos0, bitpos1))
10283 return constant_boolean_node (true, type);
10284 if (known_le (bitpos0, bitpos1))
10285 return constant_boolean_node (false, type);
10286 break;
10287 default:;
10290 /* We can simplify the comparison to a comparison of the variable
10291 offset parts if the constant offset parts are equal.
10292 Be careful to use signed sizetype here because otherwise we
10293 mess with array offsets in the wrong way. This is possible
10294 because pointer arithmetic is restricted to remain within an
10295 object and overflow on pointer differences is undefined as of
10296 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10297 else if (known_eq (bitpos0, bitpos1)
10298 && (equality_code
10299 || (indirect_base0
10300 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10301 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10303 /* By converting to signed sizetype we cover middle-end pointer
10304 arithmetic which operates on unsigned pointer types of size
10305 type size and ARRAY_REF offsets which are properly sign or
10306 zero extended from their type in case it is narrower than
10307 sizetype. */
10308 if (offset0 == NULL_TREE)
10309 offset0 = build_int_cst (ssizetype, 0);
10310 else
10311 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10312 if (offset1 == NULL_TREE)
10313 offset1 = build_int_cst (ssizetype, 0);
10314 else
10315 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10317 if (!equality_code
10318 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10319 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10320 fold_overflow_warning (("assuming pointer wraparound does not "
10321 "occur when comparing P +- C1 with "
10322 "P +- C2"),
10323 WARN_STRICT_OVERFLOW_COMPARISON);
10325 return fold_build2_loc (loc, code, type, offset0, offset1);
10328 /* For equal offsets we can simplify to a comparison of the
10329 base addresses. */
10330 else if (known_eq (bitpos0, bitpos1)
10331 && (indirect_base0
10332 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10333 && (indirect_base1
10334 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10335 && ((offset0 == offset1)
10336 || (offset0 && offset1
10337 && operand_equal_p (offset0, offset1, 0))))
10339 if (indirect_base0)
10340 base0 = build_fold_addr_expr_loc (loc, base0);
10341 if (indirect_base1)
10342 base1 = build_fold_addr_expr_loc (loc, base1);
10343 return fold_build2_loc (loc, code, type, base0, base1);
10345 /* Comparison between an ordinary (non-weak) symbol and a null
10346 pointer can be eliminated since such symbols must have a
10347 non-null address. In C, relational expressions between pointers
10348 to objects and null pointers are undefined. The results
10349 below follow the C++ rules with the additional property that
10350 every object pointer compares greater than a null pointer. */
10352 else if (((DECL_P (base0)
10353 && maybe_nonzero_address (base0) > 0
10354 /* Avoid folding references to struct members at offset 0 to
10355 prevent tests like '&ptr->firstmember == 0' from getting
10356 eliminated. When ptr is null, although the -> expression
10357 is strictly speaking invalid, GCC retains it as a matter
10358 of QoI. See PR c/44555. */
10359 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10360 || CONSTANT_CLASS_P (base0))
10361 && indirect_base0
10362 /* The caller guarantees that when one of the arguments is
10363 constant (i.e., null in this case) it is second. */
10364 && integer_zerop (arg1))
10366 switch (code)
10368 case EQ_EXPR:
10369 case LE_EXPR:
10370 case LT_EXPR:
10371 return constant_boolean_node (false, type);
10372 case GE_EXPR:
10373 case GT_EXPR:
10374 case NE_EXPR:
10375 return constant_boolean_node (true, type);
10376 default:
10377 gcc_unreachable ();
10382 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10383 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10384 the resulting offset is smaller in absolute value than the
10385 original one and has the same sign. */
10386 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10387 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10388 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10389 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10390 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10391 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10392 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10393 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10395 tree const1 = TREE_OPERAND (arg0, 1);
10396 tree const2 = TREE_OPERAND (arg1, 1);
10397 tree variable1 = TREE_OPERAND (arg0, 0);
10398 tree variable2 = TREE_OPERAND (arg1, 0);
10399 tree cst;
10400 const char * const warnmsg = G_("assuming signed overflow does not "
10401 "occur when combining constants around "
10402 "a comparison");
10404 /* Put the constant on the side where it doesn't overflow and is
10405 of lower absolute value and of the same sign as before. */
10406 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10407 ? MINUS_EXPR : PLUS_EXPR,
10408 const2, const1);
10409 if (!TREE_OVERFLOW (cst)
10410 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10411 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10413 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10414 return fold_build2_loc (loc, code, type,
10415 variable1,
10416 fold_build2_loc (loc, TREE_CODE (arg1),
10417 TREE_TYPE (arg1),
10418 variable2, cst));
10421 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10422 ? MINUS_EXPR : PLUS_EXPR,
10423 const1, const2);
10424 if (!TREE_OVERFLOW (cst)
10425 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10426 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10428 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10429 return fold_build2_loc (loc, code, type,
10430 fold_build2_loc (loc, TREE_CODE (arg0),
10431 TREE_TYPE (arg0),
10432 variable1, cst),
10433 variable2);
10437 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10438 if (tem)
10439 return tem;
10441 /* If we are comparing an expression that just has comparisons
10442 of two integer values, arithmetic expressions of those comparisons,
10443 and constants, we can simplify it. There are only three cases
10444 to check: the two values can either be equal, the first can be
10445 greater, or the second can be greater. Fold the expression for
10446 those three values. Since each value must be 0 or 1, we have
10447 eight possibilities, each of which corresponds to the constant 0
10448 or 1 or one of the six possible comparisons.
10450 This handles common cases like (a > b) == 0 but also handles
10451 expressions like ((x > y) - (y > x)) > 0, which supposedly
10452 occur in macroized code. */
10454 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10456 tree cval1 = 0, cval2 = 0;
10458 if (twoval_comparison_p (arg0, &cval1, &cval2)
10459 /* Don't handle degenerate cases here; they should already
10460 have been handled anyway. */
10461 && cval1 != 0 && cval2 != 0
10462 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10463 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10464 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10465 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10466 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10467 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10468 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10470 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10471 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10473 /* We can't just pass T to eval_subst in case cval1 or cval2
10474 was the same as ARG1. */
10476 tree high_result
10477 = fold_build2_loc (loc, code, type,
10478 eval_subst (loc, arg0, cval1, maxval,
10479 cval2, minval),
10480 arg1);
10481 tree equal_result
10482 = fold_build2_loc (loc, code, type,
10483 eval_subst (loc, arg0, cval1, maxval,
10484 cval2, maxval),
10485 arg1);
10486 tree low_result
10487 = fold_build2_loc (loc, code, type,
10488 eval_subst (loc, arg0, cval1, minval,
10489 cval2, maxval),
10490 arg1);
10492 /* All three of these results should be 0 or 1. Confirm they are.
10493 Then use those values to select the proper code to use. */
10495 if (TREE_CODE (high_result) == INTEGER_CST
10496 && TREE_CODE (equal_result) == INTEGER_CST
10497 && TREE_CODE (low_result) == INTEGER_CST)
10499 /* Make a 3-bit mask with the high-order bit being the
10500 value for `>', the next for '=', and the low for '<'. */
10501 switch ((integer_onep (high_result) * 4)
10502 + (integer_onep (equal_result) * 2)
10503 + integer_onep (low_result))
10505 case 0:
10506 /* Always false. */
10507 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10508 case 1:
10509 code = LT_EXPR;
10510 break;
10511 case 2:
10512 code = EQ_EXPR;
10513 break;
10514 case 3:
10515 code = LE_EXPR;
10516 break;
10517 case 4:
10518 code = GT_EXPR;
10519 break;
10520 case 5:
10521 code = NE_EXPR;
10522 break;
10523 case 6:
10524 code = GE_EXPR;
10525 break;
10526 case 7:
10527 /* Always true. */
10528 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10531 return fold_build2_loc (loc, code, type, cval1, cval2);
10536 return NULL_TREE;
10540 /* Subroutine of fold_binary. Optimize complex multiplications of the
10541 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10542 argument EXPR represents the expression "z" of type TYPE. */
10544 static tree
10545 fold_mult_zconjz (location_t loc, tree type, tree expr)
10547 tree itype = TREE_TYPE (type);
10548 tree rpart, ipart, tem;
10550 if (TREE_CODE (expr) == COMPLEX_EXPR)
10552 rpart = TREE_OPERAND (expr, 0);
10553 ipart = TREE_OPERAND (expr, 1);
10555 else if (TREE_CODE (expr) == COMPLEX_CST)
10557 rpart = TREE_REALPART (expr);
10558 ipart = TREE_IMAGPART (expr);
10560 else
10562 expr = save_expr (expr);
10563 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10564 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10567 rpart = save_expr (rpart);
10568 ipart = save_expr (ipart);
10569 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10570 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10571 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10572 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10573 build_zero_cst (itype));
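/* Worked example: for z = 3 + 4i, the expansion above yields
   COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. 25 + 0i, the squared magnitude
   of z with an explicit zero imaginary part.  */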
10577 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10578 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10579 true if successful. */
10581 static bool
10582 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10584 unsigned HOST_WIDE_INT i, nunits;
10586 if (TREE_CODE (arg) == VECTOR_CST
10587 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10589 for (i = 0; i < nunits; ++i)
10590 elts[i] = VECTOR_CST_ELT (arg, i);
10592 else if (TREE_CODE (arg) == CONSTRUCTOR)
10594 constructor_elt *elt;
10596 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10597 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10598 return false;
10599 else
10600 elts[i] = elt->value;
10602 else
10603 return false;
10604 for (; i < nelts; i++)
10605 elts[i]
10606 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10607 return true;
10610 /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10611 mask for VLA vec_perm folding.
10612 REASON, if specified, will contain the reason why SEL is not suitable.
10613 Used only for debugging and unit-testing. */
10615 static bool
10616 valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10617 const vec_perm_indices &sel,
10618 const char **reason = NULL)
10620 unsigned sel_npatterns = sel.encoding ().npatterns ();
10621 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10623 if (!(pow2p_hwi (sel_npatterns)
10624 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10625 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10627 if (reason)
10628 *reason = "npatterns is not power of 2";
10629 return false;
10632 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10633 For example: sel.length = 2 + 2x and sel npatterns = 4. */
10634 poly_uint64 esel;
10635 if (!multiple_p (sel.length (), sel_npatterns, &esel))
10637 if (reason)
10638 *reason = "sel.length is not multiple of sel_npatterns";
10639 return false;
10642 if (sel_nelts_per_pattern < 3)
10643 return true;
10645 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10647 poly_uint64 a1 = sel[pattern + sel_npatterns];
10648 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10649 HOST_WIDE_INT step;
10650 if (!poly_int64 (a2 - a1).is_constant (&step))
10652 if (reason)
10653 *reason = "step is not constant";
10654 return false;
10656 // FIXME: Punt on step < 0 for now, revisit later.
10657 if (step < 0)
10658 return false;
10659 if (step == 0)
10660 continue;
10662 if (!pow2p_hwi (step))
10664 if (reason)
10665 *reason = "step is not power of 2";
10666 return false;
10669 /* Ensure that stepped sequence of the pattern selects elements
10670 only from the same input vector. */
10671 uint64_t q1, qe;
10672 poly_uint64 r1, re;
10673 poly_uint64 ae = a1 + (esel - 2) * step;
10674 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10676 if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10677 && can_div_trunc_p (ae, arg_len, &qe, &re)
10678 && q1 == qe))
10680 if (reason)
10681 *reason = "crossed input vectors";
10682 return false;
10685 /* Ensure that the stepped sequence always selects from the same
10686 input pattern. */
10687 unsigned arg_npatterns
10688 = ((q1 & 1) == 0) ? VECTOR_CST_NPATTERNS (arg0)
10689 : VECTOR_CST_NPATTERNS (arg1);
10691 if (!multiple_p (step, arg_npatterns))
10693 if (reason)
10694 *reason = "step is not multiple of npatterns";
10695 return false;
10699 return true;
10702 /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10703 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10704 REASON has the same purpose as described in
10705 valid_mask_for_fold_vec_perm_cst_p. */
10707 static tree
10708 fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10709 const char **reason = NULL)
10711 unsigned res_npatterns, res_nelts_per_pattern;
10712 unsigned HOST_WIDE_INT res_nelts;
10714 /* (1) If SEL is a suitable mask as determined by
10715 valid_mask_for_fold_vec_perm_cst_p, then:
10716 res_npatterns = max of npatterns between ARG0, ARG1, and SEL
10717 res_nelts_per_pattern = max of nelts_per_pattern between
10718 ARG0, ARG1 and SEL.
10719 (2) If SEL is not a suitable mask, and TYPE is VLS then:
10720 res_npatterns = nelts in result vector.
10721 res_nelts_per_pattern = 1.
10722 This exception is made so that VLS ARG0, ARG1 and SEL work as before. */
10723 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10725 res_npatterns
10726 = std::max (VECTOR_CST_NPATTERNS (arg0),
10727 std::max (VECTOR_CST_NPATTERNS (arg1),
10728 sel.encoding ().npatterns ()));
10730 res_nelts_per_pattern
10731 = std::max (VECTOR_CST_NELTS_PER_PATTERN (arg0),
10732 std::max (VECTOR_CST_NELTS_PER_PATTERN (arg1),
10733 sel.encoding ().nelts_per_pattern ()));
10735 res_nelts = res_npatterns * res_nelts_per_pattern;
10737 else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10739 res_npatterns = res_nelts;
10740 res_nelts_per_pattern = 1;
10742 else
10743 return NULL_TREE;
10745 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10746 for (unsigned i = 0; i < res_nelts; i++)
10748 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10749 uint64_t q;
10750 poly_uint64 r;
10751 unsigned HOST_WIDE_INT index;
10753 /* Punt if sel[i] /trunc_div len cannot be determined,
10754 because the input vector to be chosen will depend on
10755 runtime vector length.
10756 For example, if len == 4 + 4x and sel[i] == 4:
10757 if len at runtime equals 4, we choose arg1[0], while
10758 for any other value of len > 4 at runtime we choose arg0[4],
10759 which makes the element choice dependent on runtime vector length. */
10760 if (!can_div_trunc_p (sel[i], len, &q, &r))
10762 if (reason)
10763 *reason = "cannot divide selector element by arg len";
10764 return NULL_TREE;
10767 /* sel[i] % len gives the index of the element in the chosen input
10768 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10769 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10770 if (!r.is_constant (&index))
10772 if (reason)
10773 *reason = "remainder is not constant";
10774 return NULL_TREE;
10777 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10778 tree elem = vector_cst_elt (arg, index);
10779 out_elts.quick_push (elem);
10782 return out_elts.build ();
10785 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10786 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10787 NULL_TREE otherwise. */
10789 tree
10790 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10792 unsigned int i;
10793 unsigned HOST_WIDE_INT nelts;
10795 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10796 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10797 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10799 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10800 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10801 return NULL_TREE;
10803 if (TREE_CODE (arg0) == VECTOR_CST
10804 && TREE_CODE (arg1) == VECTOR_CST)
10805 return fold_vec_perm_cst (type, arg0, arg1, sel);
10807 /* For fall back case, we want to ensure we have VLS vectors
10808 with equal length. */
10809 if (!sel.length ().is_constant (&nelts))
10810 return NULL_TREE;
10812 gcc_assert (known_eq (sel.length (),
10813 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10814 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10815 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10816 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10817 return NULL_TREE;
10819 vec<constructor_elt, va_gc> *v;
10820 vec_alloc (v, nelts);
10821 for (i = 0; i < nelts; i++)
10823 HOST_WIDE_INT index;
10824 if (!sel[i].is_constant (&index))
10825 return NULL_TREE;
10826 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10828 return build_constructor (type, v);
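/* Illustrative sketch (compiled out) with hypothetical V4SI constants
   ARG0 = {a0, a1, a2, a3} and ARG1 = {b0, b1, b2, b3}: the selector
   {0, 4, 1, 5} interleaves the low halves, folding to {a0, b0, a1, b1}.  */
#if 0
  vec_perm_builder builder (4, 4, 1);   /* 4 elements, 4 patterns, 1 each.  */
  builder.quick_push (0);
  builder.quick_push (4);
  builder.quick_push (1);
  builder.quick_push (5);
  vec_perm_indices sel (builder, 2, 4); /* Two inputs of 4 elements.  */
  tree res = fold_vec_perm (v4si_type, arg0, arg1, sel);
#endif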
10831 /* Try to fold a pointer difference of type TYPE between two address
10832 expressions of array references AREF0 and AREF1 using location LOC.
10833 Return a simplified expression for the difference or NULL_TREE. */
10835 static tree
10836 fold_addr_of_array_ref_difference (location_t loc, tree type,
10837 tree aref0, tree aref1,
10838 bool use_pointer_diff)
10840 tree base0 = TREE_OPERAND (aref0, 0);
10841 tree base1 = TREE_OPERAND (aref1, 0);
10842 tree base_offset = build_int_cst (type, 0);
10844 /* If the bases are array references as well, recurse. If the bases
10845 are pointer indirections compute the difference of the pointers.
10846 If the bases are equal, we are set. */
10847 if ((TREE_CODE (base0) == ARRAY_REF
10848 && TREE_CODE (base1) == ARRAY_REF
10849 && (base_offset
10850 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10851 use_pointer_diff)))
10852 || (INDIRECT_REF_P (base0)
10853 && INDIRECT_REF_P (base1)
10854 && (base_offset
10855 = use_pointer_diff
10856 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10857 TREE_OPERAND (base0, 0),
10858 TREE_OPERAND (base1, 0))
10859 : fold_binary_loc (loc, MINUS_EXPR, type,
10860 fold_convert (type,
10861 TREE_OPERAND (base0, 0)),
10862 fold_convert (type,
10863 TREE_OPERAND (base1, 0)))))
10864 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10866 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10867 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10868 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10869 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10870 return fold_build2_loc (loc, PLUS_EXPR, type,
10871 base_offset,
10872 fold_build2_loc (loc, MULT_EXPR, type,
10873 diff, esz));
10875 return NULL_TREE;
10878 /* If the real or vector real constant CST of type TYPE has an exact
10879 inverse, return it, else return NULL. */
10881 tree
10882 exact_inverse (tree type, tree cst)
10884 REAL_VALUE_TYPE r;
10885 tree unit_type;
10886 machine_mode mode;
10888 switch (TREE_CODE (cst))
10890 case REAL_CST:
10891 r = TREE_REAL_CST (cst);
10893 if (exact_real_inverse (TYPE_MODE (type), &r))
10894 return build_real (type, r);
10896 return NULL_TREE;
10898 case VECTOR_CST:
10900 unit_type = TREE_TYPE (type);
10901 mode = TYPE_MODE (unit_type);
10903 tree_vector_builder elts;
10904 if (!elts.new_unary_operation (type, cst, false))
10905 return NULL_TREE;
10906 unsigned int count = elts.encoded_nelts ();
10907 for (unsigned int i = 0; i < count; ++i)
10909 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10910 if (!exact_real_inverse (mode, &r))
10911 return NULL_TREE;
10912 elts.quick_push (build_real (unit_type, r));
10915 return elts.build ();
10918 default:
10919 return NULL_TREE;
10923 /* Mask out the tz least significant bits of X of type TYPE where
10924 tz is the number of trailing zeroes in Y. */
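/* For example, if Y == 24 (binary 11000, three trailing zeroes), then
   X == 23 (binary 10111) is masked down to 16 (binary 10000).  */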
10925 static wide_int
10926 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10928 int tz = wi::ctz (y);
10929 if (tz > 0)
10930 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10931 return x;
10934 /* Return true when T is an address and is known to be nonzero.
10935 For floating point we further ensure that T is not denormal.
10936 Similar logic is present in nonzero_address in rtlanal.h.
10938 If the return value is based on the assumption that signed overflow
10939 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10940 change *STRICT_OVERFLOW_P. */
10942 static bool
10943 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10945 tree type = TREE_TYPE (t);
10946 enum tree_code code;
10948 /* Doing something useful for floating point would need more work. */
10949 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10950 return false;
10952 code = TREE_CODE (t);
10953 switch (TREE_CODE_CLASS (code))
10955 case tcc_unary:
10956 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10957 strict_overflow_p);
10958 case tcc_binary:
10959 case tcc_comparison:
10960 return tree_binary_nonzero_warnv_p (code, type,
10961 TREE_OPERAND (t, 0),
10962 TREE_OPERAND (t, 1),
10963 strict_overflow_p);
10964 case tcc_constant:
10965 case tcc_declaration:
10966 case tcc_reference:
10967 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10969 default:
10970 break;
10973 switch (code)
10975 case TRUTH_NOT_EXPR:
10976 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10977 strict_overflow_p);
10979 case TRUTH_AND_EXPR:
10980 case TRUTH_OR_EXPR:
10981 case TRUTH_XOR_EXPR:
10982 return tree_binary_nonzero_warnv_p (code, type,
10983 TREE_OPERAND (t, 0),
10984 TREE_OPERAND (t, 1),
10985 strict_overflow_p);
10987 case COND_EXPR:
10988 case CONSTRUCTOR:
10989 case OBJ_TYPE_REF:
10990 case ADDR_EXPR:
10991 case WITH_SIZE_EXPR:
10992 case SSA_NAME:
10993 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10995 case COMPOUND_EXPR:
10996 case MODIFY_EXPR:
10997 case BIND_EXPR:
10998 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10999 strict_overflow_p);
11001 case SAVE_EXPR:
11002 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
11003 strict_overflow_p);
11005 case CALL_EXPR:
11007 tree fndecl = get_callee_fndecl (t);
11008 if (!fndecl) return false;
11009 if (flag_delete_null_pointer_checks && !flag_check_new
11010 && DECL_IS_OPERATOR_NEW_P (fndecl)
11011 && !TREE_NOTHROW (fndecl))
11012 return true;
11013 if (flag_delete_null_pointer_checks
11014 && lookup_attribute ("returns_nonnull",
11015 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
11016 return true;
11017 return alloca_call_p (t);
11020 default:
11021 break;
11023 return false;
11026 /* Return true when T is an address and is known to be nonzero.
11027 Handle warnings about undefined signed overflow. */
11029 bool
11030 tree_expr_nonzero_p (tree t)
11032 bool ret, strict_overflow_p;
11034 strict_overflow_p = false;
11035 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
11036 if (strict_overflow_p)
11037 fold_overflow_warning (("assuming signed overflow does not occur when "
11038 "determining that expression is always "
11039 "non-zero"),
11040 WARN_STRICT_OVERFLOW_MISC);
11041 return ret;
11044 /* Return true if T is known not to be equal to an integer W. */
11046 bool
11047 expr_not_equal_to (tree t, const wide_int &w)
11049 int_range_max vr;
11050 switch (TREE_CODE (t))
11052 case INTEGER_CST:
11053 return wi::to_wide (t) != w;
11055 case SSA_NAME:
11056 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11057 return false;
11059 if (cfun)
11060 get_range_query (cfun)->range_of_expr (vr, t);
11061 else
11062 get_global_range_query ()->range_of_expr (vr, t);
11064 if (!vr.undefined_p () && !vr.contains_p (w))
11065 return true;
11066 /* If T has some known zero bits and W has any of those bits set,
11067 then T is known not to be equal to W. */
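/* For example, if get_nonzero_bits (t) == 0xf (so T fits in [0, 15])
   and W == 0x10, then bit_and_not (W, 0xf) == 0x10 is nonzero.  */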
11068 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
11069 TYPE_PRECISION (TREE_TYPE (t))), 0))
11070 return true;
11071 return false;
11073 default:
11074 return false;
11078 /* Fold a binary expression of code CODE and type TYPE with operands
11079 OP0 and OP1. LOC is the location of the resulting expression.
11080 Return the folded expression if folding is successful. Otherwise,
11081 return NULL_TREE. */
11083 tree
11084 fold_binary_loc (location_t loc, enum tree_code code, tree type,
11085 tree op0, tree op1)
11087 enum tree_code_class kind = TREE_CODE_CLASS (code);
11088 tree arg0, arg1, tem;
11089 tree t1 = NULL_TREE;
11090 bool strict_overflow_p;
11091 unsigned int prec;
11093 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11094 && TREE_CODE_LENGTH (code) == 2
11095 && op0 != NULL_TREE
11096 && op1 != NULL_TREE);
11098 arg0 = op0;
11099 arg1 = op1;
11101 /* Strip any conversions that don't change the mode. This is
11102 safe for every expression, except for a comparison expression
11103 because its signedness is derived from its operands. So, in
11104 the latter case, only strip conversions that don't change the
11105 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11106 preserved.
11108 Note that this is done as an internal manipulation within the
11109 constant folder, in order to find the simplest representation
11110 of the arguments so that their form can be studied. In any
11111 case, the appropriate type conversions should be put back in
11112 the tree that will get out of the constant folder. */
11114 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11116 STRIP_SIGN_NOPS (arg0);
11117 STRIP_SIGN_NOPS (arg1);
11119 else
11121 STRIP_NOPS (arg0);
11122 STRIP_NOPS (arg1);
11125 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11126 constant but we can't do arithmetic on them. */
11127 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11129 tem = const_binop (code, type, arg0, arg1);
11130 if (tem != NULL_TREE)
11132 if (TREE_TYPE (tem) != type)
11133 tem = fold_convert_loc (loc, type, tem);
11134 return tem;
11138 /* If this is a commutative operation, and ARG0 is a constant, move it
11139 to ARG1 to reduce the number of tests below. */
11140 if (commutative_tree_code (code)
11141 && tree_swap_operands_p (arg0, arg1))
11142 return fold_build2_loc (loc, code, type, op1, op0);
11144 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11145 to ARG1 to reduce the number of tests below. */
11146 if (kind == tcc_comparison
11147 && tree_swap_operands_p (arg0, arg1))
11148 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11150 tem = generic_simplify (loc, code, type, op0, op1);
11151 if (tem)
11152 return tem;
11154 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11156 First check for cases where an arithmetic operation is applied to a
11157 compound, conditional, or comparison operation. Push the arithmetic
11158 operation inside the compound or conditional to see if any folding
11159 can then be done. Convert comparison to conditional for this purpose.
11160 This also optimizes non-constant cases that used to be done in
11161 expand_expr.
11163 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11164 where one operand is a truth value and the other is a truth value
11165 or a BIT_AND_EXPR with the constant 1. In that case, the
11166 code below would make the expression more complex. Change it to a
11167 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11168 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11170 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11171 || code == EQ_EXPR || code == NE_EXPR)
11172 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11173 && ((truth_value_p (TREE_CODE (arg0))
11174 && (truth_value_p (TREE_CODE (arg1))
11175 || (TREE_CODE (arg1) == BIT_AND_EXPR
11176 && integer_onep (TREE_OPERAND (arg1, 1)))))
11177 || (truth_value_p (TREE_CODE (arg1))
11178 && (truth_value_p (TREE_CODE (arg0))
11179 || (TREE_CODE (arg0) == BIT_AND_EXPR
11180 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11182 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11183 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11184 : TRUTH_XOR_EXPR,
11185 boolean_type_node,
11186 fold_convert_loc (loc, boolean_type_node, arg0),
11187 fold_convert_loc (loc, boolean_type_node, arg1));
11189 if (code == EQ_EXPR)
11190 tem = invert_truthvalue_loc (loc, tem);
11192 return fold_convert_loc (loc, type, tem);
11195 if (TREE_CODE_CLASS (code) == tcc_binary
11196 || TREE_CODE_CLASS (code) == tcc_comparison)
11198 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11200 tem = fold_build2_loc (loc, code, type,
11201 fold_convert_loc (loc, TREE_TYPE (op0),
11202 TREE_OPERAND (arg0, 1)), op1);
11203 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11204 tem);
11206 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11208 tem = fold_build2_loc (loc, code, type, op0,
11209 fold_convert_loc (loc, TREE_TYPE (op1),
11210 TREE_OPERAND (arg1, 1)));
11211 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11212 tem);
11215 if (TREE_CODE (arg0) == COND_EXPR
11216 || TREE_CODE (arg0) == VEC_COND_EXPR
11217 || COMPARISON_CLASS_P (arg0))
11219 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11220 arg0, arg1,
11221 /*cond_first_p=*/1);
11222 if (tem != NULL_TREE)
11223 return tem;
11226 if (TREE_CODE (arg1) == COND_EXPR
11227 || TREE_CODE (arg1) == VEC_COND_EXPR
11228 || COMPARISON_CLASS_P (arg1))
11230 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11231 arg1, arg0,
11232 /*cond_first_p=*/0);
11233 if (tem != NULL_TREE)
11234 return tem;
11238 switch (code)
11240 case MEM_REF:
11241 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11242 if (TREE_CODE (arg0) == ADDR_EXPR
11243 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11245 tree iref = TREE_OPERAND (arg0, 0);
11246 return fold_build2 (MEM_REF, type,
11247 TREE_OPERAND (iref, 0),
11248 int_const_binop (PLUS_EXPR, arg1,
11249 TREE_OPERAND (iref, 1)));
11252 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11253 if (TREE_CODE (arg0) == ADDR_EXPR
11254 && handled_component_p (TREE_OPERAND (arg0, 0)))
11256 tree base;
11257 poly_int64 coffset;
11258 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11259 &coffset);
11260 if (!base)
11261 return NULL_TREE;
11262 return fold_build2 (MEM_REF, type,
11263 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11264 int_const_binop (PLUS_EXPR, arg1,
11265 size_int (coffset)));
11268 return NULL_TREE;
11270 case POINTER_PLUS_EXPR:
11271 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11272 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11273 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11274 return fold_convert_loc (loc, type,
11275 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11276 fold_convert_loc (loc, sizetype,
11277 arg1),
11278 fold_convert_loc (loc, sizetype,
11279 arg0)));
11281 return NULL_TREE;
11283 case PLUS_EXPR:
11284 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11286 /* X + (X / CST) * -CST is X % CST. */
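/* For example, with X == 13 and CST == 4:
   13 + (13 / 4) * -4 == 13 + -12 == 1 == 13 % 4.  */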
11287 if (TREE_CODE (arg1) == MULT_EXPR
11288 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11289 && operand_equal_p (arg0,
11290 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11292 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11293 tree cst1 = TREE_OPERAND (arg1, 1);
11294 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11295 cst1, cst0);
11296 if (sum && integer_zerop (sum))
11297 return fold_convert_loc (loc, type,
11298 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11299 TREE_TYPE (arg0), arg0,
11300 cst0));
11304 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11305 one. Make sure the type is not saturating and has the signedness of
11306 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11307 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11308 if ((TREE_CODE (arg0) == MULT_EXPR
11309 || TREE_CODE (arg1) == MULT_EXPR)
11310 && !TYPE_SATURATING (type)
11311 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11312 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11313 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11315 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11316 if (tem)
11317 return tem;
11320 if (! FLOAT_TYPE_P (type))
11322 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11323 (plus (plus (mult) (mult)) (foo)) so that we can
11324 take advantage of the factoring cases below. */
11325 if (ANY_INTEGRAL_TYPE_P (type)
11326 && TYPE_OVERFLOW_WRAPS (type)
11327 && (((TREE_CODE (arg0) == PLUS_EXPR
11328 || TREE_CODE (arg0) == MINUS_EXPR)
11329 && TREE_CODE (arg1) == MULT_EXPR)
11330 || ((TREE_CODE (arg1) == PLUS_EXPR
11331 || TREE_CODE (arg1) == MINUS_EXPR)
11332 && TREE_CODE (arg0) == MULT_EXPR)))
11334 tree parg0, parg1, parg, marg;
11335 enum tree_code pcode;
11337 if (TREE_CODE (arg1) == MULT_EXPR)
11338 parg = arg0, marg = arg1;
11339 else
11340 parg = arg1, marg = arg0;
11341 pcode = TREE_CODE (parg);
11342 parg0 = TREE_OPERAND (parg, 0);
11343 parg1 = TREE_OPERAND (parg, 1);
11344 STRIP_NOPS (parg0);
11345 STRIP_NOPS (parg1);
11347 if (TREE_CODE (parg0) == MULT_EXPR
11348 && TREE_CODE (parg1) != MULT_EXPR)
11349 return fold_build2_loc (loc, pcode, type,
11350 fold_build2_loc (loc, PLUS_EXPR, type,
11351 fold_convert_loc (loc, type,
11352 parg0),
11353 fold_convert_loc (loc, type,
11354 marg)),
11355 fold_convert_loc (loc, type, parg1));
11356 if (TREE_CODE (parg0) != MULT_EXPR
11357 && TREE_CODE (parg1) == MULT_EXPR)
11358 return
11359 fold_build2_loc (loc, PLUS_EXPR, type,
11360 fold_convert_loc (loc, type, parg0),
11361 fold_build2_loc (loc, pcode, type,
11362 fold_convert_loc (loc, type, marg),
11363 fold_convert_loc (loc, type,
11364 parg1)));
11367 else
11369 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11370 to __complex__ ( x, y ). This is not the same for SNaNs or
11371 if signed zeros are involved. */
11372 if (!HONOR_SNANS (arg0)
11373 && !HONOR_SIGNED_ZEROS (arg0)
11374 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11376 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11377 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11378 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11379 bool arg0rz = false, arg0iz = false;
11380 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11381 || (arg0i && (arg0iz = real_zerop (arg0i))))
11383 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11384 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11385 if (arg0rz && arg1i && real_zerop (arg1i))
11387 tree rp = arg1r ? arg1r
11388 : build1 (REALPART_EXPR, rtype, arg1);
11389 tree ip = arg0i ? arg0i
11390 : build1 (IMAGPART_EXPR, rtype, arg0);
11391 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11393 else if (arg0iz && arg1r && real_zerop (arg1r))
11395 tree rp = arg0r ? arg0r
11396 : build1 (REALPART_EXPR, rtype, arg0);
11397 tree ip = arg1i ? arg1i
11398 : build1 (IMAGPART_EXPR, rtype, arg1);
11399 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11404 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11405 We associate floats only if the user has specified
11406 -fassociative-math. */
11407 if (flag_associative_math
11408 && TREE_CODE (arg1) == PLUS_EXPR
11409 && TREE_CODE (arg0) != MULT_EXPR)
11411 tree tree10 = TREE_OPERAND (arg1, 0);
11412 tree tree11 = TREE_OPERAND (arg1, 1);
11413 if (TREE_CODE (tree11) == MULT_EXPR
11414 && TREE_CODE (tree10) == MULT_EXPR)
11416 tree tree0;
11417 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11418 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11421 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11422 We associate floats only if the user has specified
11423 -fassociative-math. */
11424 if (flag_associative_math
11425 && TREE_CODE (arg0) == PLUS_EXPR
11426 && TREE_CODE (arg1) != MULT_EXPR)
11428 tree tree00 = TREE_OPERAND (arg0, 0);
11429 tree tree01 = TREE_OPERAND (arg0, 1);
11430 if (TREE_CODE (tree01) == MULT_EXPR
11431 && TREE_CODE (tree00) == MULT_EXPR)
11433 tree tree0;
11434 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11435 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11440 bit_rotate:
11441 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11442 is a rotate of A by C1 bits. */
11443 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11444 is a rotate of A by B bits.
11445 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11446 though in this case CODE must be | and not + or ^, otherwise
11447 it doesn't return A when B is 0. */
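/* For example, for a 32-bit unsigned A, (A << 3) + (A >> 29) and
   (A << B) | (A >> (-B & 31)) are both left rotates; only the | form
   stays correct when B == 0, because A | A == A.  */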
11449 enum tree_code code0, code1;
11450 tree rtype;
11451 code0 = TREE_CODE (arg0);
11452 code1 = TREE_CODE (arg1);
11453 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11454 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11455 && operand_equal_p (TREE_OPERAND (arg0, 0),
11456 TREE_OPERAND (arg1, 0), 0)
11457 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11458 TYPE_UNSIGNED (rtype))
11459 /* Only create rotates in complete modes. Other cases are not
11460 expanded properly. */
11461 && (element_precision (rtype)
11462 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11464 tree tree01, tree11;
11465 tree orig_tree01, orig_tree11;
11466 enum tree_code code01, code11;
11468 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11469 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11470 STRIP_NOPS (tree01);
11471 STRIP_NOPS (tree11);
11472 code01 = TREE_CODE (tree01);
11473 code11 = TREE_CODE (tree11);
11474 if (code11 != MINUS_EXPR
11475 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11477 std::swap (code0, code1);
11478 std::swap (code01, code11);
11479 std::swap (tree01, tree11);
11480 std::swap (orig_tree01, orig_tree11);
11482 if (code01 == INTEGER_CST
11483 && code11 == INTEGER_CST
11484 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11485 == element_precision (rtype)))
11487 tem = build2_loc (loc, LROTATE_EXPR,
11488 rtype, TREE_OPERAND (arg0, 0),
11489 code0 == LSHIFT_EXPR
11490 ? orig_tree01 : orig_tree11);
11491 return fold_convert_loc (loc, type, tem);
11493 else if (code11 == MINUS_EXPR)
11495 tree tree110, tree111;
11496 tree110 = TREE_OPERAND (tree11, 0);
11497 tree111 = TREE_OPERAND (tree11, 1);
11498 STRIP_NOPS (tree110);
11499 STRIP_NOPS (tree111);
11500 if (TREE_CODE (tree110) == INTEGER_CST
11501 && compare_tree_int (tree110,
11502 element_precision (rtype)) == 0
11503 && operand_equal_p (tree01, tree111, 0))
11505 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11506 ? LROTATE_EXPR : RROTATE_EXPR),
11507 rtype, TREE_OPERAND (arg0, 0),
11508 orig_tree01);
11509 return fold_convert_loc (loc, type, tem);
11512 else if (code == BIT_IOR_EXPR
11513 && code11 == BIT_AND_EXPR
11514 && pow2p_hwi (element_precision (rtype)))
11516 tree tree110, tree111;
11517 tree110 = TREE_OPERAND (tree11, 0);
11518 tree111 = TREE_OPERAND (tree11, 1);
11519 STRIP_NOPS (tree110);
11520 STRIP_NOPS (tree111);
11521 if (TREE_CODE (tree110) == NEGATE_EXPR
11522 && TREE_CODE (tree111) == INTEGER_CST
11523 && compare_tree_int (tree111,
11524 element_precision (rtype) - 1) == 0
11525 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11527 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11528 ? LROTATE_EXPR : RROTATE_EXPR),
11529 rtype, TREE_OPERAND (arg0, 0),
11530 orig_tree01);
11531 return fold_convert_loc (loc, type, tem);
11537 associate:
11538 /* In most languages, we can't associate operations on floats through
11539 parentheses. Rather than remember where the parentheses were, we
11540 don't associate floats at all, unless the user has specified
11541 -fassociative-math.
11542 And, we need to make sure type is not saturating. */
11544 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11545 && !TYPE_SATURATING (type)
11546 && !TYPE_OVERFLOW_SANITIZED (type))
11548 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11549 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11550 tree atype = type;
11551 bool ok = true;
11553 /* Split both trees into variables, constants, and literals. Then
11554 associate each group together, the constants with literals,
11555 then the result with variables. This increases the chances of
11556 literals being recombined later and of generating relocatable
11557 expressions for the sum of a constant and literal. */
11558 var0 = split_tree (arg0, type, code,
11559 &minus_var0, &con0, &minus_con0,
11560 &lit0, &minus_lit0, 0);
11561 var1 = split_tree (arg1, type, code,
11562 &minus_var1, &con1, &minus_con1,
11563 &lit1, &minus_lit1, code == MINUS_EXPR);
11565 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11566 if (code == MINUS_EXPR)
11567 code = PLUS_EXPR;
11569 /* With undefined overflow prefer doing association in a type
11570 which wraps on overflow, if that is one of the operand types. */
11571 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11572 && !TYPE_OVERFLOW_WRAPS (type))
11574 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11575 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11576 atype = TREE_TYPE (arg0);
11577 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11578 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11579 atype = TREE_TYPE (arg1);
11580 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11583 /* With undefined overflow we can only associate constants with one
11584 variable, and constants whose association doesn't overflow. */
11585 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11586 && !TYPE_OVERFLOW_WRAPS (atype))
11588 if ((var0 && var1) || (minus_var0 && minus_var1))
11590 /* ??? If split_tree would handle NEGATE_EXPR we could
11591 simply reject these cases and the allowed cases would
11592 be the var0/minus_var1 ones. */
11593 tree tmp0 = var0 ? var0 : minus_var0;
11594 tree tmp1 = var1 ? var1 : minus_var1;
11595 bool one_neg = false;
11597 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11599 tmp0 = TREE_OPERAND (tmp0, 0);
11600 one_neg = !one_neg;
11602 if (CONVERT_EXPR_P (tmp0)
11603 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11604 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11605 <= TYPE_PRECISION (atype)))
11606 tmp0 = TREE_OPERAND (tmp0, 0);
11607 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11609 tmp1 = TREE_OPERAND (tmp1, 0);
11610 one_neg = !one_neg;
11612 if (CONVERT_EXPR_P (tmp1)
11613 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11614 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11615 <= TYPE_PRECISION (atype)))
11616 tmp1 = TREE_OPERAND (tmp1, 0);
11617 /* The only case we can still associate with two variables
11618 is if they cancel out. */
11619 if (!one_neg
11620 || !operand_equal_p (tmp0, tmp1, 0))
11621 ok = false;
11623 else if ((var0 && minus_var1
11624 && ! operand_equal_p (var0, minus_var1, 0))
11625 || (minus_var0 && var1
11626 && ! operand_equal_p (minus_var0, var1, 0)))
11627 ok = false;
11630 /* Only do something if we found more than two objects. Otherwise,
11631 nothing has changed and we risk infinite recursion. */
11632 if (ok
11633 && ((var0 != 0) + (var1 != 0)
11634 + (minus_var0 != 0) + (minus_var1 != 0)
11635 + (con0 != 0) + (con1 != 0)
11636 + (minus_con0 != 0) + (minus_con1 != 0)
11637 + (lit0 != 0) + (lit1 != 0)
11638 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11640 var0 = associate_trees (loc, var0, var1, code, atype);
11641 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11642 code, atype);
11643 con0 = associate_trees (loc, con0, con1, code, atype);
11644 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11645 code, atype);
11646 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11647 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11648 code, atype);
11650 if (minus_var0 && var0)
11652 var0 = associate_trees (loc, var0, minus_var0,
11653 MINUS_EXPR, atype);
11654 minus_var0 = 0;
11656 if (minus_con0 && con0)
11658 con0 = associate_trees (loc, con0, minus_con0,
11659 MINUS_EXPR, atype);
11660 minus_con0 = 0;
11663 /* Preserve the MINUS_EXPR if the negative part of the literal is
11664 greater than the positive part. Otherwise, the multiplicative
11665 folding code (i.e. extract_muldiv) may be fooled when
11666 unsigned constants are subtracted, like in the following
11667 example: ((X*2 + 4) - 8U)/2. */
11668 if (minus_lit0 && lit0)
11670 if (TREE_CODE (lit0) == INTEGER_CST
11671 && TREE_CODE (minus_lit0) == INTEGER_CST
11672 && tree_int_cst_lt (lit0, minus_lit0)
11673 /* But avoid ending up with only negated parts. */
11674 && (var0 || con0))
11676 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11677 MINUS_EXPR, atype);
11678 lit0 = 0;
11680 else
11682 lit0 = associate_trees (loc, lit0, minus_lit0,
11683 MINUS_EXPR, atype);
11684 minus_lit0 = 0;
11688 /* Don't introduce overflows through reassociation. */
11689 if ((lit0 && TREE_OVERFLOW_P (lit0))
11690 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11691 return NULL_TREE;
11693 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11694 con0 = associate_trees (loc, con0, lit0, code, atype);
11695 lit0 = 0;
11696 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11697 code, atype);
11698 minus_lit0 = 0;
11700 /* Eliminate minus_con0. */
11701 if (minus_con0)
11703 if (con0)
11704 con0 = associate_trees (loc, con0, minus_con0,
11705 MINUS_EXPR, atype);
11706 else if (var0)
11707 var0 = associate_trees (loc, var0, minus_con0,
11708 MINUS_EXPR, atype);
11709 else
11710 gcc_unreachable ();
11711 minus_con0 = 0;
11714 /* Eliminate minus_var0. */
11715 if (minus_var0)
11717 if (con0)
11718 con0 = associate_trees (loc, con0, minus_var0,
11719 MINUS_EXPR, atype);
11720 else
11721 gcc_unreachable ();
11722 minus_var0 = 0;
11725 return
11726 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11727 code, atype));
11731 return NULL_TREE;
11733 case POINTER_DIFF_EXPR:
11734 case MINUS_EXPR:
11735 /* Fold &a[i] - &a[j] to i-j. */
11736 if (TREE_CODE (arg0) == ADDR_EXPR
11737 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11738 && TREE_CODE (arg1) == ADDR_EXPR
11739 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11741 tree tem = fold_addr_of_array_ref_difference (loc, type,
11742 TREE_OPERAND (arg0, 0),
11743 TREE_OPERAND (arg1, 0),
11744 code
11745 == POINTER_DIFF_EXPR);
11746 if (tem)
11747 return tem;
11750 /* Further transformations are not for pointers. */
11751 if (code == POINTER_DIFF_EXPR)
11752 return NULL_TREE;
11754 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11755 if (TREE_CODE (arg0) == NEGATE_EXPR
11756 && negate_expr_p (op1)
11757 /* If arg0 is e.g. unsigned int and type is int, then this could
11758 introduce UB, because if A is INT_MIN at runtime, the original
11759 expression can be well defined while the latter is not.
11760 See PR83269. */
11761 && !(ANY_INTEGRAL_TYPE_P (type)
11762 && TYPE_OVERFLOW_UNDEFINED (type)
11763 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11764 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11765 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11766 fold_convert_loc (loc, type,
11767 TREE_OPERAND (arg0, 0)));
11769 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11770 __complex__ ( x, -y ). This is not the same for SNaNs or if
11771 signed zeros are involved. */
11772 if (!HONOR_SNANS (arg0)
11773 && !HONOR_SIGNED_ZEROS (arg0)
11774 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11776 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11777 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11778 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11779 bool arg0rz = false, arg0iz = false;
11780 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11781 || (arg0i && (arg0iz = real_zerop (arg0i))))
11783 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11784 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11785 if (arg0rz && arg1i && real_zerop (arg1i))
11787 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11788 arg1r ? arg1r
11789 : build1 (REALPART_EXPR, rtype, arg1));
11790 tree ip = arg0i ? arg0i
11791 : build1 (IMAGPART_EXPR, rtype, arg0);
11792 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11794 else if (arg0iz && arg1r && real_zerop (arg1r))
11796 tree rp = arg0r ? arg0r
11797 : build1 (REALPART_EXPR, rtype, arg0);
11798 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11799 arg1i ? arg1i
11800 : build1 (IMAGPART_EXPR, rtype, arg1));
11801 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11806 /* A - B -> A + (-B) if B is easily negatable. */
11807 if (negate_expr_p (op1)
11808 && ! TYPE_OVERFLOW_SANITIZED (type)
11809 && ((FLOAT_TYPE_P (type)
11810 /* Avoid this transformation if B is a positive REAL_CST. */
11811 && (TREE_CODE (op1) != REAL_CST
11812 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11813 || INTEGRAL_TYPE_P (type)))
11814 return fold_build2_loc (loc, PLUS_EXPR, type,
11815 fold_convert_loc (loc, type, arg0),
11816 negate_expr (op1));
11818 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11819 one. Make sure the type is not saturating and has the signedness of
11820 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11821 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11822 if ((TREE_CODE (arg0) == MULT_EXPR
11823 || TREE_CODE (arg1) == MULT_EXPR)
11824 && !TYPE_SATURATING (type)
11825 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11826 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11827 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11829 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11830 if (tem)
11831 return tem;
11834 goto associate;
11836 case MULT_EXPR:
11837 if (! FLOAT_TYPE_P (type))
11839 /* Transform x * -C into -x * C if x is easily negatable. */
11840 if (TREE_CODE (op1) == INTEGER_CST
11841 && tree_int_cst_sgn (op1) == -1
11842 && negate_expr_p (op0)
11843 && negate_expr_p (op1)
11844 && (tem = negate_expr (op1)) != op1
11845 && ! TREE_OVERFLOW (tem))
11846 return fold_build2_loc (loc, MULT_EXPR, type,
11847 fold_convert_loc (loc, type,
11848 negate_expr (op0)), tem);
11850 strict_overflow_p = false;
11851 if (TREE_CODE (arg1) == INTEGER_CST
11852 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11853 &strict_overflow_p)) != 0)
11855 if (strict_overflow_p)
11856 fold_overflow_warning (("assuming signed overflow does not "
11857 "occur when simplifying "
11858 "multiplication"),
11859 WARN_STRICT_OVERFLOW_MISC);
11860 return fold_convert_loc (loc, type, tem);
11863 /* Optimize z * conj(z) for integer complex numbers. */
11864 if (TREE_CODE (arg0) == CONJ_EXPR
11865 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11866 return fold_mult_zconjz (loc, type, arg1);
11867 if (TREE_CODE (arg1) == CONJ_EXPR
11868 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11869 return fold_mult_zconjz (loc, type, arg0);
11871 else
11873 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11874 This is not the same for NaNs or if signed zeros are
11875 involved. */
11876 if (!HONOR_NANS (arg0)
11877 && !HONOR_SIGNED_ZEROS (arg0)
11878 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11879 && TREE_CODE (arg1) == COMPLEX_CST
11880 && real_zerop (TREE_REALPART (arg1)))
11882 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11883 if (real_onep (TREE_IMAGPART (arg1)))
11884 return
11885 fold_build2_loc (loc, COMPLEX_EXPR, type,
11886 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11887 rtype, arg0)),
11888 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11889 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11890 return
11891 fold_build2_loc (loc, COMPLEX_EXPR, type,
11892 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11893 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11894 rtype, arg0)));
11897 /* Optimize z * conj(z) for floating point complex numbers.
11898 Guarded by flag_unsafe_math_optimizations as non-finite
11899 imaginary components don't produce scalar results. */
11900 if (flag_unsafe_math_optimizations
11901 && TREE_CODE (arg0) == CONJ_EXPR
11902 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11903 return fold_mult_zconjz (loc, type, arg1);
11904 if (flag_unsafe_math_optimizations
11905 && TREE_CODE (arg1) == CONJ_EXPR
11906 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11907 return fold_mult_zconjz (loc, type, arg0);
11909 goto associate;
11911 case BIT_IOR_EXPR:
11912 /* Canonicalize (X & C1) | C2. */
11913 if (TREE_CODE (arg0) == BIT_AND_EXPR
11914 && TREE_CODE (arg1) == INTEGER_CST
11915 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11917 int width = TYPE_PRECISION (type), w;
11918 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11919 wide_int c2 = wi::to_wide (arg1);
11921 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11922 if ((c1 & c2) == c1)
11923 return omit_one_operand_loc (loc, type, arg1,
11924 TREE_OPERAND (arg0, 0));
11926 wide_int msk = wi::mask (width, false,
11927 TYPE_PRECISION (TREE_TYPE (arg1)));
11929 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11930 if (wi::bit_and_not (msk, c1 | c2) == 0)
11932 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11933 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11936 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11937 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11938 mode which allows further optimizations. */
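/* For example, (X & 0xf0) | 0x3c becomes (X & 0xc0) | 0x3c, since
   the 0x30 bits of C1 are forced to 1 by C2 anyway.  */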
11939 c1 &= msk;
11940 c2 &= msk;
11941 wide_int c3 = wi::bit_and_not (c1, c2);
11942 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11944 wide_int mask = wi::mask (w, false,
11945 TYPE_PRECISION (type));
11946 if (((c1 | c2) & mask) == mask
11947 && wi::bit_and_not (c1, mask) == 0)
11949 c3 = mask;
11950 break;
11954 if (c3 != c1)
11956 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11957 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11958 wide_int_to_tree (type, c3));
11959 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11963 /* See if this can be simplified into a rotate first. If that
11964 is unsuccessful, continue in the association code. */
11965 goto bit_rotate;
11967 case BIT_XOR_EXPR:
11968 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11969 if (TREE_CODE (arg0) == BIT_AND_EXPR
11970 && INTEGRAL_TYPE_P (type)
11971 && integer_onep (TREE_OPERAND (arg0, 1))
11972 && integer_onep (arg1))
11973 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11974 build_zero_cst (TREE_TYPE (arg0)));
11976 /* See if this can be simplified into a rotate first. If that
11977 is unsuccessful, continue in the association code. */
11978 goto bit_rotate;
11980 case BIT_AND_EXPR:
11981 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11982 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11983 && INTEGRAL_TYPE_P (type)
11984 && integer_onep (TREE_OPERAND (arg0, 1))
11985 && integer_onep (arg1))
11987 tree tem2;
11988 tem = TREE_OPERAND (arg0, 0);
11989 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11990 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11991 tem, tem2);
11992 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11993 build_zero_cst (TREE_TYPE (tem)));
11995 /* Fold ~X & 1 as (X & 1) == 0. */
11996 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11997 && INTEGRAL_TYPE_P (type)
11998 && integer_onep (arg1))
12000 tree tem2;
12001 tem = TREE_OPERAND (arg0, 0);
12002 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
12003 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
12004 tem, tem2);
12005 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
12006 build_zero_cst (TREE_TYPE (tem)));
12008 /* Fold !X & 1 as X == 0. */
12009 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12010 && integer_onep (arg1))
12012 tem = TREE_OPERAND (arg0, 0);
12013 return fold_build2_loc (loc, EQ_EXPR, type, tem,
12014 build_zero_cst (TREE_TYPE (tem)));
12017 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
12018 multiple of 1 << CST. */
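/* For example, (X * 8) & -8 folds to X * 8: a multiple of 8 already
   has its three low bits clear, and -8 only clears those same bits.  */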
12019 if (TREE_CODE (arg1) == INTEGER_CST)
12021 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12022 wide_int ncst1 = -cst1;
12023 if ((cst1 & ncst1) == ncst1
12024 && multiple_of_p (type, arg0,
12025 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
12026 return fold_convert_loc (loc, type, arg0);
12029 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12030 bits from CST2. */
12031 if (TREE_CODE (arg1) == INTEGER_CST
12032 && TREE_CODE (arg0) == MULT_EXPR
12033 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12035 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
12036 wide_int masked
12037 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
12039 if (masked == 0)
12040 return omit_two_operands_loc (loc, type, build_zero_cst (type),
12041 arg0, arg1);
12042 else if (masked != warg1)
12044 /* Avoid the transform if arg1 is a mask of some
12045 mode which allows further optimizations. */
12046 int pop = wi::popcount (warg1);
12047 if (!(pop >= BITS_PER_UNIT
12048 && pow2p_hwi (pop)
12049 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
12050 return fold_build2_loc (loc, code, type, op0,
12051 wide_int_to_tree (type, masked));
12055 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
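/* 0377 is 255, so for an 8-bit unsigned char the mask covers every
   bit of C and the BIT_AND_EXPR is redundant.  */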
12056 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12057 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12059 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12061 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
12062 if (mask == -1)
12063 return
12064 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12067 goto associate;
12069 case RDIV_EXPR:
12070 /* Don't touch a floating-point divide by zero unless the mode
12071 of the constant can represent infinity. */
12072 if (TREE_CODE (arg1) == REAL_CST
12073 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12074 && real_zerop (arg1))
12075 return NULL_TREE;
12077 /* (-A) / (-B) -> A / B */
12078 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12079 return fold_build2_loc (loc, RDIV_EXPR, type,
12080 TREE_OPERAND (arg0, 0),
12081 negate_expr (arg1));
12082 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12083 return fold_build2_loc (loc, RDIV_EXPR, type,
12084 negate_expr (arg0),
12085 TREE_OPERAND (arg1, 0));
12086 return NULL_TREE;
12088 case TRUNC_DIV_EXPR:
12089 /* Fall through */
12091 case FLOOR_DIV_EXPR:
12092 /* Simplify A / (B << N) where A and B are positive and B is
12093 a power of 2, to A >> (N + log2(B)). */
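/* For example, A / (4 << N) becomes A >> (N + 2) when A is known to
   be non-negative, since log2(4) == 2.  */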
12094 strict_overflow_p = false;
12095 if (TREE_CODE (arg1) == LSHIFT_EXPR
12096 && (TYPE_UNSIGNED (type)
12097 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12099 tree sval = TREE_OPERAND (arg1, 0);
12100 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12102 tree sh_cnt = TREE_OPERAND (arg1, 1);
12103 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12104 wi::exact_log2 (wi::to_wide (sval)));
12106 if (strict_overflow_p)
12107 fold_overflow_warning (("assuming signed overflow does not "
12108 "occur when simplifying A / (B << N)"),
12109 WARN_STRICT_OVERFLOW_MISC);
12111 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12112 sh_cnt, pow2);
12113 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12114 fold_convert_loc (loc, type, arg0), sh_cnt);
12118 /* Fall through */
12120 case ROUND_DIV_EXPR:
12121 case CEIL_DIV_EXPR:
12122 case EXACT_DIV_EXPR:
12123 if (integer_zerop (arg1))
12124 return NULL_TREE;
12126 /* Convert -A / -B to A / B when the type is signed and overflow is
12127 undefined. */
12128 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12129 && TREE_CODE (op0) == NEGATE_EXPR
12130 && negate_expr_p (op1))
12132 if (ANY_INTEGRAL_TYPE_P (type))
12133 fold_overflow_warning (("assuming signed overflow does not occur "
12134 "when distributing negation across "
12135 "division"),
12136 WARN_STRICT_OVERFLOW_MISC);
12137 return fold_build2_loc (loc, code, type,
12138 fold_convert_loc (loc, type,
12139 TREE_OPERAND (arg0, 0)),
12140 negate_expr (op1));
12142 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12143 && TREE_CODE (arg1) == NEGATE_EXPR
12144 && negate_expr_p (op0))
12146 if (ANY_INTEGRAL_TYPE_P (type))
12147 fold_overflow_warning (("assuming signed overflow does not occur "
12148 "when distributing negation across "
12149 "division"),
12150 WARN_STRICT_OVERFLOW_MISC);
12151 return fold_build2_loc (loc, code, type,
12152 negate_expr (op0),
12153 fold_convert_loc (loc, type,
12154 TREE_OPERAND (arg1, 0)));
12157 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12158 operation, EXACT_DIV_EXPR.
12160 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12161 At one time others generated faster code; it's not clear whether they do
12162 after the last round of changes to the DIV code in expmed.cc. */
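/* For example, (N * 12) / 4 is known to divide exactly, so a
   CEIL_DIV_EXPR or FLOOR_DIV_EXPR of that form becomes an
   EXACT_DIV_EXPR, which expands to the cheapest division sequence.  */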
12163 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12164 && multiple_of_p (type, arg0, arg1))
12165 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12166 fold_convert (type, arg0),
12167 fold_convert (type, arg1));
12169 strict_overflow_p = false;
12170 if (TREE_CODE (arg1) == INTEGER_CST
12171 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12172 &strict_overflow_p)) != 0)
12174 if (strict_overflow_p)
12175 fold_overflow_warning (("assuming signed overflow does not occur "
12176 "when simplifying division"),
12177 WARN_STRICT_OVERFLOW_MISC);
12178 return fold_convert_loc (loc, type, tem);
12181 return NULL_TREE;
12183 case CEIL_MOD_EXPR:
12184 case FLOOR_MOD_EXPR:
12185 case ROUND_MOD_EXPR:
12186 case TRUNC_MOD_EXPR:
12187 strict_overflow_p = false;
12188 if (TREE_CODE (arg1) == INTEGER_CST
12189 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12190 &strict_overflow_p)) != 0)
12192 if (strict_overflow_p)
12193 fold_overflow_warning (("assuming signed overflow does not occur "
12194 "when simplifying modulus"),
12195 WARN_STRICT_OVERFLOW_MISC);
12196 return fold_convert_loc (loc, type, tem);
12199 return NULL_TREE;
12201 case LROTATE_EXPR:
12202 case RROTATE_EXPR:
12203 case RSHIFT_EXPR:
12204 case LSHIFT_EXPR:
12205 /* Since a negative shift count is not well-defined,
12206 don't try to compute it in the compiler. */
12207 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12208 return NULL_TREE;
12210 prec = element_precision (type);
12212 /* If we have a rotate of a bit operation with the rotate count and
12213 the second operand of the bit operation both constant,
12214 permute the two operations. */
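/* For example, for a 32-bit type, (X & 0xff) rotated right by 8
   becomes (X rotated right by 8) & 0xff000000.  */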
12215 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12216 && (TREE_CODE (arg0) == BIT_AND_EXPR
12217 || TREE_CODE (arg0) == BIT_IOR_EXPR
12218 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12219 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12221 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12222 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12223 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12224 fold_build2_loc (loc, code, type,
12225 arg00, arg1),
12226 fold_build2_loc (loc, code, type,
12227 arg01, arg1));
12230 /* Two consecutive rotates adding up to some integer
12231 multiple of the precision of the type can be ignored. */
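/* For example, for a 32-bit type, (X ror 5) ror 27 is X itself,
   since 5 + 27 == 32.  */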
12232 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12233 && TREE_CODE (arg0) == RROTATE_EXPR
12234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12235 && wi::umod_trunc (wi::to_wide (arg1)
12236 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12237 prec) == 0)
12238 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12240 return NULL_TREE;
12242 case MIN_EXPR:
12243 case MAX_EXPR:
12244 goto associate;
12246 case TRUTH_ANDIF_EXPR:
12247 /* Note that the operands of this must be ints
12248 and their values must be 0 or 1.
12249 ("true" is a fixed value perhaps depending on the language.) */
12250 /* If first arg is constant zero, return it. */
12251 if (integer_zerop (arg0))
12252 return fold_convert_loc (loc, type, arg0);
12253 /* FALLTHRU */
12254 case TRUTH_AND_EXPR:
12255 /* If either arg is constant true, drop it. */
12256 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12257 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12258 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12259 /* Preserve sequence points. */
12260 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12261 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12262 /* If second arg is constant zero, result is zero, but first arg
12263 must be evaluated. */
12264 if (integer_zerop (arg1))
12265 return omit_one_operand_loc (loc, type, arg1, arg0);
12266 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12267 case will be handled here. */
12268 if (integer_zerop (arg0))
12269 return omit_one_operand_loc (loc, type, arg0, arg1);
12271 /* !X && X is always false. */
12272 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12273 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12274 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12275 /* X && !X is always false. */
12276 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12277 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12278 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12280 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12281 means A >= Y && A != MAX, but in this case we know that
12282 A < X <= MAX. */
12284 if (!TREE_SIDE_EFFECTS (arg0)
12285 && !TREE_SIDE_EFFECTS (arg1))
12287 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12288 if (tem && !operand_equal_p (tem, arg0, 0))
12289 return fold_convert (type,
12290 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12291 tem, arg1));
12293 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12294 if (tem && !operand_equal_p (tem, arg1, 0))
12295 return fold_convert (type,
12296 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12297 arg0, tem));
12300 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12301 != NULL_TREE)
12302 return tem;
12304 return NULL_TREE;
12306 case TRUTH_ORIF_EXPR:
12307 /* Note that the operands of this must be ints
12308 and their values must be 0 or true.
12309 ("true" is a fixed value perhaps depending on the language.) */
12310 /* If first arg is constant true, return it. */
12311 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12312 return fold_convert_loc (loc, type, arg0);
12313 /* FALLTHRU */
12314 case TRUTH_OR_EXPR:
12315 /* If either arg is constant zero, drop it. */
12316 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12317 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12318 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12319 /* Preserve sequence points. */
12320 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12321 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12322 /* If second arg is constant true, result is true, but we must
12323 evaluate first arg. */
12324 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12325 return omit_one_operand_loc (loc, type, arg1, arg0);
12326 /* Likewise for first arg, but note this only occurs here for
12327 TRUTH_OR_EXPR. */
12328 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12329 return omit_one_operand_loc (loc, type, arg0, arg1);
12331 /* !X || X is always true. */
12332 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12334 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12335 /* X || !X is always true. */
12336 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12337 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12338 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12340 /* (X && !Y) || (!X && Y) is X ^ Y */
12341 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12342 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12344 tree a0, a1, l0, l1, n0, n1;
12346 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12347 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12349 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12350 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12352 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12353 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12355 if ((operand_equal_p (n0, a0, 0)
12356 && operand_equal_p (n1, a1, 0))
12357 || (operand_equal_p (n0, a1, 0)
12358 && operand_equal_p (n1, a0, 0)))
12359 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12362 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12363 != NULL_TREE)
12364 return tem;
12366 return NULL_TREE;
12368 case TRUTH_XOR_EXPR:
12369 /* If the second arg is constant zero, drop it. */
12370 if (integer_zerop (arg1))
12371 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12372 /* If the second arg is constant true, this is a logical inversion. */
12373 if (integer_onep (arg1))
12375 tem = invert_truthvalue_loc (loc, arg0);
12376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12378 /* Identical arguments cancel to zero. */
12379 if (operand_equal_p (arg0, arg1, 0))
12380 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12382 /* !X ^ X is always true. */
12383 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12384 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12385 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12387 /* X ^ !X is always true. */
12388 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12389 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12390 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12392 return NULL_TREE;
12394 case EQ_EXPR:
12395 case NE_EXPR:
12396 STRIP_NOPS (arg0);
12397 STRIP_NOPS (arg1);
12399 tem = fold_comparison (loc, code, type, op0, op1);
12400 if (tem != NULL_TREE)
12401 return tem;
12403 /* bool_var != 1 becomes !bool_var. */
12404 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12405 && code == NE_EXPR)
12406 return fold_convert_loc (loc, type,
12407 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12408 TREE_TYPE (arg0), arg0));
12410 /* bool_var == 0 becomes !bool_var. */
12411 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12412 && code == EQ_EXPR)
12413 return fold_convert_loc (loc, type,
12414 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12415 TREE_TYPE (arg0), arg0));
12417 /* !exp != 0 becomes !exp */
12418 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12419 && code == NE_EXPR)
12420 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12422 /* If this is an EQ or NE comparison with zero and ARG0 is
12423 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12424 two operations, but the latter can be done in one less insn
12425 on machines that have only two-operand insns or on which a
12426 constant cannot be the first operand. */
12427 if (TREE_CODE (arg0) == BIT_AND_EXPR
12428 && integer_zerop (arg1))
12430 tree arg00 = TREE_OPERAND (arg0, 0);
12431 tree arg01 = TREE_OPERAND (arg0, 1);
12432 if (TREE_CODE (arg00) == LSHIFT_EXPR
12433 && integer_onep (TREE_OPERAND (arg00, 0)))
12435 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12436 arg01, TREE_OPERAND (arg00, 1));
12437 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12438 build_one_cst (TREE_TYPE (arg0)));
12439 return fold_build2_loc (loc, code, type,
12440 fold_convert_loc (loc, TREE_TYPE (arg1),
12441 tem), arg1);
12443 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12444 && integer_onep (TREE_OPERAND (arg01, 0)))
12446 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12447 arg00, TREE_OPERAND (arg01, 1));
12448 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12449 build_one_cst (TREE_TYPE (arg0)));
12450 return fold_build2_loc (loc, code, type,
12451 fold_convert_loc (loc, TREE_TYPE (arg1),
12452 tem), arg1);
12456 /* If this is a comparison of a field, we may be able to simplify it. */
12457 if ((TREE_CODE (arg0) == COMPONENT_REF
12458 || TREE_CODE (arg0) == BIT_FIELD_REF)
12459 /* Handle the constant case even without -O
12460 to make sure the warnings are given. */
12461 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12463 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12464 if (t1)
12465 return t1;
12468 /* Optimize comparisons of strlen vs zero to a compare of the
12469 first character of the string vs zero. To wit,
12470 strlen(ptr) == 0 => *ptr == 0
12471 strlen(ptr) != 0 => *ptr != 0
12472 Other cases should reduce to one of these two (or a constant)
12473 due to the return value of strlen being unsigned. */
12474 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12476 tree fndecl = get_callee_fndecl (arg0);
12478 if (fndecl
12479 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12480 && call_expr_nargs (arg0) == 1
12481 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12482 == POINTER_TYPE))
12484 tree ptrtype
12485 = build_pointer_type (build_qualified_type (char_type_node,
12486 TYPE_QUAL_CONST));
12487 tree ptr = fold_convert_loc (loc, ptrtype,
12488 CALL_EXPR_ARG (arg0, 0));
12489 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12490 return fold_build2_loc (loc, code, type, iref,
12491 build_int_cst (TREE_TYPE (iref), 0));
12495 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12496 of X. Similarly fold (X >> C) == 0 into X >= 0. */
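/* For example, for a 32-bit X, (X >> 31) != 0 becomes X < 0 and
   (X >> 31) == 0 becomes X >= 0, converting X to a signed type
   first if it is unsigned.  */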
12497 if (TREE_CODE (arg0) == RSHIFT_EXPR
12498 && integer_zerop (arg1)
12499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12501 tree arg00 = TREE_OPERAND (arg0, 0);
12502 tree arg01 = TREE_OPERAND (arg0, 1);
12503 tree itype = TREE_TYPE (arg00);
12504 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12506 if (TYPE_UNSIGNED (itype))
12508 itype = signed_type_for (itype);
12509 arg00 = fold_convert_loc (loc, itype, arg00);
12511 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12512 type, arg00, build_zero_cst (itype));
12516 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12517 (X & C) == 0 when C is a single bit. */
12518 if (TREE_CODE (arg0) == BIT_AND_EXPR
12519 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12520 && integer_zerop (arg1)
12521 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12523 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12524 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12525 TREE_OPERAND (arg0, 1));
12526 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12527 type, tem,
12528 fold_convert_loc (loc, TREE_TYPE (arg0),
12529 arg1));
12532 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12533 constant C is a power of two, i.e. a single bit. */
12534 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12535 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12536 && integer_zerop (arg1)
12537 && integer_pow2p (TREE_OPERAND (arg0, 1))
12538 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12539 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12541 tree arg00 = TREE_OPERAND (arg0, 0);
12542 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12543 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12546 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12547 when C is a power of two, i.e. a single bit. */
12548 if (TREE_CODE (arg0) == BIT_AND_EXPR
12549 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12550 && integer_zerop (arg1)
12551 && integer_pow2p (TREE_OPERAND (arg0, 1))
12552 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12553 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12555 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12556 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12557 arg000, TREE_OPERAND (arg0, 1));
12558 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12559 tem, build_int_cst (TREE_TYPE (tem), 0));
12562 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12563 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12565 tree arg00 = TREE_OPERAND (arg0, 0);
12566 tree arg01 = TREE_OPERAND (arg0, 1);
12567 tree arg10 = TREE_OPERAND (arg1, 0);
12568 tree arg11 = TREE_OPERAND (arg1, 1);
12569 tree itype = TREE_TYPE (arg0);
12571 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12572 operand_equal_p guarantees no side-effects so we don't need
12573 to use omit_one_operand on Z. */
12574 if (operand_equal_p (arg01, arg11, 0))
12575 return fold_build2_loc (loc, code, type, arg00,
12576 fold_convert_loc (loc, TREE_TYPE (arg00),
12577 arg10));
12578 if (operand_equal_p (arg01, arg10, 0))
12579 return fold_build2_loc (loc, code, type, arg00,
12580 fold_convert_loc (loc, TREE_TYPE (arg00),
12581 arg11));
12582 if (operand_equal_p (arg00, arg11, 0))
12583 return fold_build2_loc (loc, code, type, arg01,
12584 fold_convert_loc (loc, TREE_TYPE (arg01),
12585 arg10));
12586 if (operand_equal_p (arg00, arg10, 0))
12587 return fold_build2_loc (loc, code, type, arg01,
12588 fold_convert_loc (loc, TREE_TYPE (arg01),
12589 arg11));
12591 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12592 if (TREE_CODE (arg01) == INTEGER_CST
12593 && TREE_CODE (arg11) == INTEGER_CST)
12595 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12596 fold_convert_loc (loc, itype, arg11));
12597 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12598 return fold_build2_loc (loc, code, type, tem,
12599 fold_convert_loc (loc, itype, arg10));
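/* Illustrative example (annotation, not part of the original source):
       (x ^ 5) == (y ^ 3)   becomes   (x ^ (5 ^ 3)) == y
   i.e. (x ^ 6) == y, since XOR-ing both sides with the constant 3
   cancels it on the right-hand side.  */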
12603 /* Attempt to simplify equality/inequality comparisons of complex
12604 values. Only lower the comparison if the result is known or
12605 can be simplified to a single scalar comparison. */
12606 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12607 || TREE_CODE (arg0) == COMPLEX_CST)
12608 && (TREE_CODE (arg1) == COMPLEX_EXPR
12609 || TREE_CODE (arg1) == COMPLEX_CST))
12611 tree real0, imag0, real1, imag1;
12612 tree rcond, icond;
12614 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12616 real0 = TREE_OPERAND (arg0, 0);
12617 imag0 = TREE_OPERAND (arg0, 1);
12619 else
12621 real0 = TREE_REALPART (arg0);
12622 imag0 = TREE_IMAGPART (arg0);
12625 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12627 real1 = TREE_OPERAND (arg1, 0);
12628 imag1 = TREE_OPERAND (arg1, 1);
12630 else
12632 real1 = TREE_REALPART (arg1);
12633 imag1 = TREE_IMAGPART (arg1);
12636 rcond = fold_binary_loc (loc, code, type, real0, real1);
12637 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12639 if (integer_zerop (rcond))
12641 if (code == EQ_EXPR)
12642 return omit_two_operands_loc (loc, type, boolean_false_node,
12643 imag0, imag1);
12644 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12646 else
12648 if (code == NE_EXPR)
12649 return omit_two_operands_loc (loc, type, boolean_true_node,
12650 imag0, imag1);
12651 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12655 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12656 if (icond && TREE_CODE (icond) == INTEGER_CST)
12658 if (integer_zerop (icond))
12660 if (code == EQ_EXPR)
12661 return omit_two_operands_loc (loc, type, boolean_false_node,
12662 real0, real1);
12663 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12665 else
12667 if (code == NE_EXPR)
12668 return omit_two_operands_loc (loc, type, boolean_true_node,
12669 real0, real1);
12670 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
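/* Illustrative example (annotation, not part of the original source):
   with
       _Complex double a = x + 2.0i, b = y + 3.0i;
   a == b folds to false once the constant imaginary parts 2.0 and 3.0
   are known to compare unequal, regardless of x and y.  */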
12675 return NULL_TREE;
12677 case LT_EXPR:
12678 case GT_EXPR:
12679 case LE_EXPR:
12680 case GE_EXPR:
12681 tem = fold_comparison (loc, code, type, op0, op1);
12682 if (tem != NULL_TREE)
12683 return tem;
12685 /* Transform comparisons of the form X +- C CMP X. */
12686 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12688 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12689 && !HONOR_SNANS (arg0))
12691 tree arg01 = TREE_OPERAND (arg0, 1);
12692 enum tree_code code0 = TREE_CODE (arg0);
12693 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12695 /* (X - c) > X becomes false. */
12696 if (code == GT_EXPR
12697 && ((code0 == MINUS_EXPR && is_positive >= 0)
12698 || (code0 == PLUS_EXPR && is_positive <= 0)))
12699 return constant_boolean_node (0, type);
12701 /* Likewise (X + c) < X becomes false. */
12702 if (code == LT_EXPR
12703 && ((code0 == PLUS_EXPR && is_positive >= 0)
12704 || (code0 == MINUS_EXPR && is_positive <= 0)))
12705 return constant_boolean_node (0, type);
12707 /* Convert (X - c) <= X to true. */
12708 if (!HONOR_NANS (arg1)
12709 && code == LE_EXPR
12710 && ((code0 == MINUS_EXPR && is_positive >= 0)
12711 || (code0 == PLUS_EXPR && is_positive <= 0)))
12712 return constant_boolean_node (1, type);
12714 /* Convert (X + c) >= X to true. */
12715 if (!HONOR_NANS (arg1)
12716 && code == GE_EXPR
12717 && ((code0 == PLUS_EXPR && is_positive >= 0)
12718 || (code0 == MINUS_EXPR && is_positive <= 0)))
12719 return constant_boolean_node (1, type);
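/* Illustrative examples (annotation, not part of the original source),
   for floating-point x with c > 0 and no signaling NaNs:
       (x - c) > x    becomes   false
       (x + c) < x    becomes   false
   and additionally, when quiet NaNs need not be honored
   (e.g. -ffinite-math-only):
       (x - c) <= x   becomes   true
       (x + c) >= x   becomes   true  */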
12722 /* If we are comparing an ABS_EXPR with a constant, we can
12723 convert all the cases into explicit comparisons, but they may
12724 well not be faster than doing the ABS and one comparison.
12725 But ABS (X) <= C is a range comparison, which becomes a subtraction
12726 and a comparison, and is probably faster. */
12727 if (code == LE_EXPR
12728 && TREE_CODE (arg1) == INTEGER_CST
12729 && TREE_CODE (arg0) == ABS_EXPR
12730 && ! TREE_SIDE_EFFECTS (arg0)
12731 && (tem = negate_expr (arg1)) != 0
12732 && TREE_CODE (tem) == INTEGER_CST
12733 && !TREE_OVERFLOW (tem))
12734 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12735 build2 (GE_EXPR, type,
12736 TREE_OPERAND (arg0, 0), tem),
12737 build2 (LE_EXPR, type,
12738 TREE_OPERAND (arg0, 0), arg1));
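/* Illustrative example (annotation, not part of the original source):
       abs (x) <= 5   becomes   x >= -5 && x <= 5
   a range check that can in turn be implemented with one subtraction
   and one unsigned comparison.  */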
12740 /* Convert ABS_EXPR<x> >= 0 to true. */
12741 strict_overflow_p = false;
12742 if (code == GE_EXPR
12743 && (integer_zerop (arg1)
12744 || (! HONOR_NANS (arg0)
12745 && real_zerop (arg1)))
12746 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12748 if (strict_overflow_p)
12749 fold_overflow_warning (("assuming signed overflow does not occur "
12750 "when simplifying comparison of "
12751 "absolute value and zero"),
12752 WARN_STRICT_OVERFLOW_CONDITIONAL);
12753 return omit_one_operand_loc (loc, type,
12754 constant_boolean_node (true, type),
12755 arg0);
12758 /* Convert ABS_EXPR<x> < 0 to false. */
12759 strict_overflow_p = false;
12760 if (code == LT_EXPR
12761 && (integer_zerop (arg1) || real_zerop (arg1))
12762 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12764 if (strict_overflow_p)
12765 fold_overflow_warning (("assuming signed overflow does not occur "
12766 "when simplifying comparison of "
12767 "absolute value and zero"),
12768 WARN_STRICT_OVERFLOW_CONDITIONAL);
12769 return omit_one_operand_loc (loc, type,
12770 constant_boolean_node (false, type),
12771 arg0);
12774 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12775 and similarly for >= into !=. */
12776 if ((code == LT_EXPR || code == GE_EXPR)
12777 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12778 && TREE_CODE (arg1) == LSHIFT_EXPR
12779 && integer_onep (TREE_OPERAND (arg1, 0)))
12780 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12781 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12782 TREE_OPERAND (arg1, 1)),
12783 build_zero_cst (TREE_TYPE (arg0)));
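/* Illustrative example (annotation, not part of the original source):
   for unsigned x,
       x < (1 << y)    becomes   (x >> y) == 0
       x >= (1 << y)   becomes   (x >> y) != 0  */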
12785 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12786 otherwise Y might be >= # of bits in X's type and thus e.g.
12787 (unsigned char) (1 << Y) for Y 15 might be 0.
12788 If the cast is widening, then 1 << Y should have unsigned type,
12789 otherwise if Y is number of bits in the signed shift type minus 1,
12790 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12791 31 might be 0xffffffff80000000. */
12792 if ((code == LT_EXPR || code == GE_EXPR)
12793 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12794 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12795 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12796 && CONVERT_EXPR_P (arg1)
12797 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12798 && (element_precision (TREE_TYPE (arg1))
12799 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12800 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12801 || (element_precision (TREE_TYPE (arg1))
12802 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12803 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12805 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12806 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12807 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12808 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12809 build_zero_cst (TREE_TYPE (arg0)));
12812 return NULL_TREE;
12814 case UNORDERED_EXPR:
12815 case ORDERED_EXPR:
12816 case UNLT_EXPR:
12817 case UNLE_EXPR:
12818 case UNGT_EXPR:
12819 case UNGE_EXPR:
12820 case UNEQ_EXPR:
12821 case LTGT_EXPR:
12822 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12824 tree targ0 = strip_float_extensions (arg0);
12825 tree targ1 = strip_float_extensions (arg1);
12826 tree newtype = TREE_TYPE (targ0);
12828 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12829 newtype = TREE_TYPE (targ1);
12831 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0)))
12832 return fold_build2_loc (loc, code, type,
12833 fold_convert_loc (loc, newtype, targ0),
12834 fold_convert_loc (loc, newtype, targ1));
12837 return NULL_TREE;
12839 case COMPOUND_EXPR:
12840 /* When pedantic, a compound expression can be neither an lvalue
12841 nor an integer constant expression. */
12842 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12843 return NULL_TREE;
12844 /* Don't let (0, 0) be a null pointer constant. */
12845 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12846 : fold_convert_loc (loc, type, arg1);
12847 return tem;
12849 default:
12850 return NULL_TREE;
12851 } /* switch (code) */
12854 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12855 ((A & N) + B) & M -> (A + B) & M
12856 Similarly if (N & M) == 0,
12857 ((A | N) + B) & M -> (A + B) & M
12858 and for - instead of + (or unary - instead of +)
12859 and/or ^ instead of |.
12860 If B is constant and (B & M) == 0, fold into A & M.
12862 This function is a helper for match.pd patterns. Return the non-NULL
12863 type in which the simplified operation should be performed, but only
12864 if some optimization is possible.
12866 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12867 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12868 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12869 +/-. */
12870 tree
12871 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12872 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12873 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12874 tree *pmop)
12876 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12877 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12878 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12879 if (~cst1 == 0
12880 || (cst1 & (cst1 + 1)) != 0
12881 || !INTEGRAL_TYPE_P (type)
12882 || (!TYPE_OVERFLOW_WRAPS (type)
12883 && TREE_CODE (type) != INTEGER_TYPE)
12884 || (wi::max_value (type) & cst1) != cst1)
12885 return NULL_TREE;
12887 enum tree_code codes[2] = { code00, code01 };
12888 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12889 int which = 0;
12890 wide_int cst0;
12892 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12893 arg1 (M) is == (1LL << cst) - 1.
12894 Store C into PMOP[0] and D into PMOP[1]. */
12895 pmop[0] = arg00;
12896 pmop[1] = arg01;
12897 which = code != NEGATE_EXPR;
12899 for (; which >= 0; which--)
12900 switch (codes[which])
12902 case BIT_AND_EXPR:
12903 case BIT_IOR_EXPR:
12904 case BIT_XOR_EXPR:
12905 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12906 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12907 if (codes[which] == BIT_AND_EXPR)
12909 if (cst0 != cst1)
12910 break;
12912 else if (cst0 != 0)
12913 break;
12914 /* If C or D is of the form (A & N) where
12915 (N & M) == M, or of the form (A | N) or
12916 (A ^ N) where (N & M) == 0, replace it with A. */
12917 pmop[which] = arg0xx[2 * which];
12918 break;
12919 case ERROR_MARK:
12920 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12921 break;
12922 /* If C or D is a N where (N & M) == 0, it can be
12923 omitted (replaced with 0). */
12924 if ((code == PLUS_EXPR
12925 || (code == MINUS_EXPR && which == 0))
12926 && (cst1 & wi::to_wide (pmop[which])) == 0)
12927 pmop[which] = build_int_cst (type, 0);
12928 /* Similarly, with C - N where (-N & M) == 0. */
12929 if (code == MINUS_EXPR
12930 && which == 1
12931 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12932 pmop[which] = build_int_cst (type, 0);
12933 break;
12934 default:
12935 gcc_unreachable ();
12938 /* Only build anything new if we optimized one or both arguments above. */
12939 if (pmop[0] == arg00 && pmop[1] == arg01)
12940 return NULL_TREE;
12942 if (TYPE_OVERFLOW_WRAPS (type))
12943 return type;
12944 else
12945 return unsigned_type_for (type);
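/* Illustrative examples (annotation, not part of the original source),
   with M == 0x0f (so M == (1 << 4) - 1):
       ((a & 0xff) + b) & 0x0f   ->   (a + b) & 0x0f
   since 0xff & 0x0f == 0x0f, and
       ((a | 0xf0) + b) & 0x0f   ->   (a + b) & 0x0f
   since 0xf0 & 0x0f == 0.  */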
12948 /* Used by contains_label_p and contains_label_1. */
12950 struct contains_label_data
12952 hash_set<tree> *pset;
12953 bool inside_switch_p;
12956 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12957 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12958 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12960 static tree
12961 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12963 contains_label_data *d = (contains_label_data *) data;
12964 switch (TREE_CODE (*tp))
12966 case LABEL_EXPR:
12967 return *tp;
12969 case CASE_LABEL_EXPR:
12970 if (!d->inside_switch_p)
12971 return *tp;
12972 return NULL_TREE;
12974 case SWITCH_EXPR:
12975 if (!d->inside_switch_p)
12977 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12978 return *tp;
12979 d->inside_switch_p = true;
12980 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12981 return *tp;
12982 d->inside_switch_p = false;
12983 *walk_subtrees = 0;
12985 return NULL_TREE;
12987 case GOTO_EXPR:
12988 *walk_subtrees = 0;
12989 return NULL_TREE;
12991 default:
12992 return NULL_TREE;
12996 /* Return whether the sub-tree ST contains a label which is accessible from
12997 outside the sub-tree. */
12999 static bool
13000 contains_label_p (tree st)
13002 hash_set<tree> pset;
13003 contains_label_data data = { &pset, false };
13004 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
13007 /* Fold a ternary expression of code CODE and type TYPE with operands
13008 OP0, OP1, and OP2. Return the folded expression if folding is
13009 successful. Otherwise, return NULL_TREE. */
13011 tree
13012 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13013 tree op0, tree op1, tree op2)
13015 tree tem;
13016 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13017 enum tree_code_class kind = TREE_CODE_CLASS (code);
13019 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13020 && TREE_CODE_LENGTH (code) == 3);
13022 /* If this is a commutative operation, and OP0 is a constant, move it
13023 to OP1 to reduce the number of tests below. */
13024 if (commutative_ternary_tree_code (code)
13025 && tree_swap_operands_p (op0, op1))
13026 return fold_build3_loc (loc, code, type, op1, op0, op2);
13028 tem = generic_simplify (loc, code, type, op0, op1, op2);
13029 if (tem)
13030 return tem;
13032 /* Strip any conversions that don't change the mode. This is safe
13033 for every expression, except for a comparison expression because
13034 its signedness is derived from its operands. So, in the latter
13035 case, only strip conversions that don't change the signedness.
13037 Note that this is done as an internal manipulation within the
13038 constant folder, in order to find the simplest representation of
13040 the arguments so that their form can be studied. In any case,
13040 the appropriate type conversions should be put back in the tree
13041 that will get out of the constant folder. */
13042 if (op0)
13044 arg0 = op0;
13045 STRIP_NOPS (arg0);
13048 if (op1)
13050 arg1 = op1;
13051 STRIP_NOPS (arg1);
13054 if (op2)
13056 arg2 = op2;
13057 STRIP_NOPS (arg2);
13060 switch (code)
13062 case COMPONENT_REF:
13063 if (TREE_CODE (arg0) == CONSTRUCTOR
13064 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13066 unsigned HOST_WIDE_INT idx;
13067 tree field, value;
13068 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13069 if (field == arg1)
13070 return value;
13072 return NULL_TREE;
13074 case COND_EXPR:
13075 case VEC_COND_EXPR:
13076 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13077 so all simple results must be passed through pedantic_non_lvalue. */
13078 if (TREE_CODE (arg0) == INTEGER_CST)
13080 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13081 tem = integer_zerop (arg0) ? op2 : op1;
13082 /* Only optimize constant conditions when the selected branch
13083 has the same type as the COND_EXPR. This avoids optimizing
13084 away "c ? x : throw", where the throw has a void type.
13085 Avoid throwing away an operand that contains a label. */
13086 if ((!TREE_SIDE_EFFECTS (unused_op)
13087 || !contains_label_p (unused_op))
13088 && (! VOID_TYPE_P (TREE_TYPE (tem))
13089 || VOID_TYPE_P (type)))
13090 return protected_set_expr_location_unshare (tem, loc);
13091 return NULL_TREE;
13093 else if (TREE_CODE (arg0) == VECTOR_CST)
13095 unsigned HOST_WIDE_INT nelts;
13096 if ((TREE_CODE (arg1) == VECTOR_CST
13097 || TREE_CODE (arg1) == CONSTRUCTOR)
13098 && (TREE_CODE (arg2) == VECTOR_CST
13099 || TREE_CODE (arg2) == CONSTRUCTOR)
13100 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13102 vec_perm_builder sel (nelts, nelts, 1);
13103 for (unsigned int i = 0; i < nelts; i++)
13105 tree val = VECTOR_CST_ELT (arg0, i);
13106 if (integer_all_onesp (val))
13107 sel.quick_push (i);
13108 else if (integer_zerop (val))
13109 sel.quick_push (nelts + i);
13110 else /* Currently unreachable. */
13111 return NULL_TREE;
13113 vec_perm_indices indices (sel, 2, nelts);
13114 tree t = fold_vec_perm (type, arg1, arg2, indices);
13115 if (t != NULL_TREE)
13116 return t;
13120 /* If we have A op B ? A : C, we may be able to convert this to a
13121 simpler expression, depending on the operation and the values
13122 of B and C. Signed zeros prevent all of these transformations,
13123 for reasons given above each one.
13125 Also try swapping the arguments and inverting the conditional. */
13126 if (COMPARISON_CLASS_P (arg0)
13127 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13128 && !HONOR_SIGNED_ZEROS (op1))
13130 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13131 TREE_OPERAND (arg0, 0),
13132 TREE_OPERAND (arg0, 1),
13133 op1, op2);
13134 if (tem)
13135 return tem;
13138 if (COMPARISON_CLASS_P (arg0)
13139 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13140 && !HONOR_SIGNED_ZEROS (op2))
13142 enum tree_code comp_code = TREE_CODE (arg0);
13143 tree arg00 = TREE_OPERAND (arg0, 0);
13144 tree arg01 = TREE_OPERAND (arg0, 1);
13145 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13146 if (comp_code != ERROR_MARK)
13147 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13148 arg00,
13149 arg01,
13150 op2, op1);
13151 if (tem)
13152 return tem;
13155 /* If the second operand is simpler than the third, swap them
13156 since that produces better jump optimization results. */
13157 if (truth_value_p (TREE_CODE (arg0))
13158 && tree_swap_operands_p (op1, op2))
13160 location_t loc0 = expr_location_or (arg0, loc);
13161 /* See if this can be inverted. If it can't, possibly because
13162 it was a floating-point inequality comparison, don't do
13163 anything. */
13164 tem = fold_invert_truthvalue (loc0, arg0);
13165 if (tem)
13166 return fold_build3_loc (loc, code, type, tem, op2, op1);
13169 /* Convert A ? 1 : 0 to simply A. */
13170 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13171 : (integer_onep (op1)
13172 && !VECTOR_TYPE_P (type)))
13173 && integer_zerop (op2)
13174 /* If we try to convert OP0 to our type, the
13175 call to fold will try to move the conversion inside
13176 a COND, which will recurse. In that case, the COND_EXPR
13177 is probably the best choice, so leave it alone. */
13178 && type == TREE_TYPE (arg0))
13179 return protected_set_expr_location_unshare (arg0, loc);
13181 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13182 over COND_EXPR in cases such as floating point comparisons. */
13183 if (integer_zerop (op1)
13184 && code == COND_EXPR
13185 && integer_onep (op2)
13186 && !VECTOR_TYPE_P (type)
13187 && truth_value_p (TREE_CODE (arg0)))
13188 return fold_convert_loc (loc, type,
13189 invert_truthvalue_loc (loc, arg0));
13191 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13192 if (TREE_CODE (arg0) == LT_EXPR
13193 && integer_zerop (TREE_OPERAND (arg0, 1))
13194 && integer_zerop (op2)
13195 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13197 /* sign_bit_p looks through both zero and sign extensions,
13198 but for this optimization only sign extensions are
13199 usable. */
13200 tree tem2 = TREE_OPERAND (arg0, 0);
13201 while (tem != tem2)
13203 if (TREE_CODE (tem2) != NOP_EXPR
13204 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13206 tem = NULL_TREE;
13207 break;
13209 tem2 = TREE_OPERAND (tem2, 0);
13211 /* sign_bit_p only checks ARG1 bits within A's precision.
13212 If <sign bit of A> has wider type than A, bits outside
13213 of A's precision in <sign bit of A> need to be checked.
13214 If they are all 0, this optimization needs to be done
13215 in unsigned A's type; if they are all 1, in signed A's type;
13216 otherwise this can't be done. */
13217 if (tem
13218 && TYPE_PRECISION (TREE_TYPE (tem))
13219 < TYPE_PRECISION (TREE_TYPE (arg1))
13220 && TYPE_PRECISION (TREE_TYPE (tem))
13221 < TYPE_PRECISION (type))
13223 int inner_width, outer_width;
13224 tree tem_type;
13226 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13227 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13228 if (outer_width > TYPE_PRECISION (type))
13229 outer_width = TYPE_PRECISION (type);
13231 wide_int mask = wi::shifted_mask
13232 (inner_width, outer_width - inner_width, false,
13233 TYPE_PRECISION (TREE_TYPE (arg1)));
13235 wide_int common = mask & wi::to_wide (arg1);
13236 if (common == mask)
13238 tem_type = signed_type_for (TREE_TYPE (tem));
13239 tem = fold_convert_loc (loc, tem_type, tem);
13241 else if (common == 0)
13243 tem_type = unsigned_type_for (TREE_TYPE (tem));
13244 tem = fold_convert_loc (loc, tem_type, tem);
13246 else
13247 tem = NULL;
13250 if (tem)
13251 return
13252 fold_convert_loc (loc, type,
13253 fold_build2_loc (loc, BIT_AND_EXPR,
13254 TREE_TYPE (tem), tem,
13255 fold_convert_loc (loc,
13256 TREE_TYPE (tem),
13257 arg1)));
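/* Illustrative example (annotation, not part of the original source):
   for a signed 32-bit int x,
       x < 0 ? 0x80000000 : 0   becomes   x & 0x80000000
   (with 0x80000000 interpreted in x's type); the sign test selects
   exactly the sign bit.  */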
13260 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13261 already handled above. */
13262 if (TREE_CODE (arg0) == BIT_AND_EXPR
13263 && integer_onep (TREE_OPERAND (arg0, 1))
13264 && integer_zerop (op2)
13265 && integer_pow2p (arg1))
13267 tree tem = TREE_OPERAND (arg0, 0);
13268 STRIP_NOPS (tem);
13269 if (TREE_CODE (tem) == RSHIFT_EXPR
13270 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13271 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13272 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13273 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13274 fold_convert_loc (loc, type,
13275 TREE_OPERAND (tem, 0)),
13276 op1);
13279 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13280 is probably obsolete because the first operand should be a
13281 truth value (that's why we have the two cases above), but let's
13282 leave it in until we can confirm this for all front-ends. */
13283 if (integer_zerop (op2)
13284 && TREE_CODE (arg0) == NE_EXPR
13285 && integer_zerop (TREE_OPERAND (arg0, 1))
13286 && integer_pow2p (arg1)
13287 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13288 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13289 arg1, OEP_ONLY_CONST)
13290 /* operand_equal_p compares just the value, not the precision, so e.g.
13291 arg1 could be 8-bit -128 and be a power of two, while the BIT_AND_EXPR's
13292 second operand is 32-bit -128, which is not a power of two (or vice
13293 versa). */
13294 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13295 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13297 /* Disable the transformations below for vectors, since
13298 fold_binary_op_with_conditional_arg may undo them immediately,
13299 yielding an infinite loop. */
13300 if (code == VEC_COND_EXPR)
13301 return NULL_TREE;
13303 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13304 if (integer_zerop (op2)
13305 && truth_value_p (TREE_CODE (arg0))
13306 && truth_value_p (TREE_CODE (arg1))
13307 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13308 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13309 : TRUTH_ANDIF_EXPR,
13310 type, fold_convert_loc (loc, type, arg0), op1);
13312 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13313 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13314 && truth_value_p (TREE_CODE (arg0))
13315 && truth_value_p (TREE_CODE (arg1))
13316 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13318 location_t loc0 = expr_location_or (arg0, loc);
13319 /* Only perform transformation if ARG0 is easily inverted. */
13320 tem = fold_invert_truthvalue (loc0, arg0);
13321 if (tem)
13322 return fold_build2_loc (loc, code == VEC_COND_EXPR
13323 ? BIT_IOR_EXPR
13324 : TRUTH_ORIF_EXPR,
13325 type, fold_convert_loc (loc, type, tem),
13326 op1);
13329 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13330 if (integer_zerop (arg1)
13331 && truth_value_p (TREE_CODE (arg0))
13332 && truth_value_p (TREE_CODE (op2))
13333 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13335 location_t loc0 = expr_location_or (arg0, loc);
13336 /* Only perform transformation if ARG0 is easily inverted. */
13337 tem = fold_invert_truthvalue (loc0, arg0);
13338 if (tem)
13339 return fold_build2_loc (loc, code == VEC_COND_EXPR
13340 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13341 type, fold_convert_loc (loc, type, tem),
13342 op2);
13345 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13346 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13347 && truth_value_p (TREE_CODE (arg0))
13348 && truth_value_p (TREE_CODE (op2))
13349 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13350 return fold_build2_loc (loc, code == VEC_COND_EXPR
13351 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13352 type, fold_convert_loc (loc, type, arg0), op2);
13354 return NULL_TREE;
13356 case CALL_EXPR:
13357 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13358 of fold_ternary on them. */
13359 gcc_unreachable ();
13361 case BIT_FIELD_REF:
13362 if (TREE_CODE (arg0) == VECTOR_CST
13363 && (type == TREE_TYPE (TREE_TYPE (arg0))
13364 || (VECTOR_TYPE_P (type)
13365 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13366 && tree_fits_uhwi_p (op1)
13367 && tree_fits_uhwi_p (op2))
13369 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13370 unsigned HOST_WIDE_INT width
13371 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13372 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13373 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13374 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13376 if (n != 0
13377 && (idx % width) == 0
13378 && (n % width) == 0
13379 && known_le ((idx + n) / width,
13380 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13382 idx = idx / width;
13383 n = n / width;
13385 if (TREE_CODE (arg0) == VECTOR_CST)
13387 if (n == 1)
13389 tem = VECTOR_CST_ELT (arg0, idx);
13390 if (VECTOR_TYPE_P (type))
13391 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13392 return tem;
13395 tree_vector_builder vals (type, n, 1);
13396 for (unsigned i = 0; i < n; ++i)
13397 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13398 return vals.build ();
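/* Illustrative example (annotation, not part of the original source):
   a BIT_FIELD_REF selecting one aligned element of a constant vector,
   e.g. element 2 of {1, 2, 3, 4} (op1 == the element size in bits,
   op2 == 2 * that size), folds directly to the constant 3.  */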
13403 /* On constants we can use native encode/interpret to constant
13404 fold (nearly) all BIT_FIELD_REFs. */
13405 if (CONSTANT_CLASS_P (arg0)
13406 && can_native_interpret_type_p (type)
13407 && BITS_PER_UNIT == 8
13408 && tree_fits_uhwi_p (op1)
13409 && tree_fits_uhwi_p (op2))
13411 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13412 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13413 /* Limit us to a reasonable amount of work. To relax the
13414 other limitations we need bit-shifting of the buffer
13415 and rounding up the size. */
13416 if (bitpos % BITS_PER_UNIT == 0
13417 && bitsize % BITS_PER_UNIT == 0
13418 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13420 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13421 unsigned HOST_WIDE_INT len
13422 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13423 bitpos / BITS_PER_UNIT);
13424 if (len > 0
13425 && len * BITS_PER_UNIT >= bitsize)
13427 tree v = native_interpret_expr (type, b,
13428 bitsize / BITS_PER_UNIT);
13429 if (v)
13430 return v;
13435 return NULL_TREE;
13437 case VEC_PERM_EXPR:
13438 /* Perform constant folding of VEC_PERM_EXPR. */
13439 if (TREE_CODE (arg2) == VECTOR_CST
13440 && TREE_CODE (op0) == VECTOR_CST
13441 && TREE_CODE (op1) == VECTOR_CST)
13443 /* Build a vector of integers from the tree mask. */
13444 vec_perm_builder builder;
13445 if (!tree_to_vec_perm_builder (&builder, arg2))
13446 return NULL_TREE;
13448 /* Create a vec_perm_indices for the integer vector. */
13449 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13450 bool single_arg = (op0 == op1);
13451 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13452 return fold_vec_perm (type, op0, op1, sel);
13454 return NULL_TREE;
13456 case BIT_INSERT_EXPR:
13457 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13458 if (TREE_CODE (arg0) == INTEGER_CST
13459 && TREE_CODE (arg1) == INTEGER_CST)
13461 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13462 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13463 wide_int tem = (wi::to_wide (arg0)
13464 & wi::shifted_mask (bitpos, bitsize, true,
13465 TYPE_PRECISION (type)));
13466 wide_int tem2
13467 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13468 bitsize), bitpos);
13469 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
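/* Illustrative example (annotation, not part of the original source):
   inserting the 8-bit value 0xAB at bit position 8 of the 32-bit
   constant 0x12345678 masks out bits 8..15 (giving 0x12340078) and
   ORs in 0xAB << 8, yielding 0x1234AB78.  */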
13471 else if (TREE_CODE (arg0) == VECTOR_CST
13472 && CONSTANT_CLASS_P (arg1)
13473 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13474 TREE_TYPE (arg1)))
13476 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13477 unsigned HOST_WIDE_INT elsize
13478 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13479 if (bitpos % elsize == 0)
13481 unsigned k = bitpos / elsize;
13482 unsigned HOST_WIDE_INT nelts;
13483 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13484 return arg0;
13485 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13487 tree_vector_builder elts (type, nelts, 1);
13488 elts.quick_grow (nelts);
13489 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13490 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13491 return elts.build ();
13495 return NULL_TREE;
13497 default:
13498 return NULL_TREE;
13499 } /* switch (code) */
13502 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13504 of an array (or vector). If non-NULL, *CTOR_IDX is updated with the
13504 constructor element index of the value returned. If the element is
13505 not found NULL_TREE is returned and *CTOR_IDX is updated to
13506 the index of the element after the ACCESS_INDEX position (which
13507 may be outside of the CTOR array). */
13509 tree
13510 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13511 unsigned *ctor_idx)
13513 tree index_type = NULL_TREE;
13514 signop index_sgn = UNSIGNED;
13515 offset_int low_bound = 0;
13517 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13519 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13520 if (domain_type && TYPE_MIN_VALUE (domain_type))
13522 /* Static constructors for variably sized objects make no sense. */
13523 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13524 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13525 /* ??? When it is obvious that the range is signed, treat it so. */
13526 if (TYPE_UNSIGNED (index_type)
13527 && TYPE_MAX_VALUE (domain_type)
13528 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13529 TYPE_MIN_VALUE (domain_type)))
13531 index_sgn = SIGNED;
13532 low_bound
13533 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13534 SIGNED);
13536 else
13538 index_sgn = TYPE_SIGN (index_type);
13539 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13544 if (index_type)
13545 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13546 index_sgn);
13548 offset_int index = low_bound;
13549 if (index_type)
13550 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13552 offset_int max_index = index;
13553 unsigned cnt;
13554 tree cfield, cval;
13555 bool first_p = true;
13557 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13559 /* Array constructor might explicitly set index, or specify a range,
13560 or leave index NULL meaning that it is next index after previous
13561 one. */
13562 if (cfield)
13564 if (TREE_CODE (cfield) == INTEGER_CST)
13565 max_index = index
13566 = offset_int::from (wi::to_wide (cfield), index_sgn);
13567 else
13569 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13570 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13571 index_sgn);
13572 max_index
13573 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13574 index_sgn);
13575 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13578 else if (!first_p)
13580 index = max_index + 1;
13581 if (index_type)
13582 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13583 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13584 max_index = index;
13586 else
13587 first_p = false;
13589 /* Do we have a match? */
13590 if (wi::cmp (access_index, index, index_sgn) >= 0)
13592 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13594 if (ctor_idx)
13595 *ctor_idx = cnt;
13596 return cval;
13599 else if (in_gimple_form)
13600 /* We're past the element we're searching for. Note that during
13601 parsing the elements might not be sorted.
13602 ??? We should use a binary search and a flag on the
13603 CONSTRUCTOR as to whether elements are sorted in declaration
13604 order. */
13605 break;
13607 if (ctor_idx)
13608 *ctor_idx = cnt;
13609 return NULL_TREE;
13612 /* Perform constant folding and related simplification of EXPR.
13613 The related simplifications include x*1 => x, x*0 => 0, etc.,
13614 and application of the associative law.
13615 NOP_EXPR conversions may be removed freely (as long as we
13616 are careful not to change the type of the overall expression).
13617 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13618 but we can constant-fold them if they have constant operands. */
13620 #ifdef ENABLE_FOLD_CHECKING
13621 # define fold(x) fold_1 (x)
13622 static tree fold_1 (tree);
13623 static
13624 #endif
13625 tree
13626 fold (tree expr)
13628 const tree t = expr;
13629 enum tree_code code = TREE_CODE (t);
13630 enum tree_code_class kind = TREE_CODE_CLASS (code);
13631 tree tem;
13632 location_t loc = EXPR_LOCATION (expr);
13634 /* Return right away if a constant. */
13635 if (kind == tcc_constant)
13636 return t;
13638 /* CALL_EXPR-like objects with variable numbers of operands are
13639 treated specially. */
13640 if (kind == tcc_vl_exp)
13642 if (code == CALL_EXPR)
13644 tem = fold_call_expr (loc, expr, false);
13645 return tem ? tem : expr;
13647 return expr;
13650 if (IS_EXPR_CODE_CLASS (kind))
13652 tree type = TREE_TYPE (t);
13653 tree op0, op1, op2;
13655 switch (TREE_CODE_LENGTH (code))
13657 case 1:
13658 op0 = TREE_OPERAND (t, 0);
13659 tem = fold_unary_loc (loc, code, type, op0);
13660 return tem ? tem : expr;
13661 case 2:
13662 op0 = TREE_OPERAND (t, 0);
13663 op1 = TREE_OPERAND (t, 1);
13664 tem = fold_binary_loc (loc, code, type, op0, op1);
13665 return tem ? tem : expr;
13666 case 3:
13667 op0 = TREE_OPERAND (t, 0);
13668 op1 = TREE_OPERAND (t, 1);
13669 op2 = TREE_OPERAND (t, 2);
13670 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13671 return tem ? tem : expr;
13672 default:
13673 break;
13677 switch (code)
13679 case ARRAY_REF:
13681 tree op0 = TREE_OPERAND (t, 0);
13682 tree op1 = TREE_OPERAND (t, 1);
13684 if (TREE_CODE (op1) == INTEGER_CST
13685 && TREE_CODE (op0) == CONSTRUCTOR
13686 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13688 tree val = get_array_ctor_element_at_index (op0,
13689 wi::to_offset (op1));
13690 if (val)
13691 return val;
13694 return t;
13697 /* Return a VECTOR_CST if possible. */
13698 case CONSTRUCTOR:
13700 tree type = TREE_TYPE (t);
13701 if (TREE_CODE (type) != VECTOR_TYPE)
13702 return t;
13704 unsigned i;
13705 tree val;
13706 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13707 if (! CONSTANT_CLASS_P (val))
13708 return t;
13710 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13713 case CONST_DECL:
13714 return fold (DECL_INITIAL (t));
13716 default:
13717 return t;
13718 } /* switch (code) */
13721 #ifdef ENABLE_FOLD_CHECKING
13722 #undef fold
13724 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13725 hash_table<nofree_ptr_hash<const tree_node> > *);
13726 static void fold_check_failed (const_tree, const_tree);
13727 void print_fold_checksum (const_tree);
13729 /* When --enable-checking=fold, compute a digest of EXPR before
13730 and after the actual fold call, to verify that fold did not
13731 accidentally change the original EXPR. */
13733 tree
13734 fold (tree expr)
13736 tree ret;
13737 struct md5_ctx ctx;
13738 unsigned char checksum_before[16], checksum_after[16];
13739 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13741 md5_init_ctx (&ctx);
13742 fold_checksum_tree (expr, &ctx, &ht);
13743 md5_finish_ctx (&ctx, checksum_before);
13744 ht.empty ();
13746 ret = fold_1 (expr);
13748 md5_init_ctx (&ctx);
13749 fold_checksum_tree (expr, &ctx, &ht);
13750 md5_finish_ctx (&ctx, checksum_after);
13752 if (memcmp (checksum_before, checksum_after, 16))
13753 fold_check_failed (expr, ret);
13755 return ret;
13758 void
13759 print_fold_checksum (const_tree expr)
13761 struct md5_ctx ctx;
13762 unsigned char checksum[16], cnt;
13763 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13765 md5_init_ctx (&ctx);
13766 fold_checksum_tree (expr, &ctx, &ht);
13767 md5_finish_ctx (&ctx, checksum);
13768 for (cnt = 0; cnt < 16; ++cnt)
13769 fprintf (stderr, "%02x", checksum[cnt]);
13770 putc ('\n', stderr);
13773 static void
13774 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13776 internal_error ("fold check: original tree changed by fold");
13779 static void
13780 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13781 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13783 const tree_node **slot;
13784 enum tree_code code;
13785 union tree_node *buf;
13786 int i, len;
13788 recursive_label:
13789 if (expr == NULL)
13790 return;
13791 slot = ht->find_slot (expr, INSERT);
13792 if (*slot != NULL)
13793 return;
13794 *slot = expr;
13795 code = TREE_CODE (expr);
13796 if (TREE_CODE_CLASS (code) == tcc_declaration
13797 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13799 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13800 size_t sz = tree_size (expr);
13801 buf = XALLOCAVAR (union tree_node, sz);
13802 memcpy ((char *) buf, expr, sz);
13803 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13804 buf->decl_with_vis.symtab_node = NULL;
13805 buf->base.nowarning_flag = 0;
13806 expr = (tree) buf;
13808 else if (TREE_CODE_CLASS (code) == tcc_type
13809 && (TYPE_POINTER_TO (expr)
13810 || TYPE_REFERENCE_TO (expr)
13811 || TYPE_CACHED_VALUES_P (expr)
13812 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13813 || TYPE_NEXT_VARIANT (expr)
13814 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13816 /* Allow these fields to be modified. */
13817 tree tmp;
13818 size_t sz = tree_size (expr);
13819 buf = XALLOCAVAR (union tree_node, sz);
13820 memcpy ((char *) buf, expr, sz);
13821 expr = tmp = (tree) buf;
13822 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13823 TYPE_POINTER_TO (tmp) = NULL;
13824 TYPE_REFERENCE_TO (tmp) = NULL;
13825 TYPE_NEXT_VARIANT (tmp) = NULL;
13826 TYPE_ALIAS_SET (tmp) = -1;
13827 if (TYPE_CACHED_VALUES_P (tmp))
13829 TYPE_CACHED_VALUES_P (tmp) = 0;
13830 TYPE_CACHED_VALUES (tmp) = NULL;
13833 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13835 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13836 that and change builtins.cc etc. instead - see PR89543. */
13837 size_t sz = tree_size (expr);
13838 buf = XALLOCAVAR (union tree_node, sz);
13839 memcpy ((char *) buf, expr, sz);
13840 buf->base.nowarning_flag = 0;
13841 expr = (tree) buf;
13843 md5_process_bytes (expr, tree_size (expr), ctx);
13844 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13845 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13846 if (TREE_CODE_CLASS (code) != tcc_type
13847 && TREE_CODE_CLASS (code) != tcc_declaration
13848 && code != TREE_LIST
13849 && code != SSA_NAME
13850 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13851 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13852 switch (TREE_CODE_CLASS (code))
13854 case tcc_constant:
13855 switch (code)
13857 case STRING_CST:
13858 md5_process_bytes (TREE_STRING_POINTER (expr),
13859 TREE_STRING_LENGTH (expr), ctx);
13860 break;
13861 case COMPLEX_CST:
13862 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13863 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13864 break;
13865 case VECTOR_CST:
13866 len = vector_cst_encoded_nelts (expr);
13867 for (i = 0; i < len; ++i)
13868 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13869 break;
13870 default:
13871 break;
13873 break;
13874 case tcc_exceptional:
13875 switch (code)
13877 case TREE_LIST:
13878 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13879 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13880 expr = TREE_CHAIN (expr);
13881 goto recursive_label;
13882 break;
13883 case TREE_VEC:
13884 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13885 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13886 break;
13887 default:
13888 break;
13890 break;
13891 case tcc_expression:
13892 case tcc_reference:
13893 case tcc_comparison:
13894 case tcc_unary:
13895 case tcc_binary:
13896 case tcc_statement:
13897 case tcc_vl_exp:
13898 len = TREE_OPERAND_LENGTH (expr);
13899 for (i = 0; i < len; ++i)
13900 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13901 break;
13902 case tcc_declaration:
13903 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13904 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13905 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13907 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13908 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13909 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13910 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13911 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13914 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13916 if (TREE_CODE (expr) == FUNCTION_DECL)
13918 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13919 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13921 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13923 break;
13924 case tcc_type:
13925 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13926 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13927 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13928 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13929 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13930 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13931 if (INTEGRAL_TYPE_P (expr)
13932 || SCALAR_FLOAT_TYPE_P (expr))
13934 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13935 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13937 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13938 if (RECORD_OR_UNION_TYPE_P (expr))
13939 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13940 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13941 break;
13942 default:
13943 break;
13947 /* Helper function for outputting the checksum of a tree T. When
13948 debugging with gdb, you can "define mynext" to be "next" followed
13949 by "call debug_fold_checksum (op0)", then just trace down till the
13950 outputs differ. */
13952 DEBUG_FUNCTION void
13953 debug_fold_checksum (const_tree t)
13955 int i;
13956 unsigned char checksum[16];
13957 struct md5_ctx ctx;
13958 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13960 md5_init_ctx (&ctx);
13961 fold_checksum_tree (t, &ctx, &ht);
13962 md5_finish_ctx (&ctx, checksum);
13963 ht.empty ();
13965 for (i = 0; i < 16; i++)
13966 fprintf (stderr, "%d ", checksum[i]);
13968 fprintf (stderr, "\n");
13971 #endif
13973 /* Fold a unary tree expression with code CODE of type TYPE with an
13974 operand OP0. LOC is the location of the resulting expression.
13975 Return a folded expression if successful. Otherwise, return a tree
13976 expression with code CODE of type TYPE with an operand OP0. */
13978 tree
13979 fold_build1_loc (location_t loc,
13980 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13982 tree tem;
13983 #ifdef ENABLE_FOLD_CHECKING
13984 unsigned char checksum_before[16], checksum_after[16];
13985 struct md5_ctx ctx;
13986 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13988 md5_init_ctx (&ctx);
13989 fold_checksum_tree (op0, &ctx, &ht);
13990 md5_finish_ctx (&ctx, checksum_before);
13991 ht.empty ();
13992 #endif
13994 tem = fold_unary_loc (loc, code, type, op0);
13995 if (!tem)
13996 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13998 #ifdef ENABLE_FOLD_CHECKING
13999 md5_init_ctx (&ctx);
14000 fold_checksum_tree (op0, &ctx, &ht);
14001 md5_finish_ctx (&ctx, checksum_after);
14003 if (memcmp (checksum_before, checksum_after, 16))
14004 fold_check_failed (op0, tem);
14005 #endif
14006 return tem;
14009 /* Fold a binary tree expression with code CODE of type TYPE with
14010 operands OP0 and OP1. LOC is the location of the resulting
14011 expression. Return a folded expression if successful. Otherwise,
14012 return a tree expression with code CODE of type TYPE with operands
14013 OP0 and OP1. */
14015 tree
14016 fold_build2_loc (location_t loc,
14017 enum tree_code code, tree type, tree op0, tree op1
14018 MEM_STAT_DECL)
14020 tree tem;
14021 #ifdef ENABLE_FOLD_CHECKING
14022 unsigned char checksum_before_op0[16],
14023 checksum_before_op1[16],
14024 checksum_after_op0[16],
14025 checksum_after_op1[16];
14026 struct md5_ctx ctx;
14027 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14029 md5_init_ctx (&ctx);
14030 fold_checksum_tree (op0, &ctx, &ht);
14031 md5_finish_ctx (&ctx, checksum_before_op0);
14032 ht.empty ();
14034 md5_init_ctx (&ctx);
14035 fold_checksum_tree (op1, &ctx, &ht);
14036 md5_finish_ctx (&ctx, checksum_before_op1);
14037 ht.empty ();
14038 #endif
14040 tem = fold_binary_loc (loc, code, type, op0, op1);
14041 if (!tem)
14042 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14044 #ifdef ENABLE_FOLD_CHECKING
14045 md5_init_ctx (&ctx);
14046 fold_checksum_tree (op0, &ctx, &ht);
14047 md5_finish_ctx (&ctx, checksum_after_op0);
14048 ht.empty ();
14050 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14051 fold_check_failed (op0, tem);
14053 md5_init_ctx (&ctx);
14054 fold_checksum_tree (op1, &ctx, &ht);
14055 md5_finish_ctx (&ctx, checksum_after_op1);
14057 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14058 fold_check_failed (op1, tem);
14059 #endif
14060 return tem;
14063 /* Fold a ternary tree expression with code CODE of type TYPE with
14064 operands OP0, OP1, and OP2. Return a folded expression if
14065 successful. Otherwise, return a tree expression with code CODE of
14066 type TYPE with operands OP0, OP1, and OP2. */
14068 tree
14069 fold_build3_loc (location_t loc, enum tree_code code, tree type,
14070 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14072 tree tem;
14073 #ifdef ENABLE_FOLD_CHECKING
14074 unsigned char checksum_before_op0[16],
14075 checksum_before_op1[16],
14076 checksum_before_op2[16],
14077 checksum_after_op0[16],
14078 checksum_after_op1[16],
14079 checksum_after_op2[16];
14080 struct md5_ctx ctx;
14081 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14083 md5_init_ctx (&ctx);
14084 fold_checksum_tree (op0, &ctx, &ht);
14085 md5_finish_ctx (&ctx, checksum_before_op0);
14086 ht.empty ();
14088 md5_init_ctx (&ctx);
14089 fold_checksum_tree (op1, &ctx, &ht);
14090 md5_finish_ctx (&ctx, checksum_before_op1);
14091 ht.empty ();
14093 md5_init_ctx (&ctx);
14094 fold_checksum_tree (op2, &ctx, &ht);
14095 md5_finish_ctx (&ctx, checksum_before_op2);
14096 ht.empty ();
14097 #endif
14099 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14100 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14101 if (!tem)
14102 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14104 #ifdef ENABLE_FOLD_CHECKING
14105 md5_init_ctx (&ctx);
14106 fold_checksum_tree (op0, &ctx, &ht);
14107 md5_finish_ctx (&ctx, checksum_after_op0);
14108 ht.empty ();
14110 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14111 fold_check_failed (op0, tem);
14113 md5_init_ctx (&ctx);
14114 fold_checksum_tree (op1, &ctx, &ht);
14115 md5_finish_ctx (&ctx, checksum_after_op1);
14116 ht.empty ();
14118 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14119 fold_check_failed (op1, tem);
14121 md5_init_ctx (&ctx);
14122 fold_checksum_tree (op2, &ctx, &ht);
14123 md5_finish_ctx (&ctx, checksum_after_op2);
14125 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14126 fold_check_failed (op2, tem);
14127 #endif
14128 return tem;
14131 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14132 arguments in ARGARRAY, and a null static chain.
14133 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14134 of type TYPE from the given operands as constructed by build_call_array. */
14136 tree
14137 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14138 int nargs, tree *argarray)
14140 tree tem;
14141 #ifdef ENABLE_FOLD_CHECKING
14142 unsigned char checksum_before_fn[16],
14143 checksum_before_arglist[16],
14144 checksum_after_fn[16],
14145 checksum_after_arglist[16];
14146 struct md5_ctx ctx;
14147 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14148 int i;
14150 md5_init_ctx (&ctx);
14151 fold_checksum_tree (fn, &ctx, &ht);
14152 md5_finish_ctx (&ctx, checksum_before_fn);
14153 ht.empty ();
14155 md5_init_ctx (&ctx);
14156 for (i = 0; i < nargs; i++)
14157 fold_checksum_tree (argarray[i], &ctx, &ht);
14158 md5_finish_ctx (&ctx, checksum_before_arglist);
14159 ht.empty ();
14160 #endif
14162 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14163 if (!tem)
14164 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14166 #ifdef ENABLE_FOLD_CHECKING
14167 md5_init_ctx (&ctx);
14168 fold_checksum_tree (fn, &ctx, &ht);
14169 md5_finish_ctx (&ctx, checksum_after_fn);
14170 ht.empty ();
14172 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14173 fold_check_failed (fn, tem);
14175 md5_init_ctx (&ctx);
14176 for (i = 0; i < nargs; i++)
14177 fold_checksum_tree (argarray[i], &ctx, &ht);
14178 md5_finish_ctx (&ctx, checksum_after_arglist);
14180 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14181 fold_check_failed (NULL_TREE, tem);
14182 #endif
14183 return tem;
14186 /* Perform constant folding and related simplification of initializer
14187 expressions. These behave identically to "fold_buildN" but ignore
14188 potential run-time traps and exceptions that fold must preserve. */
14190 #define START_FOLD_INIT \
14191 int saved_signaling_nans = flag_signaling_nans;\
14192 int saved_trapping_math = flag_trapping_math;\
14193 int saved_rounding_math = flag_rounding_math;\
14194 int saved_trapv = flag_trapv;\
14195 int saved_folding_initializer = folding_initializer;\
14196 flag_signaling_nans = 0;\
14197 flag_trapping_math = 0;\
14198 flag_rounding_math = 0;\
14199 flag_trapv = 0;\
14200 folding_initializer = 1;
14202 #define END_FOLD_INIT \
14203 flag_signaling_nans = saved_signaling_nans;\
14204 flag_trapping_math = saved_trapping_math;\
14205 flag_rounding_math = saved_rounding_math;\
14206 flag_trapv = saved_trapv;\
14207 folding_initializer = saved_folding_initializer;
14209 tree
14210 fold_init (tree expr)
14212 tree result;
14213 START_FOLD_INIT;
14215 result = fold (expr);
14217 END_FOLD_INIT;
14218 return result;
14221 tree
14222 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14223 tree type, tree op)
14225 tree result;
14226 START_FOLD_INIT;
14228 result = fold_build1_loc (loc, code, type, op);
14230 END_FOLD_INIT;
14231 return result;
14234 tree
14235 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14236 tree type, tree op0, tree op1)
14238 tree result;
14239 START_FOLD_INIT;
14241 result = fold_build2_loc (loc, code, type, op0, op1);
14243 END_FOLD_INIT;
14244 return result;
14247 tree
14248 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14249 int nargs, tree *argarray)
14251 tree result;
14252 START_FOLD_INIT;
14254 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14256 END_FOLD_INIT;
14257 return result;
14260 tree
14261 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14262 tree lhs, tree rhs)
14264 tree result;
14265 START_FOLD_INIT;
14267 result = fold_binary_loc (loc, code, type, lhs, rhs);
14269 END_FOLD_INIT;
14270 return result;
14273 #undef START_FOLD_INIT
14274 #undef END_FOLD_INIT
14276 /* Determine if first argument is a multiple of second argument. Return
14277 false if it is not, or if we cannot easily determine it to be.
14279 An example of the sort of thing we care about (at this point; this routine
14280 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14281 fold cases do now) is discovering that
14283 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14285 is a multiple of
14287 SAVE_EXPR (J * 8)
14289 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14291 This code also handles discovering that
14293 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14295 is a multiple of 8 so we don't have to worry about dealing with a
14296 possible remainder.
14298 Note that we *look* inside a SAVE_EXPR only to determine how it was
14299 calculated; it is not safe for fold to do much of anything else with the
14300 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14301 at run time. For example, the latter example above *cannot* be implemented
14302 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14303 evaluation time of the original SAVE_EXPR is not necessarily the same at
14304 the time the new expression is evaluated. The only optimization of this
14305 sort that would be valid is changing
14307 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14309 divided by 8 to
14311 SAVE_EXPR (I) * SAVE_EXPR (J)
14313 (where the same SAVE_EXPR (J) is used in the original and the
14314 transformed version).
14316 NOWRAP specifies whether all outer operations in TYPE should
14317 be considered not wrapping. Any type conversion within TOP acts
14318 as a barrier and we will fall back to NOWRAP being false.
14319 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14320 as not wrapping even though they are generally using unsigned arithmetic. */
14322 bool
14323 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14325 gimple *stmt;
14326 tree op1, op2;
14328 if (operand_equal_p (top, bottom, 0))
14329 return true;
14331 if (TREE_CODE (type) != INTEGER_TYPE)
14332 return false;
14334 switch (TREE_CODE (top))
14336 case BIT_AND_EXPR:
14337 /* Bitwise and provides a power of two multiple. If the mask is
14338 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14339 if (!integer_pow2p (bottom))
14340 return false;
14341 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14342 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14344 case MULT_EXPR:
14345 /* If the multiplication can wrap we cannot recurse further unless
14346 bottom is a power of two, in which case wrapping does not
14347 matter. */
14348 if (!nowrap
14349 && !TYPE_OVERFLOW_UNDEFINED (type)
14350 && !integer_pow2p (bottom))
14351 return false;
14352 if (TREE_CODE (bottom) == INTEGER_CST)
14354 op1 = TREE_OPERAND (top, 0);
14355 op2 = TREE_OPERAND (top, 1);
14356 if (TREE_CODE (op1) == INTEGER_CST)
14357 std::swap (op1, op2);
14358 if (TREE_CODE (op2) == INTEGER_CST)
14360 if (multiple_of_p (type, op2, bottom, nowrap))
14361 return true;
14362 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14363 if (multiple_of_p (type, bottom, op2, nowrap))
14365 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14366 wi::to_widest (op2));
14367 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14369 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14370 return multiple_of_p (type, op1, op2, nowrap);
14373 return multiple_of_p (type, op1, bottom, nowrap);
14376 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14377 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14379 case LSHIFT_EXPR:
14380 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14381 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14383 op1 = TREE_OPERAND (top, 1);
14384 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14386 wide_int mul_op
14387 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14388 return multiple_of_p (type,
14389 wide_int_to_tree (type, mul_op), bottom,
14390 nowrap);
14393 return false;
14395 case MINUS_EXPR:
14396 case PLUS_EXPR:
14397 /* If the addition or subtraction can wrap we cannot recurse further
14398 unless bottom is a power of two, in which case wrapping does not
14399 matter. */
14400 if (!nowrap
14401 && !TYPE_OVERFLOW_UNDEFINED (type)
14402 && !integer_pow2p (bottom))
14403 return false;
14405 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14406 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14407 but 0xfffffffd is not. */
14408 op1 = TREE_OPERAND (top, 1);
14409 if (TREE_CODE (top) == PLUS_EXPR
14410 && nowrap
14411 && TYPE_UNSIGNED (type)
14412 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14413 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14415 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14416 of bottom, so be conservative here and check whether both op0 and op1
14417 are multiples of bottom. Note we check the second operand first
14418 since it's usually simpler. */
14419 return (multiple_of_p (type, op1, bottom, nowrap)
14420 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14422 CASE_CONVERT:
14423 /* Can't handle conversions from non-integral or wider integral type. */
14424 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14425 || (TYPE_PRECISION (type)
14426 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14427 return false;
14428 /* NOWRAP only extends to operations in the outermost type so
14429 make sure to strip it off here. */
14430 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14431 TREE_OPERAND (top, 0), bottom, false);
14433 case SAVE_EXPR:
14434 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14436 case COND_EXPR:
14437 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14438 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14440 case INTEGER_CST:
14441 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14442 return false;
14443 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14444 SIGNED);
14446 case SSA_NAME:
14447 if (TREE_CODE (bottom) == INTEGER_CST
14448 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14449 && gimple_code (stmt) == GIMPLE_ASSIGN)
14451 enum tree_code code = gimple_assign_rhs_code (stmt);
14453 /* Check for special cases to see if top is defined as a multiple
14454 of bottom:
14456 top = (X & ~(bottom - 1)) ; bottom is a power of 2
14460 Y = X % bottom
14461 top = X - Y. */
14462 if (code == BIT_AND_EXPR
14463 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14464 && TREE_CODE (op2) == INTEGER_CST
14465 && integer_pow2p (bottom)
14466 && wi::multiple_of_p (wi::to_widest (op2),
14467 wi::to_widest (bottom), UNSIGNED))
14468 return true;
14470 op1 = gimple_assign_rhs1 (stmt);
14471 if (code == MINUS_EXPR
14472 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14473 && TREE_CODE (op2) == SSA_NAME
14474 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14475 && gimple_code (stmt) == GIMPLE_ASSIGN
14476 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14477 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14478 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14479 return true;
14482 /* fall through */
14484 default:
14485 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14486 return multiple_p (wi::to_poly_widest (top),
14487 wi::to_poly_widest (bottom));
14489 return false;
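/* Worked example for the MULT_EXPR case above (illustrative): for
   multiple_of_p (type, (x * 2 + 2) * 4, 8, true), 8 is not a multiple
   of the constant operand 4, but 4 divides 8 with quotient 2, and
   x * 2 + 2 is a multiple of 2, so the overall answer is true.  */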
14493 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14494 This function returns true for integer expressions, and returns
14495 false if uncertain. */
14497 bool
14498 tree_expr_finite_p (const_tree x)
14500 machine_mode mode = element_mode (x);
14501 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14502 return true;
14503 switch (TREE_CODE (x))
14505 case REAL_CST:
14506 return real_isfinite (TREE_REAL_CST_PTR (x));
14507 case COMPLEX_CST:
14508 return tree_expr_finite_p (TREE_REALPART (x))
14509 && tree_expr_finite_p (TREE_IMAGPART (x));
14510 case FLOAT_EXPR:
14511 return true;
14512 case ABS_EXPR:
14513 case CONVERT_EXPR:
14514 case NON_LVALUE_EXPR:
14515 case NEGATE_EXPR:
14516 case SAVE_EXPR:
14517 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14518 case MIN_EXPR:
14519 case MAX_EXPR:
14520 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14521 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14522 case COND_EXPR:
14523 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14524 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14525 case CALL_EXPR:
14526 switch (get_call_combined_fn (x))
14528 CASE_CFN_FABS:
14529 CASE_CFN_FABS_FN:
14530 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14531 CASE_CFN_FMAX:
14532 CASE_CFN_FMAX_FN:
14533 CASE_CFN_FMIN:
14534 CASE_CFN_FMIN_FN:
14535 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14536 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14537 default:
14538 return false;
14541 default:
14542 return false;
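/* Example (illustrative): tree_expr_finite_p is deliberately
   conservative; FLOAT_EXPR yields true because converting an integer
   always produces a finite value, while an arbitrary SSA_NAME yields
   false even if the value happens to be finite at run time.  */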
14546 /* Return true if expression X evaluates to an infinity.
14547 This function returns false for integer expressions. */
14549 bool
14550 tree_expr_infinite_p (const_tree x)
14552 if (!HONOR_INFINITIES (x))
14553 return false;
14554 switch (TREE_CODE (x))
14556 case REAL_CST:
14557 return real_isinf (TREE_REAL_CST_PTR (x));
14558 case ABS_EXPR:
14559 case NEGATE_EXPR:
14560 case NON_LVALUE_EXPR:
14561 case SAVE_EXPR:
14562 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14563 case COND_EXPR:
14564 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14565 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14566 default:
14567 return false;
14571 /* Return true if expression X could evaluate to an infinity.
14572 This function returns false for integer expressions, and returns
14573 true if uncertain. */
14575 bool
14576 tree_expr_maybe_infinite_p (const_tree x)
14578 if (!HONOR_INFINITIES (x))
14579 return false;
14580 switch (TREE_CODE (x))
14582 case REAL_CST:
14583 return real_isinf (TREE_REAL_CST_PTR (x));
14584 case FLOAT_EXPR:
14585 return false;
14586 case ABS_EXPR:
14587 case NEGATE_EXPR:
14588 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14589 case COND_EXPR:
14590 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14591 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14592 default:
14593 return true;
14597 /* Return true if expression X evaluates to a signaling NaN.
14598 This function returns false for integer expressions. */
14600 bool
14601 tree_expr_signaling_nan_p (const_tree x)
14603 if (!HONOR_SNANS (x))
14604 return false;
14605 switch (TREE_CODE (x))
14607 case REAL_CST:
14608 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14609 case NON_LVALUE_EXPR:
14610 case SAVE_EXPR:
14611 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14612 case COND_EXPR:
14613 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14614 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14615 default:
14616 return false;
14620 /* Return true if expression X could evaluate to a signaling NaN.
14621 This function returns false for integer expressions, and returns
14622 true if uncertain. */
14624 bool
14625 tree_expr_maybe_signaling_nan_p (const_tree x)
14627 if (!HONOR_SNANS (x))
14628 return false;
14629 switch (TREE_CODE (x))
14631 case REAL_CST:
14632 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14633 case FLOAT_EXPR:
14634 return false;
14635 case ABS_EXPR:
14636 case CONVERT_EXPR:
14637 case NEGATE_EXPR:
14638 case NON_LVALUE_EXPR:
14639 case SAVE_EXPR:
14640 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14641 case MIN_EXPR:
14642 case MAX_EXPR:
14643 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14644 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14645 case COND_EXPR:
14646 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14647 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14648 case CALL_EXPR:
14649 switch (get_call_combined_fn (x))
14651 CASE_CFN_FABS:
14652 CASE_CFN_FABS_FN:
14653 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14654 CASE_CFN_FMAX:
14655 CASE_CFN_FMAX_FN:
14656 CASE_CFN_FMIN:
14657 CASE_CFN_FMIN_FN:
14658 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14659 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14660 default:
14661 return true;
14663 default:
14664 return true;
14668 /* Return true if expression X evaluates to a NaN.
14669 This function returns false for integer expressions. */
14671 bool
14672 tree_expr_nan_p (const_tree x)
14674 if (!HONOR_NANS (x))
14675 return false;
14676 switch (TREE_CODE (x))
14678 case REAL_CST:
14679 return real_isnan (TREE_REAL_CST_PTR (x));
14680 case NON_LVALUE_EXPR:
14681 case SAVE_EXPR:
14682 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14683 case COND_EXPR:
14684 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14685 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14686 default:
14687 return false;
14691 /* Return true if expression X could evaluate to a NaN.
14692 This function returns false for integer expressions, and returns
14693 true if uncertain. */
14695 bool
14696 tree_expr_maybe_nan_p (const_tree x)
14698 if (!HONOR_NANS (x))
14699 return false;
14700 switch (TREE_CODE (x))
14702 case REAL_CST:
14703 return real_isnan (TREE_REAL_CST_PTR (x));
14704 case FLOAT_EXPR:
14705 return false;
14706 case PLUS_EXPR:
14707 case MINUS_EXPR:
14708 case MULT_EXPR:
14709 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14710 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14711 case ABS_EXPR:
14712 case CONVERT_EXPR:
14713 case NEGATE_EXPR:
14714 case NON_LVALUE_EXPR:
14715 case SAVE_EXPR:
14716 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14717 case MIN_EXPR:
14718 case MAX_EXPR:
14719 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14720 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14721 case COND_EXPR:
14722 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14723 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14724 case CALL_EXPR:
14725 switch (get_call_combined_fn (x))
14727 CASE_CFN_FABS:
14728 CASE_CFN_FABS_FN:
14729 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14730 CASE_CFN_FMAX:
14731 CASE_CFN_FMAX_FN:
14732 CASE_CFN_FMIN:
14733 CASE_CFN_FMIN_FN:
14734 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14735 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14736 default:
14737 return true;
14739 default:
14740 return true;
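/* Example (illustrative): for PLUS_EXPR above it is not enough that
   neither operand is a NaN; both must be known finite, since e.g.
   +Inf + -Inf evaluates to NaN.  */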
14744 /* Return true if expression X could evaluate to -0.0.
14745 This function returns true if uncertain. */
14747 bool
14748 tree_expr_maybe_real_minus_zero_p (const_tree x)
14750 if (!HONOR_SIGNED_ZEROS (x))
14751 return false;
14752 switch (TREE_CODE (x))
14754 case REAL_CST:
14755 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14756 case INTEGER_CST:
14757 case FLOAT_EXPR:
14758 case ABS_EXPR:
14759 return false;
14760 case NON_LVALUE_EXPR:
14761 case SAVE_EXPR:
14762 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14763 case COND_EXPR:
14764 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14765 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14766 case CALL_EXPR:
14767 switch (get_call_combined_fn (x))
14769 CASE_CFN_FABS:
14770 CASE_CFN_FABS_FN:
14771 return false;
14772 default:
14773 break;
14775 default:
14776 break;
14778 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X)),
14779 but currently those predicates require tree and not const_tree. */
14780 return true;
14783 #define tree_expr_nonnegative_warnv_p(X, Y) \
14784 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14786 #define RECURSE(X) \
14787 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14789 /* Return true if CODE or TYPE is known to be non-negative. */
14791 static bool
14792 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14794 if (!VECTOR_TYPE_P (type)
14795 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14796 && truth_value_p (code))
14797 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14798 have a signed:1 type (where the values are -1 and 0). */
14799 return true;
14800 return false;
14803 /* Return true if (CODE OP0) is known to be non-negative. If the return
14804 value is based on the assumption that signed overflow is undefined,
14805 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14806 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14808 bool
14809 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14810 bool *strict_overflow_p, int depth)
14812 if (TYPE_UNSIGNED (type))
14813 return true;
14815 switch (code)
14817 case ABS_EXPR:
14818 /* We can't return 1 if flag_wrapv is set because
14819 ABS_EXPR<INT_MIN> = INT_MIN. */
14820 if (!ANY_INTEGRAL_TYPE_P (type))
14821 return true;
14822 if (TYPE_OVERFLOW_UNDEFINED (type))
14824 *strict_overflow_p = true;
14825 return true;
14827 break;
14829 case NON_LVALUE_EXPR:
14830 case FLOAT_EXPR:
14831 case FIX_TRUNC_EXPR:
14832 return RECURSE (op0);
14834 CASE_CONVERT:
14836 tree inner_type = TREE_TYPE (op0);
14837 tree outer_type = type;
14839 if (SCALAR_FLOAT_TYPE_P (outer_type))
14841 if (SCALAR_FLOAT_TYPE_P (inner_type))
14842 return RECURSE (op0);
14843 if (INTEGRAL_TYPE_P (inner_type))
14845 if (TYPE_UNSIGNED (inner_type))
14846 return true;
14847 return RECURSE (op0);
14850 else if (INTEGRAL_TYPE_P (outer_type))
14852 if (SCALAR_FLOAT_TYPE_P (inner_type))
14853 return RECURSE (op0);
14854 if (INTEGRAL_TYPE_P (inner_type))
14855 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14856 && TYPE_UNSIGNED (inner_type);
14859 break;
14861 default:
14862 return tree_simple_nonnegative_warnv_p (code, type);
14865 /* We don't know sign of `t', so be conservative and return false. */
14866 return false;
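/* Example (illustrative): for signed int with -fwrapv,
   ABS_EXPR <INT_MIN> evaluates to INT_MIN, so the ABS_EXPR case above
   answers true only when signed overflow is undefined, recording that
   assumption in *STRICT_OVERFLOW_P.  */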
14869 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14870 value is based on the assumption that signed overflow is undefined,
14871 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14872 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14874 bool
14875 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14876 tree op1, bool *strict_overflow_p,
14877 int depth)
14879 if (TYPE_UNSIGNED (type))
14880 return true;
14882 switch (code)
14884 case POINTER_PLUS_EXPR:
14885 case PLUS_EXPR:
14886 if (FLOAT_TYPE_P (type))
14887 return RECURSE (op0) && RECURSE (op1);
14889 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14890 both unsigned and at least 2 bits shorter than the result. */
14891 if (TREE_CODE (type) == INTEGER_TYPE
14892 && TREE_CODE (op0) == NOP_EXPR
14893 && TREE_CODE (op1) == NOP_EXPR)
14895 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14896 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14897 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14898 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14900 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14901 TYPE_PRECISION (inner2)) + 1;
14902 return prec < TYPE_PRECISION (type);
14905 break;
14907 case MULT_EXPR:
14908 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14910 /* x * x is always non-negative for floating point x
14911 or when signed overflow is undefined. */
14912 if (operand_equal_p (op0, op1, 0)
14913 || (RECURSE (op0) && RECURSE (op1)))
14915 if (ANY_INTEGRAL_TYPE_P (type)
14916 && TYPE_OVERFLOW_UNDEFINED (type))
14917 *strict_overflow_p = true;
14918 return true;
14922 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14923 both unsigned and their combined precision is less than the result's. */
14924 if (TREE_CODE (type) == INTEGER_TYPE
14925 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14926 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14928 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14929 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14930 : TREE_TYPE (op0);
14931 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14932 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14933 : TREE_TYPE (op1);
14935 bool unsigned0 = TYPE_UNSIGNED (inner0);
14936 bool unsigned1 = TYPE_UNSIGNED (inner1);
14938 if (TREE_CODE (op0) == INTEGER_CST)
14939 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14941 if (TREE_CODE (op1) == INTEGER_CST)
14942 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14944 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14945 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14947 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14948 ? tree_int_cst_min_precision (op0, UNSIGNED)
14949 : TYPE_PRECISION (inner0);
14951 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14952 ? tree_int_cst_min_precision (op1, UNSIGNED)
14953 : TYPE_PRECISION (inner1);
14955 return precision0 + precision1 < TYPE_PRECISION (type);
14958 return false;
14960 case BIT_AND_EXPR:
14961 return RECURSE (op0) || RECURSE (op1);
14963 case MAX_EXPR:
14964 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14965 things. */
14966 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14967 return RECURSE (op0) && RECURSE (op1);
14968 return RECURSE (op0) || RECURSE (op1);
14970 case BIT_IOR_EXPR:
14971 case BIT_XOR_EXPR:
14972 case MIN_EXPR:
14973 case RDIV_EXPR:
14974 case TRUNC_DIV_EXPR:
14975 case CEIL_DIV_EXPR:
14976 case FLOOR_DIV_EXPR:
14977 case ROUND_DIV_EXPR:
14978 return RECURSE (op0) && RECURSE (op1);
14980 case TRUNC_MOD_EXPR:
14981 return RECURSE (op0);
14983 case FLOOR_MOD_EXPR:
14984 return RECURSE (op1);
14986 case CEIL_MOD_EXPR:
14987 case ROUND_MOD_EXPR:
14988 default:
14989 return tree_simple_nonnegative_warnv_p (code, type);
14992 /* We don't know sign of `t', so be conservative and return false. */
14993 return false;
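/* Worked example for the PLUS_EXPR case above (illustrative): with a
   32-bit int result, (int) us0 + (int) us1 for 16-bit unsigned shorts
   us0 and us1 is known non-negative, since MAX (16, 16) + 1 == 17 bits
   is less than the 32-bit precision of the result.  */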
14996 /* Return true if T is known to be non-negative. If the return
14997 value is based on the assumption that signed overflow is undefined,
14998 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14999 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15001 bool
15002 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15004 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15005 return true;
15007 switch (TREE_CODE (t))
15009 case INTEGER_CST:
15010 return tree_int_cst_sgn (t) >= 0;
15012 case REAL_CST:
15013 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15015 case FIXED_CST:
15016 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15018 case COND_EXPR:
15019 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15021 case SSA_NAME:
15022 /* Limit the depth of recursion to avoid quadratic behavior.
15023 This is expected to catch almost all occurrences in practice.
15024 If this code misses important cases that unbounded recursion
15025 would not, passes that need this information could be revised
15026 to provide it through dataflow propagation. */
15027 return (!name_registered_for_update_p (t)
15028 && depth < param_max_ssa_name_query_depth
15029 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15030 strict_overflow_p, depth));
15032 default:
15033 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15037 /* Return true if T is known to be non-negative. If the return
15038 value is based on the assumption that signed overflow is undefined,
15039 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15040 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15042 bool
15043 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15044 bool *strict_overflow_p, int depth)
15046 switch (fn)
15048 CASE_CFN_ACOS:
15049 CASE_CFN_ACOS_FN:
15050 CASE_CFN_ACOSH:
15051 CASE_CFN_ACOSH_FN:
15052 CASE_CFN_CABS:
15053 CASE_CFN_CABS_FN:
15054 CASE_CFN_COSH:
15055 CASE_CFN_COSH_FN:
15056 CASE_CFN_ERFC:
15057 CASE_CFN_ERFC_FN:
15058 CASE_CFN_EXP:
15059 CASE_CFN_EXP_FN:
15060 CASE_CFN_EXP10:
15061 CASE_CFN_EXP2:
15062 CASE_CFN_EXP2_FN:
15063 CASE_CFN_FABS:
15064 CASE_CFN_FABS_FN:
15065 CASE_CFN_FDIM:
15066 CASE_CFN_FDIM_FN:
15067 CASE_CFN_HYPOT:
15068 CASE_CFN_HYPOT_FN:
15069 CASE_CFN_POW10:
15070 CASE_CFN_FFS:
15071 CASE_CFN_PARITY:
15072 CASE_CFN_POPCOUNT:
15073 CASE_CFN_CLZ:
15074 CASE_CFN_CLRSB:
15075 case CFN_BUILT_IN_BSWAP16:
15076 case CFN_BUILT_IN_BSWAP32:
15077 case CFN_BUILT_IN_BSWAP64:
15078 case CFN_BUILT_IN_BSWAP128:
15079 /* Always true. */
15080 return true;
15082 CASE_CFN_SQRT:
15083 CASE_CFN_SQRT_FN:
15084 /* sqrt(-0.0) is -0.0. */
15085 if (!HONOR_SIGNED_ZEROS (type))
15086 return true;
15087 return RECURSE (arg0);
15089 CASE_CFN_ASINH:
15090 CASE_CFN_ASINH_FN:
15091 CASE_CFN_ATAN:
15092 CASE_CFN_ATAN_FN:
15093 CASE_CFN_ATANH:
15094 CASE_CFN_ATANH_FN:
15095 CASE_CFN_CBRT:
15096 CASE_CFN_CBRT_FN:
15097 CASE_CFN_CEIL:
15098 CASE_CFN_CEIL_FN:
15099 CASE_CFN_ERF:
15100 CASE_CFN_ERF_FN:
15101 CASE_CFN_EXPM1:
15102 CASE_CFN_EXPM1_FN:
15103 CASE_CFN_FLOOR:
15104 CASE_CFN_FLOOR_FN:
15105 CASE_CFN_FMOD:
15106 CASE_CFN_FMOD_FN:
15107 CASE_CFN_FREXP:
15108 CASE_CFN_FREXP_FN:
15109 CASE_CFN_ICEIL:
15110 CASE_CFN_IFLOOR:
15111 CASE_CFN_IRINT:
15112 CASE_CFN_IROUND:
15113 CASE_CFN_LCEIL:
15114 CASE_CFN_LDEXP:
15115 CASE_CFN_LFLOOR:
15116 CASE_CFN_LLCEIL:
15117 CASE_CFN_LLFLOOR:
15118 CASE_CFN_LLRINT:
15119 CASE_CFN_LLRINT_FN:
15120 CASE_CFN_LLROUND:
15121 CASE_CFN_LLROUND_FN:
15122 CASE_CFN_LRINT:
15123 CASE_CFN_LRINT_FN:
15124 CASE_CFN_LROUND:
15125 CASE_CFN_LROUND_FN:
15126 CASE_CFN_MODF:
15127 CASE_CFN_MODF_FN:
15128 CASE_CFN_NEARBYINT:
15129 CASE_CFN_NEARBYINT_FN:
15130 CASE_CFN_RINT:
15131 CASE_CFN_RINT_FN:
15132 CASE_CFN_ROUND:
15133 CASE_CFN_ROUND_FN:
15134 CASE_CFN_ROUNDEVEN:
15135 CASE_CFN_ROUNDEVEN_FN:
15136 CASE_CFN_SCALB:
15137 CASE_CFN_SCALBLN:
15138 CASE_CFN_SCALBLN_FN:
15139 CASE_CFN_SCALBN:
15140 CASE_CFN_SCALBN_FN:
15141 CASE_CFN_SIGNBIT:
15142 CASE_CFN_SIGNIFICAND:
15143 CASE_CFN_SINH:
15144 CASE_CFN_SINH_FN:
15145 CASE_CFN_TANH:
15146 CASE_CFN_TANH_FN:
15147 CASE_CFN_TRUNC:
15148 CASE_CFN_TRUNC_FN:
15149 /* True if the 1st argument is nonnegative. */
15150 return RECURSE (arg0);
15152 CASE_CFN_FMAX:
15153 CASE_CFN_FMAX_FN:
15154 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15155 things. In the presence of sNaNs, we're only guaranteed to be
15156 non-negative if both operands are non-negative. In the presence
15157 of qNaNs, we're non-negative if either operand is non-negative
15158 and can't be a qNaN, or if both operands are non-negative. */
15159 if (tree_expr_maybe_signaling_nan_p (arg0)
15160 || tree_expr_maybe_signaling_nan_p (arg1))
15161 return RECURSE (arg0) && RECURSE (arg1);
15162 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15163 || RECURSE (arg1))
15164 : (RECURSE (arg1)
15165 && !tree_expr_maybe_nan_p (arg1));
15167 CASE_CFN_FMIN:
15168 CASE_CFN_FMIN_FN:
15169 /* True if the 1st AND 2nd arguments are nonnegative. */
15170 return RECURSE (arg0) && RECURSE (arg1);
15172 CASE_CFN_COPYSIGN:
15173 CASE_CFN_COPYSIGN_FN:
15174 /* True if the 2nd argument is nonnegative. */
15175 return RECURSE (arg1);
15177 CASE_CFN_POWI:
15178 /* True if the 1st argument is nonnegative or the second
15179 argument is an even integer. */
15180 if (TREE_CODE (arg1) == INTEGER_CST
15181 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15182 return true;
15183 return RECURSE (arg0);
15185 CASE_CFN_POW:
15186 CASE_CFN_POW_FN:
15187 /* True if the 1st argument is nonnegative or the second
15188 argument is an even integer valued real. */
15189 if (TREE_CODE (arg1) == REAL_CST)
15191 REAL_VALUE_TYPE c;
15192 HOST_WIDE_INT n;
15194 c = TREE_REAL_CST (arg1);
15195 n = real_to_integer (&c);
15196 if ((n & 1) == 0)
15198 REAL_VALUE_TYPE cint;
15199 real_from_integer (&cint, VOIDmode, n, SIGNED);
15200 if (real_identical (&c, &cint))
15201 return true;
15204 return RECURSE (arg0);
15206 default:
15207 break;
15209 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
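/* Example (illustrative): pow (x, 2.0) is treated as non-negative
   regardless of the sign of x, since 2.0 is an even integer valued
   real, whereas pow (x, 0.5) is non-negative only if x is.  */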
15212 /* Return true if T is known to be non-negative. If the return
15213 value is based on the assumption that signed overflow is undefined,
15214 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15215 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15217 static bool
15218 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15220 enum tree_code code = TREE_CODE (t);
15221 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15222 return true;
15224 switch (code)
15226 case TARGET_EXPR:
15228 tree temp = TARGET_EXPR_SLOT (t);
15229 t = TARGET_EXPR_INITIAL (t);
15231 /* If the initializer is non-void, then it's a normal expression
15232 that will be assigned to the slot. */
15233 if (!VOID_TYPE_P (TREE_TYPE (t)))
15234 return RECURSE (t);
15236 /* Otherwise, the initializer sets the slot in some way. One common
15237 way is an assignment statement at the end of the initializer. */
15238 while (1)
15240 if (TREE_CODE (t) == BIND_EXPR)
15241 t = expr_last (BIND_EXPR_BODY (t));
15242 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15243 || TREE_CODE (t) == TRY_CATCH_EXPR)
15244 t = expr_last (TREE_OPERAND (t, 0));
15245 else if (TREE_CODE (t) == STATEMENT_LIST)
15246 t = expr_last (t);
15247 else
15248 break;
15250 if (TREE_CODE (t) == MODIFY_EXPR
15251 && TREE_OPERAND (t, 0) == temp)
15252 return RECURSE (TREE_OPERAND (t, 1));
15254 return false;
15257 case CALL_EXPR:
15259 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15260 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15262 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15263 get_call_combined_fn (t),
15264 arg0,
15265 arg1,
15266 strict_overflow_p, depth);
15268 case COMPOUND_EXPR:
15269 case MODIFY_EXPR:
15270 return RECURSE (TREE_OPERAND (t, 1));
15272 case BIND_EXPR:
15273 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15275 case SAVE_EXPR:
15276 return RECURSE (TREE_OPERAND (t, 0));
15278 default:
15279 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15283 #undef RECURSE
15284 #undef tree_expr_nonnegative_warnv_p
15286 /* Return true if T is known to be non-negative. If the return
15287 value is based on the assumption that signed overflow is undefined,
15288 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15289 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15291 bool
15292 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15294 enum tree_code code;
15295 if (t == error_mark_node)
15296 return false;
15298 code = TREE_CODE (t);
15299 switch (TREE_CODE_CLASS (code))
15301 case tcc_binary:
15302 case tcc_comparison:
15303 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15304 TREE_TYPE (t),
15305 TREE_OPERAND (t, 0),
15306 TREE_OPERAND (t, 1),
15307 strict_overflow_p, depth);
15309 case tcc_unary:
15310 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15311 TREE_TYPE (t),
15312 TREE_OPERAND (t, 0),
15313 strict_overflow_p, depth);
15315 case tcc_constant:
15316 case tcc_declaration:
15317 case tcc_reference:
15318 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15320 default:
15321 break;
15324 switch (code)
15326 case TRUTH_AND_EXPR:
15327 case TRUTH_OR_EXPR:
15328 case TRUTH_XOR_EXPR:
15329 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15330 TREE_TYPE (t),
15331 TREE_OPERAND (t, 0),
15332 TREE_OPERAND (t, 1),
15333 strict_overflow_p, depth);
15334 case TRUTH_NOT_EXPR:
15335 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15336 TREE_TYPE (t),
15337 TREE_OPERAND (t, 0),
15338 strict_overflow_p, depth);
15340 case COND_EXPR:
15341 case CONSTRUCTOR:
15342 case OBJ_TYPE_REF:
15343 case ADDR_EXPR:
15344 case WITH_SIZE_EXPR:
15345 case SSA_NAME:
15346 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15348 default:
15349 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15353 /* Return true if `t' is known to be non-negative. Handle warnings
15354 about undefined signed overflow. */
15356 bool
15357 tree_expr_nonnegative_p (tree t)
15359 bool ret, strict_overflow_p;
15361 strict_overflow_p = false;
15362 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15363 if (strict_overflow_p)
15364 fold_overflow_warning (("assuming signed overflow does not occur when "
15365 "determining that expression is always "
15366 "non-negative"),
15367 WARN_STRICT_OVERFLOW_MISC);
15368 return ret;
15372 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15373 For floating point we further ensure that T is not denormal.
15374 Similar logic is present in nonzero_address in rtlanal.h.
15376 If the return value is based on the assumption that signed overflow
15377 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15378 change *STRICT_OVERFLOW_P. */
15380 bool
15381 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15382 bool *strict_overflow_p)
15384 switch (code)
15386 case ABS_EXPR:
15387 return tree_expr_nonzero_warnv_p (op0,
15388 strict_overflow_p);
15390 case NOP_EXPR:
15392 tree inner_type = TREE_TYPE (op0);
15393 tree outer_type = type;
15395 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15396 && tree_expr_nonzero_warnv_p (op0,
15397 strict_overflow_p));
15399 break;
15401 case NON_LVALUE_EXPR:
15402 return tree_expr_nonzero_warnv_p (op0,
15403 strict_overflow_p);
15405 default:
15406 break;
15409 return false;
15412 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15413 For floating point we further ensure that T is not denormal.
15414 Similar logic is present in nonzero_address in rtlanal.h.
15416 If the return value is based on the assumption that signed overflow
15417 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15418 change *STRICT_OVERFLOW_P. */
15420 bool
15421 tree_binary_nonzero_warnv_p (enum tree_code code,
15422 tree type,
15423 tree op0,
15424 tree op1, bool *strict_overflow_p)
15426 bool sub_strict_overflow_p;
15427 switch (code)
15429 case POINTER_PLUS_EXPR:
15430 case PLUS_EXPR:
15431 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15433 /* In the presence of negative values it is hard
15434 to say anything definite. */
15435 sub_strict_overflow_p = false;
15436 if (!tree_expr_nonnegative_warnv_p (op0,
15437 &sub_strict_overflow_p)
15438 || !tree_expr_nonnegative_warnv_p (op1,
15439 &sub_strict_overflow_p))
15440 return false;
15441 /* One of the operands must be positive and the other non-negative. */
15442 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15443 overflows, on a twos-complement machine the sum of two
15444 nonnegative numbers can never be zero. */
15445 return (tree_expr_nonzero_warnv_p (op0,
15446 strict_overflow_p)
15447 || tree_expr_nonzero_warnv_p (op1,
15448 strict_overflow_p));
15450 break;
15452 case MULT_EXPR:
15453 if (TYPE_OVERFLOW_UNDEFINED (type))
15455 if (tree_expr_nonzero_warnv_p (op0,
15456 strict_overflow_p)
15457 && tree_expr_nonzero_warnv_p (op1,
15458 strict_overflow_p))
15460 *strict_overflow_p = true;
15461 return true;
15464 break;
15466 case MIN_EXPR:
15467 sub_strict_overflow_p = false;
15468 if (tree_expr_nonzero_warnv_p (op0,
15469 &sub_strict_overflow_p)
15470 && tree_expr_nonzero_warnv_p (op1,
15471 &sub_strict_overflow_p))
15473 if (sub_strict_overflow_p)
15474 *strict_overflow_p = true;
15476 break;
15478 case MAX_EXPR:
15479 sub_strict_overflow_p = false;
15480 if (tree_expr_nonzero_warnv_p (op0,
15481 &sub_strict_overflow_p))
15483 if (sub_strict_overflow_p)
15484 *strict_overflow_p = true;
15486 /* When both operands are nonzero, then MAX must be too. */
15487 if (tree_expr_nonzero_warnv_p (op1,
15488 strict_overflow_p))
15489 return true;
15491 /* MAX where operand 0 is positive is positive. */
15492 return tree_expr_nonnegative_warnv_p (op0,
15493 strict_overflow_p);
15495 /* MAX where operand 1 is positive is positive. */
15496 else if (tree_expr_nonzero_warnv_p (op1,
15497 &sub_strict_overflow_p)
15498 && tree_expr_nonnegative_warnv_p (op1,
15499 &sub_strict_overflow_p))
15501 if (sub_strict_overflow_p)
15502 *strict_overflow_p = true;
15503 return true;
15505 break;
15507 case BIT_IOR_EXPR:
15508 return (tree_expr_nonzero_warnv_p (op1,
15509 strict_overflow_p)
15510 || tree_expr_nonzero_warnv_p (op0,
15511 strict_overflow_p));
15513 default:
15514 break;
15517 return false;
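/* Example (illustrative): when signed overflow is undefined, x + 1
   with x known non-negative is nonzero via the PLUS_EXPR case above:
   both operands are non-negative and one of them is nonzero.  */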
15520 /* Return true when T is an address and is known to be nonzero.
15521 For floating point we further ensure that T is not denormal.
15522 Similar logic is present in nonzero_address in rtlanal.h.
15524 If the return value is based on the assumption that signed overflow
15525 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15526 change *STRICT_OVERFLOW_P. */
15528 bool
15529 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15531 bool sub_strict_overflow_p;
15532 switch (TREE_CODE (t))
15534 case INTEGER_CST:
15535 return !integer_zerop (t);
15537 case ADDR_EXPR:
15539 tree base = TREE_OPERAND (t, 0);
15541 if (!DECL_P (base))
15542 base = get_base_address (base);
15544 if (base && TREE_CODE (base) == TARGET_EXPR)
15545 base = TARGET_EXPR_SLOT (base);
15547 if (!base)
15548 return false;
15550 /* For objects in symbol table check if we know they are non-zero.
15551 Don't do anything for variables and functions before symtab is built;
15552 it is quite possible that they will be declared weak later. */
15553 int nonzero_addr = maybe_nonzero_address (base);
15554 if (nonzero_addr >= 0)
15555 return nonzero_addr;
15557 /* Constants are never weak. */
15558 if (CONSTANT_CLASS_P (base))
15559 return true;
15561 return false;
15564 case COND_EXPR:
15565 sub_strict_overflow_p = false;
15566 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15567 &sub_strict_overflow_p)
15568 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15569 &sub_strict_overflow_p))
15571 if (sub_strict_overflow_p)
15572 *strict_overflow_p = true;
15573 return true;
15575 break;
15577 case SSA_NAME:
15578 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15579 break;
15580 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15582 default:
15583 break;
15585 return false;
15588 #define integer_valued_real_p(X) \
15589 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15591 #define RECURSE(X) \
15592 ((integer_valued_real_p) (X, depth + 1))
15594 /* Return true if the floating point result of (CODE OP0) has an
15595 integer value. We also allow +Inf, -Inf and NaN to be considered
15596 integer values. Return false for signaling NaN.
15598 DEPTH is the current nesting depth of the query. */
15600 bool
15601 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15603 switch (code)
15605 case FLOAT_EXPR:
15606 return true;
15608 case ABS_EXPR:
15609 return RECURSE (op0);
15611 CASE_CONVERT:
15613 tree type = TREE_TYPE (op0);
15614 if (TREE_CODE (type) == INTEGER_TYPE)
15615 return true;
15616 if (SCALAR_FLOAT_TYPE_P (type))
15617 return RECURSE (op0);
15618 break;
15621 default:
15622 break;
15624 return false;
15627 /* Return true if the floating point result of (CODE OP0 OP1) has an
15628 integer value. We also allow +Inf, -Inf and NaN to be considered
15629 integer values. Return false for signaling NaN.
15631 DEPTH is the current nesting depth of the query. */
15633 bool
15634 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15636 switch (code)
15638 case PLUS_EXPR:
15639 case MINUS_EXPR:
15640 case MULT_EXPR:
15641 case MIN_EXPR:
15642 case MAX_EXPR:
15643 return RECURSE (op0) && RECURSE (op1);
15645 default:
15646 break;
15648 return false;
15651 /* Return true if the floating point result of calling FNDECL with arguments
15652 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15653 considered integer values. Return false for signaling NaN. If FNDECL
15654 takes fewer than 2 arguments, the remaining ARGn are null.
15656 DEPTH is the current nesting depth of the query. */
15658 bool
15659 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15661 switch (fn)
15663 CASE_CFN_CEIL:
15664 CASE_CFN_CEIL_FN:
15665 CASE_CFN_FLOOR:
15666 CASE_CFN_FLOOR_FN:
15667 CASE_CFN_NEARBYINT:
15668 CASE_CFN_NEARBYINT_FN:
15669 CASE_CFN_RINT:
15670 CASE_CFN_RINT_FN:
15671 CASE_CFN_ROUND:
15672 CASE_CFN_ROUND_FN:
15673 CASE_CFN_ROUNDEVEN:
15674 CASE_CFN_ROUNDEVEN_FN:
15675 CASE_CFN_TRUNC:
15676 CASE_CFN_TRUNC_FN:
15677 return true;
15679 CASE_CFN_FMIN:
15680 CASE_CFN_FMIN_FN:
15681 CASE_CFN_FMAX:
15682 CASE_CFN_FMAX_FN:
15683 return RECURSE (arg0) && RECURSE (arg1);
15685 default:
15686 break;
15688 return false;
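/* Example (illustrative): trunc (x) is integer valued by definition,
   and so is fmax (trunc (x), 2.0), since both of its arguments are;
   sqrt (x) is not handled here and yields false.  */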
15691 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15692 has an integer value. We also allow +Inf, -Inf and NaN to be
15693 considered integer values. Return false for signaling NaN.
15695 DEPTH is the current nesting depth of the query. */
15697 bool
15698 integer_valued_real_single_p (tree t, int depth)
15700 switch (TREE_CODE (t))
15702 case REAL_CST:
15703 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15705 case COND_EXPR:
15706 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15708 case SSA_NAME:
15709 /* Limit the depth of recursion to avoid quadratic behavior.
15710 This is expected to catch almost all occurrences in practice.
15711 If this code misses important cases that unbounded recursion
15712 would not, passes that need this information could be revised
15713 to provide it through dataflow propagation. */
15714 return (!name_registered_for_update_p (t)
15715 && depth < param_max_ssa_name_query_depth
15716 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15717 depth));
15719 default:
15720 break;
15722 return false;
15725 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15726 has an integer value. We also allow +Inf, -Inf and NaN to be
15727 considered integer values. Return false for signaling NaN.
15729 DEPTH is the current nesting depth of the query. */
15731 static bool
15732 integer_valued_real_invalid_p (tree t, int depth)
15734 switch (TREE_CODE (t))
15736 case COMPOUND_EXPR:
15737 case MODIFY_EXPR:
15738 case BIND_EXPR:
15739 return RECURSE (TREE_OPERAND (t, 1));
15741 case SAVE_EXPR:
15742 return RECURSE (TREE_OPERAND (t, 0));
15744 default:
15745 break;
15747 return false;
15750 #undef RECURSE
15751 #undef integer_valued_real_p
15753 /* Return true if the floating point expression T has an integer value.
15754 We also allow +Inf, -Inf and NaN to be considered integer values.
15755 Return false for signaling NaN.
15757 DEPTH is the current nesting depth of the query. */
15759 bool
15760 integer_valued_real_p (tree t, int depth)
15762 if (t == error_mark_node)
15763 return false;
15765 STRIP_ANY_LOCATION_WRAPPER (t);
15767 tree_code code = TREE_CODE (t);
15768 switch (TREE_CODE_CLASS (code))
15770 case tcc_binary:
15771 case tcc_comparison:
15772 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15773 TREE_OPERAND (t, 1), depth);
15775 case tcc_unary:
15776 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15778 case tcc_constant:
15779 case tcc_declaration:
15780 case tcc_reference:
15781 return integer_valued_real_single_p (t, depth);
15783 default:
15784 break;
15787 switch (code)
15789 case COND_EXPR:
15790 case SSA_NAME:
15791 return integer_valued_real_single_p (t, depth);
15793 case CALL_EXPR:
15795 tree arg0 = (call_expr_nargs (t) > 0
15796 ? CALL_EXPR_ARG (t, 0)
15797 : NULL_TREE);
15798 tree arg1 = (call_expr_nargs (t) > 1
15799 ? CALL_EXPR_ARG (t, 1)
15800 : NULL_TREE);
15801 return integer_valued_real_call_p (get_call_combined_fn (t),
15802 arg0, arg1, depth);
15805 default:
15806 return integer_valued_real_invalid_p (t, depth);
15810 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15811 attempt to fold the expression to a constant without modifying TYPE,
15812 OP0 or OP1.
15814 If the expression could be simplified to a constant, then return
15815 the constant. If the expression would not be simplified to a
15816 constant, then return NULL_TREE. */
15818 tree
15819 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15821 tree tem = fold_binary (code, type, op0, op1);
15822 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15825 /* Given the components of a unary expression CODE, TYPE and OP0,
15826 attempt to fold the expression to a constant without modifying
15827 TYPE or OP0.
15829 If the expression could be simplified to a constant, then return
15830 the constant. If the expression would not be simplified to a
15831 constant, then return NULL_TREE. */
15833 tree
15834 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15836 tree tem = fold_unary (code, type, op0);
15837 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15840 /* If EXP represents referencing an element in a constant string
15841 (either via pointer arithmetic or array indexing), return the
15842 tree representing the value accessed, otherwise return NULL. */
15844 tree
15845 fold_read_from_constant_string (tree exp)
15847 if ((INDIRECT_REF_P (exp)
15848 || TREE_CODE (exp) == ARRAY_REF)
15849 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15851 tree exp1 = TREE_OPERAND (exp, 0);
15852 tree index;
15853 tree string;
15854 location_t loc = EXPR_LOCATION (exp);
15856 if (INDIRECT_REF_P (exp))
15857 string = string_constant (exp1, &index, NULL, NULL);
15858 else
15860 tree low_bound = array_ref_low_bound (exp);
15861 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15863 /* Optimize the special-case of a zero lower bound.
15865 We convert the low_bound to sizetype to avoid some problems
15866 with constant folding. (E.g. suppose the lower bound is 1,
15867 and its mode is QI. Without the conversion, (ARRAY
15868 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15869 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15870 if (! integer_zerop (low_bound))
15871 index = size_diffop_loc (loc, index,
15872 fold_convert_loc (loc, sizetype, low_bound));
15874 string = exp1;
15877 scalar_int_mode char_mode;
15878 if (string
15879 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15880 && TREE_CODE (string) == STRING_CST
15881 && tree_fits_uhwi_p (index)
15882 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15883 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15884 &char_mode)
15885 && GET_MODE_SIZE (char_mode) == 1)
15886 return build_int_cst_type (TREE_TYPE (exp),
15887 (TREE_STRING_POINTER (string)
15888 [TREE_INT_CST_LOW (index)]));
15890 return NULL;
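/* Example (illustrative): a read such as "abc"[1] satisfies all the
   checks above and folds to the INTEGER_CST 'b', since the index is a
   known constant within the string bounds and the element mode is a
   one-byte integer mode.  */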
15893 /* Folds a read from vector element at IDX of vector ARG. */
15895 tree
15896 fold_read_from_vector (tree arg, poly_uint64 idx)
15898 unsigned HOST_WIDE_INT i;
15899 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15900 && known_ge (idx, 0u)
15901 && idx.is_constant (&i))
15903 if (TREE_CODE (arg) == VECTOR_CST)
15904 return VECTOR_CST_ELT (arg, i);
15905 else if (TREE_CODE (arg) == CONSTRUCTOR)
15907 if (CONSTRUCTOR_NELTS (arg)
15908 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15909 return NULL_TREE;
15910 if (i >= CONSTRUCTOR_NELTS (arg))
15911 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15912 return CONSTRUCTOR_ELT (arg, i)->value;
15915 return NULL_TREE;
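/* Example (illustrative): reading element 1 of the VECTOR_CST
   { 1, 2, 3, 4 } yields 2, while reading element 3 of a four-element
   vector CONSTRUCTOR that records only two scalar elements yields the
   zero of the element type.  */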
15918 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15919 an integer constant, real, or fixed-point constant.
15921 TYPE is the type of the result. */
15923 static tree
15924 fold_negate_const (tree arg0, tree type)
15926 tree t = NULL_TREE;
15928 switch (TREE_CODE (arg0))
15930 case REAL_CST:
15931 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15932 break;
15934 case FIXED_CST:
15936 FIXED_VALUE_TYPE f;
15937 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15938 &(TREE_FIXED_CST (arg0)), NULL,
15939 TYPE_SATURATING (type));
15940 t = build_fixed (type, f);
15941 /* Propagate overflow flags. */
15942 if (overflow_p | TREE_OVERFLOW (arg0))
15943 TREE_OVERFLOW (t) = 1;
15944 break;
15947 default:
15948 if (poly_int_tree_p (arg0))
15950 wi::overflow_type overflow;
15951 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15952 t = force_fit_type (type, res, 1,
15953 (overflow && ! TYPE_UNSIGNED (type))
15954 || TREE_OVERFLOW (arg0));
15955 break;
15958 gcc_unreachable ();
15961 return t;
15964 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15965 an integer constant or real constant.
15967 TYPE is the type of the result. */
15969 tree
15970 fold_abs_const (tree arg0, tree type)
15972 tree t = NULL_TREE;
15974 switch (TREE_CODE (arg0))
15976 case INTEGER_CST:
15978 /* If the value is unsigned or non-negative, then the absolute value
15979 is the same as the ordinary value. */
15980 wide_int val = wi::to_wide (arg0);
15981 wi::overflow_type overflow = wi::OVF_NONE;
15982 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15985 /* If the value is negative, then the absolute value is
15986 its negation. */
15987 else
15988 val = wi::neg (val, &overflow);
15990 /* Force to the destination type, set TREE_OVERFLOW for signed
15991 TYPE only. */
15992 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15994 break;
15996 case REAL_CST:
15997 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15998 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15999 else
16000 t = arg0;
16001 break;
16003 default:
16004 gcc_unreachable ();
16007 return t;
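/* Example (illustrative): fold_abs_const on INT_MIN negates the value;
   the negation overflows, so the result is INT_MIN again but with
   TREE_OVERFLOW set via force_fit_type.  */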
16010 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16011 constant. TYPE is the type of the result. */
16013 static tree
16014 fold_not_const (const_tree arg0, tree type)
16016 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16018 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
16021 /* Given CODE, a relational operator, the target type, TYPE and two
16022 constant operands OP0 and OP1, return the result of the
16023 relational operation. If the result is not a compile time
16024 constant, then return NULL_TREE. */
16026 static tree
16027 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16029 int result, invert;
16031 /* From here on, the only cases we handle are when the result is
16032 known to be a constant. */
16034 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16036 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16037 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16039 /* Handle the cases where either operand is a NaN. */
16040 if (real_isnan (c0) || real_isnan (c1))
16042 switch (code)
16044 case EQ_EXPR:
16045 case ORDERED_EXPR:
16046 result = 0;
16047 break;
16049 case NE_EXPR:
16050 case UNORDERED_EXPR:
16051 case UNLT_EXPR:
16052 case UNLE_EXPR:
16053 case UNGT_EXPR:
16054 case UNGE_EXPR:
16055 case UNEQ_EXPR:
16056 result = 1;
16057 break;
16059 case LT_EXPR:
16060 case LE_EXPR:
16061 case GT_EXPR:
16062 case GE_EXPR:
16063 case LTGT_EXPR:
16064 if (flag_trapping_math)
16065 return NULL_TREE;
16066 result = 0;
16067 break;
16069 default:
16070 gcc_unreachable ();
16073 return constant_boolean_node (result, type);
16076 return constant_boolean_node (real_compare (code, c0, c1), type);
16079 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16081 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16082 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16083 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16086 /* Handle equality/inequality of complex constants. */
16087 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16089 tree rcond = fold_relational_const (code, type,
16090 TREE_REALPART (op0),
16091 TREE_REALPART (op1));
16092 tree icond = fold_relational_const (code, type,
16093 TREE_IMAGPART (op0),
16094 TREE_IMAGPART (op1));
16095 if (code == EQ_EXPR)
16096 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16097 else if (code == NE_EXPR)
16098 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16099 else
16100 return NULL_TREE;
16103 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16105 if (!VECTOR_TYPE_P (type))
16107 /* Have vector comparison with scalar boolean result. */
16108 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16109 && known_eq (VECTOR_CST_NELTS (op0),
16110 VECTOR_CST_NELTS (op1)));
16111 unsigned HOST_WIDE_INT nunits;
16112 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16113 return NULL_TREE;
16114 for (unsigned i = 0; i < nunits; i++)
16116 tree elem0 = VECTOR_CST_ELT (op0, i);
16117 tree elem1 = VECTOR_CST_ELT (op1, i);
16118 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16119 if (tmp == NULL_TREE)
16120 return NULL_TREE;
16121 if (integer_zerop (tmp))
16122 return constant_boolean_node (code == NE_EXPR, type);
16124 return constant_boolean_node (code == EQ_EXPR, type);
16126 tree_vector_builder elts;
16127 if (!elts.new_binary_operation (type, op0, op1, false))
16128 return NULL_TREE;
16129 unsigned int count = elts.encoded_nelts ();
16130 for (unsigned i = 0; i < count; i++)
16132 tree elem_type = TREE_TYPE (type);
16133 tree elem0 = VECTOR_CST_ELT (op0, i);
16134 tree elem1 = VECTOR_CST_ELT (op1, i);
16136 tree tem = fold_relational_const (code, elem_type,
16137 elem0, elem1);
16139 if (tem == NULL_TREE)
16140 return NULL_TREE;
16142 elts.quick_push (build_int_cst (elem_type,
16143 integer_zerop (tem) ? 0 : -1));
16146 return elts.build ();
16149 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16151 To compute GT, swap the arguments and do LT.
16152 To compute GE, do LT and invert the result.
16153 To compute LE, swap the arguments, do LT and invert the result.
16154 To compute NE, do EQ and invert the result.
16156 Therefore, the code below must handle only EQ and LT. */
16158 if (code == LE_EXPR || code == GT_EXPR)
16160 std::swap (op0, op1);
16161 code = swap_tree_comparison (code);
16164 /* Note that it is safe to invert for real values here because we
16165 have already handled the one case where it matters. */
16167 invert = 0;
16168 if (code == NE_EXPR || code == GE_EXPR)
16170 invert = 1;
16171 code = invert_tree_comparison (code, false);
16174 /* Compute a result for LT or EQ if args permit;
16175 otherwise return NULL_TREE. */
16176 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16178 if (code == EQ_EXPR)
16179 result = tree_int_cst_equal (op0, op1);
16180 else
16181 result = tree_int_cst_lt (op0, op1);
16183 else
16184 return NULL_TREE;
16186 if (invert)
16187 result ^= 1;
16188 return constant_boolean_node (result, type);
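/* Worked example (illustrative): folding 3 >= 5 sets invert, rewrites
   GE_EXPR to LT_EXPR, computes tree_int_cst_lt (3, 5) == 1, and the
   final inversion yields the boolean constant false.  */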
16191 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16192 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16193 itself. */
16195 tree
16196 fold_build_cleanup_point_expr (tree type, tree expr)
16198 /* If the expression does not have side effects then we don't have to wrap
16199 it with a cleanup point expression. */
16200 if (!TREE_SIDE_EFFECTS (expr))
16201 return expr;
16203 /* If the expression is a return, check whether the expression inside
16204 the return, or the right-hand side of the MODIFY_EXPR inside the
16205 return, has no side effects; if either has none, we don't need to
16206 wrap the expression in a cleanup point expression. Note we don't check
16207 the left-hand side of the MODIFY_EXPR, as it should always be the return decl. */
16208 if (TREE_CODE (expr) == RETURN_EXPR)
16210 tree op = TREE_OPERAND (expr, 0);
16211 if (!op || !TREE_SIDE_EFFECTS (op))
16212 return expr;
16213 op = TREE_OPERAND (op, 1);
16214 if (!TREE_SIDE_EFFECTS (op))
16215 return expr;
16218 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16221 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16222 of an indirection through OP0, or NULL_TREE if no simplification is
16223 possible. */
16225 tree
16226 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16228 tree sub = op0;
16229 tree subtype;
16230 poly_uint64 const_op01;
16232 STRIP_NOPS (sub);
16233 subtype = TREE_TYPE (sub);
16234 if (!POINTER_TYPE_P (subtype)
16235 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16236 return NULL_TREE;
16238 if (TREE_CODE (sub) == ADDR_EXPR)
16240 tree op = TREE_OPERAND (sub, 0);
16241 tree optype = TREE_TYPE (op);
16243 /* *&CONST_DECL -> the value of the const decl. */
16244 if (TREE_CODE (op) == CONST_DECL)
16245 return DECL_INITIAL (op);
16246 /* *&p => p; make sure to handle *&"str"[cst] here. */
16247 if (type == optype)
16249 tree fop = fold_read_from_constant_string (op);
16250 if (fop)
16251 return fop;
16252 else
16253 return op;
16255 /* *(foo *)&fooarray => fooarray[0] */
16256 else if (TREE_CODE (optype) == ARRAY_TYPE
16257 && type == TREE_TYPE (optype)
16258 && (!in_gimple_form
16259 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16261 tree type_domain = TYPE_DOMAIN (optype);
16262 tree min_val = size_zero_node;
16263 if (type_domain && TYPE_MIN_VALUE (type_domain))
16264 min_val = TYPE_MIN_VALUE (type_domain);
16265 if (in_gimple_form
16266 && TREE_CODE (min_val) != INTEGER_CST)
16267 return NULL_TREE;
16268 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16269 NULL_TREE, NULL_TREE);
16271 /* *(foo *)&complexfoo => __real__ complexfoo */
16272 else if (TREE_CODE (optype) == COMPLEX_TYPE
16273 && type == TREE_TYPE (optype))
16274 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16275 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16276 else if (VECTOR_TYPE_P (optype)
16277 && type == TREE_TYPE (optype))
16279 tree part_width = TYPE_SIZE (type);
16280 tree index = bitsize_int (0);
16281 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16282 index);
16286 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16287 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16289 tree op00 = TREE_OPERAND (sub, 0);
16290 tree op01 = TREE_OPERAND (sub, 1);
16292 STRIP_NOPS (op00);
16293 if (TREE_CODE (op00) == ADDR_EXPR)
16295 tree op00type;
16296 op00 = TREE_OPERAND (op00, 0);
16297 op00type = TREE_TYPE (op00);
16299 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16300 if (VECTOR_TYPE_P (op00type)
16301 && type == TREE_TYPE (op00type)
16302 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16303 but we want to treat offsets with MSB set as negative.
16304 For the code below negative offsets are invalid and
16305 TYPE_SIZE of the element is something unsigned, so
16306 check whether op01 fits into poly_int64, which implies
16307 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16308 then just use poly_uint64 because we want to treat the
16309 value as unsigned. */
16310 && tree_fits_poly_int64_p (op01))
16312 tree part_width = TYPE_SIZE (type);
16313 poly_uint64 max_offset
16314 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16315 * TYPE_VECTOR_SUBPARTS (op00type));
16316 if (known_lt (const_op01, max_offset))
16318 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16319 return fold_build3_loc (loc,
16320 BIT_FIELD_REF, type, op00,
16321 part_width, index);
16324 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16325 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16326 && type == TREE_TYPE (op00type))
16328 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16329 const_op01))
16330 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16332 /* ((foo *)&fooarray)[1] => fooarray[1] */
16333 else if (TREE_CODE (op00type) == ARRAY_TYPE
16334 && type == TREE_TYPE (op00type))
16336 tree type_domain = TYPE_DOMAIN (op00type);
16337 tree min_val = size_zero_node;
16338 if (type_domain && TYPE_MIN_VALUE (type_domain))
16339 min_val = TYPE_MIN_VALUE (type_domain);
16340 poly_uint64 type_size, index;
16341 if (poly_int_tree_p (min_val)
16342 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16343 && multiple_p (const_op01, type_size, &index))
16345 poly_offset_int off = index + wi::to_poly_offset (min_val);
16346 op01 = wide_int_to_tree (sizetype, off);
16347 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16348 NULL_TREE, NULL_TREE);
16354 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16355 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16356 && type == TREE_TYPE (TREE_TYPE (subtype))
16357 && (!in_gimple_form
16358 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16360 tree type_domain;
16361 tree min_val = size_zero_node;
16362 sub = build_fold_indirect_ref_loc (loc, sub);
16363 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16364 if (type_domain && TYPE_MIN_VALUE (type_domain))
16365 min_val = TYPE_MIN_VALUE (type_domain);
16366 if (in_gimple_form
16367 && TREE_CODE (min_val) != INTEGER_CST)
16368 return NULL_TREE;
16369 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16370 NULL_TREE);
16373 return NULL_TREE;
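/* Illustration (hypothetical C-level source; the GENERIC trees GCC
   builds for these expressions are what fold_indirect_ref_1 folds):

     int a[4];
     _Complex double c;

     *(int *) &a         => a[0]        // *(foo *)&fooarray
     *(double *) &c      => __real__ c  // *(foo *)&complexfoo
     ((double *) &c)[1]  => __imag__ c  // POINTER_PLUS_EXPR case
     ((int *) &a)[2]     => a[2]        // ((foo *)&fooarray)[1] case  */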
16376 /* Builds an expression for an indirection through T, simplifying some
16377 cases. */
16379 tree
16380 build_fold_indirect_ref_loc (location_t loc, tree t)
16382 tree type = TREE_TYPE (TREE_TYPE (t));
16383 tree sub = fold_indirect_ref_1 (loc, type, t);
16385 if (sub)
16386 return sub;
16388 return build1_loc (loc, INDIRECT_REF, type, t);
16391 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16393 tree
16394 fold_indirect_ref_loc (location_t loc, tree t)
16396 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16398 if (sub)
16399 return sub;
16400 else
16401 return t;
16404 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16405 whose result is ignored. The type of the returned tree need not be
16406 the same as the original expression. */
16408 tree
16409 fold_ignored_result (tree t)
16411 if (!TREE_SIDE_EFFECTS (t))
16412 return integer_zero_node;
16414 for (;;)
16415 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16417 case tcc_unary:
16418 t = TREE_OPERAND (t, 0);
16419 break;
16421 case tcc_binary:
16422 case tcc_comparison:
16423 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16424 t = TREE_OPERAND (t, 0);
16425 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16426 t = TREE_OPERAND (t, 1);
16427 else
16428 return t;
16429 break;
16431 case tcc_expression:
16432 switch (TREE_CODE (t))
16434 case COMPOUND_EXPR:
16435 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16436 return t;
16437 t = TREE_OPERAND (t, 0);
16438 break;
16440 case COND_EXPR:
16441 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16442 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16443 return t;
16444 t = TREE_OPERAND (t, 0);
16445 break;
16447 default:
16448 return t;
16450 break;
16452 default:
16453 return t;
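/* Some sample reductions (hypothetical trees; f and g have side
   effects, a, b and x do not):

     a + b            -> integer_zero_node  (no side effects at all)
     -f ()            -> f ()               (tcc_unary: strip the negation)
     f () + 3         -> f ()               (tcc_binary: drop pure operand)
     (f (), x + 1)    -> f ()               (COMPOUND_EXPR with pure RHS)
     b ? f () : g ()  -> unchanged          (both arms have side effects)  */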
16457 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16459 tree
16460 round_up_loc (location_t loc, tree value, unsigned int divisor)
16462 tree div = NULL_TREE;
16464 if (divisor == 1)
16465 return value;
16467 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16468 have to do anything. Only do this when we are not given a const,
16469 because for a constant this check is more expensive than just
16470 doing the rounding. */
16471 if (TREE_CODE (value) != INTEGER_CST)
16473 div = build_int_cst (TREE_TYPE (value), divisor);
16475 if (multiple_of_p (TREE_TYPE (value), value, div))
16476 return value;
16479 /* If divisor is a power of two, simplify this to bit manipulation. */
16480 if (pow2_or_zerop (divisor))
16482 if (TREE_CODE (value) == INTEGER_CST)
16484 wide_int val = wi::to_wide (value);
16485 bool overflow_p;
16487 if ((val & (divisor - 1)) == 0)
16488 return value;
16490 overflow_p = TREE_OVERFLOW (value);
16491 val += divisor - 1;
16492 val &= (int) -divisor;
16493 if (val == 0)
16494 overflow_p = true;
16496 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16498 else
16500 tree t;
16502 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16503 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16504 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16505 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16508 else
16510 if (!div)
16511 div = build_int_cst (TREE_TYPE (value), divisor);
16512 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16513 value = size_binop_loc (loc, MULT_EXPR, value, div);
16516 return value;
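/* The power-of-two path above is the classic mask trick: add
   DIVISOR - 1, then clear the low bits.  A plain-C sketch (hypothetical
   helper, assuming DIVISOR is a nonzero power of two):

     unsigned HOST_WIDE_INT
     round_up_example (unsigned HOST_WIDE_INT value, unsigned int divisor)
     {
       return (value + divisor - 1) & -(unsigned HOST_WIDE_INT) divisor;
     }

   e.g. (13 + 7) & -8 == 16, while an already-aligned value such as 16
   is unchanged.  */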
16519 /* Likewise, but round down. */
16521 tree
16522 round_down_loc (location_t loc, tree value, int divisor)
16524 tree div = NULL_TREE;
16526 gcc_assert (divisor > 0);
16527 if (divisor == 1)
16528 return value;
16530 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16531 have to do anything. Only do this when we are not given a const,
16532 because for a constant this check is more expensive than just
16533 doing the rounding. */
16534 if (TREE_CODE (value) != INTEGER_CST)
16536 div = build_int_cst (TREE_TYPE (value), divisor);
16538 if (multiple_of_p (TREE_TYPE (value), value, div))
16539 return value;
16542 /* If divisor is a power of two, simplify this to bit manipulation. */
16543 if (pow2_or_zerop (divisor))
16545 tree t;
16547 t = build_int_cst (TREE_TYPE (value), -divisor);
16548 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16550 else
16552 if (!div)
16553 div = build_int_cst (TREE_TYPE (value), divisor);
16554 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16555 value = size_binop_loc (loc, MULT_EXPR, value, div);
16558 return value;
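/* Rounding down needs only the mask: 13 & -8 == 8, and an
   already-aligned value is unchanged (16 & -8 == 16).  That is exactly
   the BIT_AND_EXPR built in the power-of-two branch above.  */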
16561 /* Returns the pointer to the base of the object addressed by EXP and
16562 extracts the information about the offset of the access, storing it
16563 to PBITPOS and POFFSET. */
16565 static tree
16566 split_address_to_core_and_offset (tree exp,
16567 poly_int64 *pbitpos, tree *poffset)
16569 tree core;
16570 machine_mode mode;
16571 int unsignedp, reversep, volatilep;
16572 poly_int64 bitsize;
16573 location_t loc = EXPR_LOCATION (exp);
16575 if (TREE_CODE (exp) == SSA_NAME)
16576 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16577 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16578 exp = gimple_assign_rhs1 (def);
16580 if (TREE_CODE (exp) == ADDR_EXPR)
16582 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16583 poffset, &mode, &unsignedp, &reversep,
16584 &volatilep);
16585 core = build_fold_addr_expr_loc (loc, core);
16587 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16589 core = TREE_OPERAND (exp, 0);
16590 STRIP_NOPS (core);
16591 *pbitpos = 0;
16592 *poffset = TREE_OPERAND (exp, 1);
16593 if (poly_int_tree_p (*poffset))
16595 poly_offset_int tem
16596 = wi::sext (wi::to_poly_offset (*poffset),
16597 TYPE_PRECISION (TREE_TYPE (*poffset)));
16598 tem <<= LOG2_BITS_PER_UNIT;
16599 if (tem.to_shwi (pbitpos))
16600 *poffset = NULL_TREE;
16603 else
16605 core = exp;
16606 *pbitpos = 0;
16607 *poffset = NULL_TREE;
16610 return core;
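/* Examples (hypothetical trees):

     EXP == &s.f    core == &s; F's position is returned via
                    get_inner_reference in *PBITPOS (and *POFFSET for
                    any variable part);
     EXP == p + 16  core == p, *PBITPOS == 16 * BITS_PER_UNIT,
                    *POFFSET == NULL_TREE;
     EXP == p + n   core == p, *PBITPOS == 0, *POFFSET == n.  */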
16613 /* Returns true if addresses of E1 and E2 differ by a constant, false
16614 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16616 bool
16617 ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16619 tree core1, core2;
16620 poly_int64 bitpos1, bitpos2;
16621 tree toffset1, toffset2, tdiff, type;
16623 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16624 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16626 poly_int64 bytepos1, bytepos2;
16627 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16628 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16629 || !operand_equal_p (core1, core2, 0))
16630 return false;
16632 if (toffset1 && toffset2)
16634 type = TREE_TYPE (toffset1);
16635 if (type != TREE_TYPE (toffset2))
16636 toffset2 = fold_convert (type, toffset2);
16638 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16639 if (!cst_and_fits_in_hwi (tdiff))
16640 return false;
16642 *diff = int_cst_value (tdiff);
16644 else if (toffset1 || toffset2)
16646 /* If only one of the offsets is non-constant, the difference cannot
16647 be a constant. */
16648 return false;
16650 else
16651 *diff = 0;
16653 *diff += bytepos1 - bytepos2;
16654 return true;
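/* For instance (hypothetical decl), given
     char buf[16];
   E1 == &buf[10] and E2 == &buf[2] share the core &buf and split to
   byte offsets 10 and 2, so *DIFF is set to 8 and true is returned.
   If only one of the two addresses has a nonconstant offset part, or
   the cores differ, false is returned.  */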
16657 /* Return OFF converted to a pointer offset type suitable as offset for
16658 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16659 tree
16660 convert_to_ptrofftype_loc (location_t loc, tree off)
16662 if (ptrofftype_p (TREE_TYPE (off)))
16663 return off;
16664 return fold_convert_loc (loc, sizetype, off);
16667 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16668 tree
16669 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16671 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16672 ptr, convert_to_ptrofftype_loc (loc, off));
16675 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16676 tree
16677 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16679 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16680 ptr, size_int (off));
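/* Typical use of the helpers above (hypothetical call site):

     tree q = fold_build_pointer_plus_hwi_loc (loc, p, 4);

   builds POINTER_PLUS_EXPR <p, 4> with the offset as a sizetype
   constant; the non-_hwi variant instead runs an arbitrary offset tree
   through convert_to_ptrofftype_loc first.  */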
16683 /* Return a pointer to a NUL-terminated string containing the sequence
16684 of bytes corresponding to the representation of the object referred to
16685 by SRC (or a subsequence of such bytes within it if SRC is a reference
16686 to an initialized constant array plus some constant offset).
16687 Set *STRSIZE to the number of bytes in the constant sequence including
16688 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16689 where A is the array that stores the constant sequence that SRC points
16690 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16691 need not point to a string or even an array of characters but may point
16692 to an object of any type. */
16694 const char *
16695 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16697 /* The offset into the array A storing the string, and A's byte size. */
16698 tree offset_node;
16699 tree mem_size;
16701 if (strsize)
16702 *strsize = 0;
16704 if (strsize)
16705 src = byte_representation (src, &offset_node, &mem_size, NULL);
16706 else
16707 src = string_constant (src, &offset_node, &mem_size, NULL);
16708 if (!src)
16709 return NULL;
16711 unsigned HOST_WIDE_INT offset = 0;
16712 if (offset_node != NULL_TREE)
16714 if (!tree_fits_uhwi_p (offset_node))
16715 return NULL;
16716 else
16717 offset = tree_to_uhwi (offset_node);
16720 if (!tree_fits_uhwi_p (mem_size))
16721 return NULL;
16723 /* ARRAY_SIZE is the byte size of the array the constant sequence
16724 is stored in and equal to sizeof A. INIT_BYTES is the number
16725 of bytes in the constant sequence used to initialize the array,
16726 including any embedded NULs as well as the terminating NUL (for
16727 strings), but not including any trailing zeros/NULs past
16728 the terminating one appended implicitly to a string literal to
16729 zero out the remainder of the array it's stored in. For example,
16730 given:
16731 const char a[7] = "abc\0d";
16732 n = strlen (a + 1);
16733 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16734 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16735 is equal to strlen (A) + 1. */
16736 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16737 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16738 const char *string = TREE_STRING_POINTER (src);
16740 /* Ideally this would turn into a gcc_checking_assert over time. */
16741 if (init_bytes > array_size)
16742 init_bytes = array_size;
16744 if (init_bytes == 0 || offset >= array_size)
16745 return NULL;
16747 if (strsize)
16749 /* Compute and store the number of characters from the beginning
16750 of the substring at OFFSET to the end, including the terminating
16751 nul. Offsets past the initial length refer to null strings. */
16752 if (offset < init_bytes)
16753 *strsize = init_bytes - offset;
16754 else
16755 *strsize = 1;
16757 else
16759 tree eltype = TREE_TYPE (TREE_TYPE (src));
16760 /* Support only properly NUL-terminated single byte strings. */
16761 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16762 return NULL;
16763 if (string[init_bytes - 1] != '\0')
16764 return NULL;
16767 return offset < init_bytes ? string + offset : "";
16770 /* Return a pointer to a NUL-terminated string corresponding to
16771 the expression STR referencing a constant string, possibly
16772 involving a constant offset. Return null if STR either doesn't
16773 reference a constant string or if it involves a nonconstant
16774 offset. */
16776 const char *
16777 c_getstr (tree str)
16779 return getbyterep (str, NULL);
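/* Example (hypothetical trees): given
     const char s[] = "hello";
   c_getstr on &s[0] yields "hello" and on &s[2] yields "llo", while a
   nonconstant offset, a non-NUL-terminated source or a wide-character
   string yields NULL.  */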
16782 /* Given a tree T, compute which bits in T may be nonzero. */
16784 wide_int
16785 tree_nonzero_bits (const_tree t)
16787 switch (TREE_CODE (t))
16789 case INTEGER_CST:
16790 return wi::to_wide (t);
16791 case SSA_NAME:
16792 return get_nonzero_bits (t);
16793 case NON_LVALUE_EXPR:
16794 case SAVE_EXPR:
16795 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16796 case BIT_AND_EXPR:
16797 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16798 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16799 case BIT_IOR_EXPR:
16800 case BIT_XOR_EXPR:
16801 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16802 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16803 case COND_EXPR:
16804 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16805 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16806 CASE_CONVERT:
16807 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16808 TYPE_PRECISION (TREE_TYPE (t)),
16809 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16810 case PLUS_EXPR:
16811 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16813 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16814 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16815 if (wi::bit_and (nzbits1, nzbits2) == 0)
16816 return wi::bit_or (nzbits1, nzbits2);
16818 break;
16819 case LSHIFT_EXPR:
16820 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16822 tree type = TREE_TYPE (t);
16823 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16824 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16825 TYPE_PRECISION (type));
16826 return wi::neg_p (arg1)
16827 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16828 : wi::lshift (nzbits, arg1);
16830 break;
16831 case RSHIFT_EXPR:
16832 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16834 tree type = TREE_TYPE (t);
16835 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16836 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16837 TYPE_PRECISION (type));
16838 return wi::neg_p (arg1)
16839 ? wi::lshift (nzbits, -arg1)
16840 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16842 break;
16843 default:
16844 break;
16847 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
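/* Worked example for a 32-bit int X:

     tree_nonzero_bits (x & 0xf0)         == 0x000000f0
     tree_nonzero_bits ((x & 0xf0) | 1)   == 0x000000f1
     tree_nonzero_bits ((x & 0xf0) << 2)  == 0x000003c0
     tree_nonzero_bits ((x & 0xf0) + 3)   == 0x000000f3

   The PLUS_EXPR case fires only because the operands' nonzero bits
   don't overlap, so the addition cannot produce a carry.  */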
16850 /* Helper function for address compare simplifications in match.pd.
16851 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16852 TYPE is the type of comparison operands.
16853 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16854 GENERIC is true if GENERIC folding and false for GIMPLE folding.
16855 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16856 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16857 and 2 if unknown. */
16859 int
16860 address_compare (tree_code code, tree type, tree op0, tree op1,
16861 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16862 bool generic)
16864 if (TREE_CODE (op0) == SSA_NAME)
16865 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
16866 if (TREE_CODE (op1) == SSA_NAME)
16867 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
16868 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16869 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16870 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16871 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16872 if (base0 && TREE_CODE (base0) == MEM_REF)
16874 off0 += mem_ref_offset (base0).force_shwi ();
16875 base0 = TREE_OPERAND (base0, 0);
16877 if (base1 && TREE_CODE (base1) == MEM_REF)
16879 off1 += mem_ref_offset (base1).force_shwi ();
16880 base1 = TREE_OPERAND (base1, 0);
16882 if (base0 == NULL_TREE || base1 == NULL_TREE)
16883 return 2;
16885 int equal = 2;
16886 /* Punt in GENERIC on variables with value expressions;
16887 the value expressions might point to fields/elements
16888 of other vars etc. */
16889 if (generic
16890 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16891 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16892 return 2;
16893 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16895 symtab_node *node0 = symtab_node::get_create (base0);
16896 symtab_node *node1 = symtab_node::get_create (base1);
16897 equal = node0->equal_address_to (node1);
16899 else if ((DECL_P (base0)
16900 || TREE_CODE (base0) == SSA_NAME
16901 || TREE_CODE (base0) == STRING_CST)
16902 && (DECL_P (base1)
16903 || TREE_CODE (base1) == SSA_NAME
16904 || TREE_CODE (base1) == STRING_CST))
16905 equal = (base0 == base1);
16906 /* Assume different STRING_CSTs with the same content will be
16907 merged. */
16908 if (equal == 0
16909 && TREE_CODE (base0) == STRING_CST
16910 && TREE_CODE (base1) == STRING_CST
16911 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16912 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16913 TREE_STRING_LENGTH (base0)) == 0)
16914 equal = 1;
16915 if (equal == 1)
16917 if (code == EQ_EXPR
16918 || code == NE_EXPR
16919 /* If the offsets are equal we can ignore overflow. */
16920 || known_eq (off0, off1)
16921 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16922 /* Or if we compare using pointers to decls or strings. */
16923 || (POINTER_TYPE_P (type)
16924 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16925 return 1;
16926 return 2;
16928 if (equal != 0)
16929 return equal;
16930 if (code != EQ_EXPR && code != NE_EXPR)
16931 return 2;
16933 /* At this point we know (or assume) the two pointers point at
16934 different objects. */
16935 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16936 off0.is_constant (&ioff0);
16937 off1.is_constant (&ioff1);
16938 /* Punt on non-zero offsets from functions. */
16939 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16940 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16941 return 2;
16942 /* Or if the bases are neither decls nor string literals. */
16943 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16944 return 2;
16945 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16946 return 2;
16947 /* For initializers, assume addresses of different functions are
16948 different. */
16949 if (folding_initializer
16950 && TREE_CODE (base0) == FUNCTION_DECL
16951 && TREE_CODE (base1) == FUNCTION_DECL)
16952 return 0;
16954 /* Compute whether one address points to the start of one
16955 object and another one to the end of another one. */
16956 poly_int64 size0 = 0, size1 = 0;
16957 if (TREE_CODE (base0) == STRING_CST)
16959 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16960 equal = 2;
16961 else
16962 size0 = TREE_STRING_LENGTH (base0);
16964 else if (TREE_CODE (base0) == FUNCTION_DECL)
16965 size0 = 1;
16966 else
16968 tree sz0 = DECL_SIZE_UNIT (base0);
16969 if (!tree_fits_poly_int64_p (sz0))
16970 equal = 2;
16971 else
16972 size0 = tree_to_poly_int64 (sz0);
16974 if (TREE_CODE (base1) == STRING_CST)
16976 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16977 equal = 2;
16978 else
16979 size1 = TREE_STRING_LENGTH (base1);
16981 else if (TREE_CODE (base1) == FUNCTION_DECL)
16982 size1 = 1;
16983 else
16985 tree sz1 = DECL_SIZE_UNIT (base1);
16986 if (!tree_fits_poly_int64_p (sz1))
16987 equal = 2;
16988 else
16989 size1 = tree_to_poly_int64 (sz1);
16991 if (equal == 0)
16993 /* If one offset is pointing (or could be) to the beginning of one
16994 object and the other is pointing to one past the last byte of the
16995 other object, punt. */
16996 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16997 equal = 2;
16998 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16999 equal = 2;
17000 /* If both offsets are the same, there are some cases we know are
17001 OK: either we know the offsets aren't zero, or we know both
17002 sizes are nonzero. */
17003 if (equal == 2
17004 && known_eq (off0, off1)
17005 && (known_ne (off0, 0)
17006 || (known_ne (size0, 0) && known_ne (size1, 0))))
17007 equal = 0;
17010 /* At this point, equal is 2 if either one or both pointers are out of
17011 bounds of their object, or one points to start of its object and the
17012 other points to end of its object. This is unspecified behavior
17013 e.g. in C++. Otherwise equal is 0. */
17014 if (folding_cxx_constexpr && equal)
17015 return equal;
17017 /* When both pointers point to string literals, even when equal is 0,
17018 due to tail merging of string literals the pointers might be the same. */
17019 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
17021 if (ioff0 < 0
17022 || ioff1 < 0
17023 || ioff0 > TREE_STRING_LENGTH (base0)
17024 || ioff1 > TREE_STRING_LENGTH (base1))
17025 return 2;
17027 /* If the bytes in the string literals starting at the pointers
17028 differ, the pointers need to be different. */
17029 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
17030 TREE_STRING_POINTER (base1) + ioff1,
17031 MIN (TREE_STRING_LENGTH (base0) - ioff0,
17032 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
17034 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
17035 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
17036 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
17037 ioffmin) == 0)
17038 /* If even the bytes in the string literal before the
17039 pointers are the same, the string literals could be
17040 tail merged. */
17041 return 2;
17043 return 0;
17046 if (folding_cxx_constexpr)
17047 return 0;
17049 /* If this is a pointer comparison, ignore for now even
17050 valid equalities where one pointer is at offset zero of one
17051 object and the other points one past the end of another one. */
17052 if (!INTEGRAL_TYPE_P (type))
17053 return 0;
17055 /* Assume that string literals can't be adjacent to variables
17056 (automatic or global). */
17057 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
17058 return 0;
17060 /* Assume that automatic variables can't be adjacent to global
17061 variables. */
17062 if (is_global_var (base0) != is_global_var (base1))
17063 return 0;
17065 return equal;
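/* Illustration (hypothetical file-scope decls):

     int a, b;

   For &a vs. &b the bases are distinct decls, both offsets are zero
   and both sizes are nonzero, so 0 (known unequal) is returned.  For
   &a + 1 vs. &b -- one past the end of one object against the start
   of another -- 2 is returned, since such pointers may legitimately
   compare equal.  For &a vs. &a + 1 the bases match and 1 is
   returned: the comparison reduces to comparing OFF0 with OFF1.  */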
17068 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
17069 tree
17070 ctor_single_nonzero_element (const_tree t)
17072 unsigned HOST_WIDE_INT idx;
17073 constructor_elt *ce;
17074 tree elt = NULL_TREE;
17076 if (TREE_CODE (t) != CONSTRUCTOR)
17077 return NULL_TREE;
17078 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17079 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17081 if (elt)
17082 return NULL_TREE;
17083 elt = ce->value;
17085 return elt;
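/* E.g. for a CONSTRUCTOR representing { 0, 42, 0, 0 } this returns
   the element 42; for { 1, 2 } (two nonzero elements) or for an
   all-zero constructor it returns NULL_TREE.  */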
17088 #if CHECKING_P
17090 namespace selftest {
17092 /* Helper functions for writing tests of folding trees. */
17094 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17096 static void
17097 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17098 tree constant)
17100 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17103 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
17104 wrapping WRAPPED_EXPR. */
17106 static void
17107 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17108 tree wrapped_expr)
17110 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17111 ASSERT_NE (wrapped_expr, result);
17112 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17113 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17116 /* Verify that various arithmetic binary operations are folded
17117 correctly. */
17119 static void
17120 test_arithmetic_folding ()
17122 tree type = integer_type_node;
17123 tree x = create_tmp_var_raw (type, "x");
17124 tree zero = build_zero_cst (type);
17125 tree one = build_int_cst (type, 1);
17127 /* Addition. */
17128 /* 1 <-- (0 + 1) */
17129 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17130 one);
17131 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17132 one);
17134 /* (nonlvalue)x <-- (x + 0) */
17135 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17136 x);
17138 /* Subtraction. */
17139 /* 0 <-- (x - x) */
17140 assert_binop_folds_to_const (x, MINUS_EXPR, x,
17141 zero);
17142 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17143 x);
17145 /* Multiplication. */
17146 /* 0 <-- (x * 0) */
17147 assert_binop_folds_to_const (x, MULT_EXPR, zero,
17148 zero);
17150 /* (nonlvalue)x <-- (x * 1) */
17151 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17152 x);
17155 namespace test_fold_vec_perm_cst {
17157 /* Build a VECTOR_CST corresponding to VMODE, with encoding given
17158 by NPATTERNS, NELTS_PER_PATTERN and STEP.
17159 Fill it with randomized elements, using rand() % THRESHOLD. */
17161 static tree
17162 build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17163 unsigned nelts_per_pattern,
17164 int step = 0, int threshold = 100)
17166 tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17167 tree vectype = build_vector_type_for_mode (inner_type, vmode);
17168 tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17170 // Fill a0 for each pattern
17171 for (unsigned i = 0; i < npatterns; i++)
17172 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17174 if (nelts_per_pattern == 1)
17175 return builder.build ();
17177 // Fill a1 for each pattern
17178 for (unsigned i = 0; i < npatterns; i++)
17179 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17181 if (nelts_per_pattern == 2)
17182 return builder.build ();
17184 for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17186 tree prev_elem = builder[i - npatterns];
17187 int prev_elem_val = TREE_INT_CST_LOW (prev_elem);
17188 int val = prev_elem_val + step;
17189 builder.quick_push (build_int_cst (inner_type, val));
17192 return builder.build ();
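/* For example, build_vec_cst_rand (vmode, 2, 3, 1) encodes two
   patterns of three elements each, { a0, b0, a1, b1, a1+1, b1+1 },
   with random a0, b0, a1, b1; the built VECTOR_CST then continues
   each pattern's series by the step:
   { a0, b0, a1, b1, a1+1, b1+1, a1+2, b1+2, ... }.  */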
17195 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17196 when the result is VLA. */
17198 static void
17199 validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17200 tree res, tree *expected_res)
17202 /* Actual npatterns and encoded_elts in res may be less than expected due
17203 to canonicalization. */
17204 ASSERT_TRUE (res != NULL_TREE);
17205 ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17206 ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17208 for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17209 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17212 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17213 when the result is VLS. */
17215 static void
17216 validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17218 ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17219 for (unsigned i = 0; i < expected_nelts; i++)
17220 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17223 /* Helper routine to push multiple elements into BUILDER. */
17224 template<unsigned N>
17225 static void builder_push_elems (vec_perm_builder& builder,
17226 poly_uint64 (&elems)[N])
17228 for (unsigned i = 0; i < N; i++)
17229 builder.quick_push (elems[i]);
17232 #define ARG0(index) vector_cst_elt (arg0, index)
17233 #define ARG1(index) vector_cst_elt (arg1, index)
17235 /* Test cases where result is VNx4SI and input vectors are V4SI. */
17237 static void
17238 test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17240 for (int i = 0; i < 10; i++)
17242 /* Case 1:
17243 sel = { 0, 4, 1, 5, ... }
17244 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17246 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17247 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17249 tree inner_type
17250 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17251 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17253 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17254 vec_perm_builder builder (res_len, 4, 1);
17255 poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17256 builder_push_elems (builder, mask_elems);
17258 vec_perm_indices sel (builder, 2, res_len);
17259 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17261 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17262 validate_res (4, 1, res, expected_res);
17265 /* Case 2: Same as case 1, but contains an out of bounds access which
17266 should wrap around.
17267 sel = {0, 8, 4, 12, ...} (4, 1)
17268 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17270 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17271 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17273 tree inner_type
17274 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17275 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17277 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17278 vec_perm_builder builder (res_len, 4, 1);
17279 poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17280 builder_push_elems (builder, mask_elems);
17282 vec_perm_indices sel (builder, 2, res_len);
17283 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17285 tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17286 validate_res (4, 1, res, expected_res);
17291 /* Test cases where result is V4SI and input vectors are VNx4SI. */
17293 static void
17294 test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17296 for (int i = 0; i < 10; i++)
17298 /* Case 1:
17299 sel = { 0, 1, 2, 3}
17300 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17302 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17303 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17305 tree inner_type
17306 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17307 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17309 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17310 vec_perm_builder builder (res_len, 4, 1);
17311 poly_uint64 mask_elems[] = {0, 1, 2, 3};
17312 builder_push_elems (builder, mask_elems);
17314 vec_perm_indices sel (builder, 2, res_len);
17315 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17317 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17318 validate_res_vls (res, expected_res, 4);
17321 /* Case 2: Same as Case 1, but crossing the input vectors.
17322 sel = {0, 2, 4, 6}
17323 In this case, the index 4 is ambiguous since len = 4 + 4x.
17324 Since we cannot determine at compile time which vector to
17325 choose from, fold_vec_perm_cst should return NULL_TREE. */
17327 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17328 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17330 tree inner_type
17331 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17332 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17334 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17335 vec_perm_builder builder (res_len, 4, 1);
17336 poly_uint64 mask_elems[] = {0, 2, 4, 6};
17337 builder_push_elems (builder, mask_elems);
17339 vec_perm_indices sel (builder, 2, res_len);
17340 const char *reason;
17341 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17343 ASSERT_TRUE (res == NULL_TREE);
17344 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17349 /* Test all input vectors. */
17351 static void
17352 test_all_nunits (machine_mode vmode)
17354 /* Test with 10 different inputs. */
17355 for (int i = 0; i < 10; i++)
17357 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17358 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17359 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17361 /* Case 1: mask = {0, ...} // (1, 1)
17362 res = { arg0[0], ... } // (1, 1) */
17364 vec_perm_builder builder (len, 1, 1);
17365 builder.quick_push (0);
17366 vec_perm_indices sel (builder, 2, len);
17367 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17368 tree expected_res[] = { ARG0(0) };
17369 validate_res (1, 1, res, expected_res);
17372 /* Case 2: mask = {len, ...} // (1, 1)
17373 res = { arg1[0], ... } // (1, 1) */
17375 vec_perm_builder builder (len, 1, 1);
17376 builder.quick_push (len);
17377 vec_perm_indices sel (builder, 2, len);
17378 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17380 tree expected_res[] = { ARG1(0) };
17381 validate_res (1, 1, res, expected_res);
17386 /* Test all vectors which contain at least 2 elements. */
17388 static void
17389 test_nunits_min_2 (machine_mode vmode)
17391 for (int i = 0; i < 10; i++)
17393 /* Case 1: mask = { 0, len, ... } // (2, 1)
17394 res = { arg0[0], arg1[0], ... } // (2, 1) */
17396 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17397 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17398 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17400 vec_perm_builder builder (len, 2, 1);
17401 poly_uint64 mask_elems[] = { 0, len };
17402 builder_push_elems (builder, mask_elems);
17404 vec_perm_indices sel (builder, 2, len);
17405 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17407 tree expected_res[] = { ARG0(0), ARG1(0) };
17408 validate_res (2, 1, res, expected_res);
17411 /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17412 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17414 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17415 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17416 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17418 vec_perm_builder builder (len, 2, 2);
17419 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17420 builder_push_elems (builder, mask_elems);
17422 vec_perm_indices sel (builder, 2, len);
17423 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17425 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17426 validate_res (2, 2, res, expected_res);
17429 /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17430 Test that the stepped sequence of the pattern selects from the
17431 same input pattern. Since the input vectors have npatterns = 2,
17432 and step (a2 - a1) = 1, step is not a multiple of npatterns
17433 in the input vector. So return NULL_TREE. */
17435 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1);
17436 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17437 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17439 vec_perm_builder builder (len, 1, 3);
17440 poly_uint64 mask_elems[] = { 0, 0, 1 };
17441 builder_push_elems (builder, mask_elems);
17443 vec_perm_indices sel (builder, 2, len);
17444 const char *reason;
17445 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17446 &reason);
17447 ASSERT_TRUE (res == NULL_TREE);
17448 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17451 /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17452 Test that the stepped sequence of the pattern selects from arg0.
17453 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17455 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17456 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17457 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17459 vec_perm_builder builder (len, 1, 3);
17460 poly_uint64 mask_elems[] = { len, 0, 1 };
17461 builder_push_elems (builder, mask_elems);
17463 vec_perm_indices sel (builder, 2, len);
17464 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17466 tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17467 validate_res (1, 3, res, expected_res);
17472 /* Test all vectors which contain at least 4 elements. */
17474 static void
17475 test_nunits_min_4 (machine_mode vmode)
17477 for (int i = 0; i < 10; i++)
17479 /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17480 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17482 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17483 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17484 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17486 vec_perm_builder builder (len, 4, 1);
17487 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17488 builder_push_elems (builder, mask_elems);
17490 vec_perm_indices sel (builder, 2, len);
17491 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17493 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17494 validate_res (4, 1, res, expected_res);
17497 /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17498 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17500 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17501 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17502 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17504 vec_perm_builder builder (arg0_len, 1, 3);
17505 poly_uint64 mask_elems[] = {0, 1, 2};
17506 builder_push_elems (builder, mask_elems);
17508 vec_perm_indices sel (builder, 2, arg0_len);
17509 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17510 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17511 validate_res (1, 3, res, expected_res);
17514 /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17515 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17517 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17518 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17519 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17521 vec_perm_builder builder (len, 1, 3);
17522 poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17523 builder_push_elems (builder, mask_elems);
17525 vec_perm_indices sel (builder, 2, len);
17526 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17527 tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17528 validate_res (1, 3, res, expected_res);
17531 /* Case 4:
17532 sel = { len, 0, 2, ... } // (1, 3)
17533 This should return NULL_TREE because we cross the input vectors.
17534 To see why, assume:
17535 len = C + Cx
17536 a1 = 0
17537 S = 2
17538 esel = arg0_len / sel_npatterns = C + Cx
17539 ae = 0 + (esel - 2) * S
17540 = 0 + (C + Cx - 2) * 2
17541 = 2(C-2) + 2Cx
17543 For C >= 4:
17544 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17545 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17546 Since q1 != qe, we cross input vectors.
17547 So return NULL_TREE. */
17549 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17550 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17551 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17553 vec_perm_builder builder (arg0_len, 1, 3);
17554 poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17555 builder_push_elems (builder, mask_elems);
17557 vec_perm_indices sel (builder, 2, arg0_len);
17558 const char *reason;
17559 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17560 ASSERT_TRUE (res == NULL_TREE);
17561 ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17564 /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17565 mask = { 0, len, 1, len + 1, ...} // (2, 2)
17566 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17568 Note that fold_vec_perm_cst will set
17569 res_npatterns = max(4, max(4, 2)) = 4
17570 However after canonicalizing, we will end up with shape (2, 2). */
17572 tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17573 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17574 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17576 vec_perm_builder builder (len, 2, 2);
17577 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17578 builder_push_elems (builder, mask_elems);
17580 vec_perm_indices sel (builder, 2, len);
17581 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17582 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17583 validate_res (2, 2, res, expected_res);
17586 /* Case 6: Test combination in sel, where one pattern is dup and other
17587 is stepped sequence.
17588 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17589 res = { arg0[0], arg0[0], arg0[0],
17590 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17592 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17593 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17594 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17596 vec_perm_builder builder (len, 2, 3);
17597 poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17598 builder_push_elems (builder, mask_elems);
17600 vec_perm_indices sel (builder, 2, len);
17601 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17603 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17604 ARG0(1), ARG0(0), ARG0(2) };
17605 validate_res (2, 3, res, expected_res);
17608 /* Case 7: PR111048: Check that we set arg_npatterns correctly,
17609 when arg0, arg1 and sel have different number of patterns.
17610 arg0 is of shape (1, 1)
17611 arg1 is of shape (4, 1)
17612 sel is of shape (2, 3) = {1, len, 2, len+1, 3, len+2, ...}
17614 In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17615 However,
17616 step = (len+2) - (len+1) = 1
17617 arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17618 Since step is not a multiple of arg_npatterns,
17619 valid_mask_for_fold_vec_perm_cst should return false,
17620 and thus fold_vec_perm_cst should return NULL_TREE. */
17622 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17623 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17624 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17626 vec_perm_builder builder (len, 2, 3);
17627 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17628 builder_push_elems (builder, mask_elems);
17630 vec_perm_indices sel (builder, 2, len);
17631 const char *reason;
17632 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17634 ASSERT_TRUE (res == NULL_TREE);
17635 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17640 /* Test all vectors which contain at least 8 elements. */
17642 static void
17643 test_nunits_min_8 (machine_mode vmode)
17645 for (int i = 0; i < 10; i++)
17647 /* Case 1: sel_npatterns (4) > input npatterns (2)
17648 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17649 res: { arg0[0], arg0[0], arg0[1], arg1[0],
17650 arg0[2], arg0[0], arg0[3], arg1[0],
17651 arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17653 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17654 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17655 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17657 vec_perm_builder builder(len, 4, 3);
17658 poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17659 4, 0, 5, len };
17660 builder_push_elems (builder, mask_elems);
17662 vec_perm_indices sel (builder, 2, len);
17663 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17665 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17666 ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17667 ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17668 validate_res (4, 3, res, expected_res);
17673 /* Test vectors for which nunits[0] <= 4. */
17675 static void
17676 test_nunits_max_4 (machine_mode vmode)
17678 /* Case 1: mask = {0, 4, ...} // (1, 2)
17679 This should return NULL_TREE because the index 4 may choose
17680 from either arg0 or arg1 depending on vector length. */
17682 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17683 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17684 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17686 vec_perm_builder builder (len, 1, 2);
17687 poly_uint64 mask_elems[] = {0, 4};
17688 builder_push_elems (builder, mask_elems);
17690 vec_perm_indices sel (builder, 2, len);
17691 const char *reason;
17692 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17693 ASSERT_TRUE (res == NULL_TREE);
17694 ASSERT_TRUE (reason != NULL);
17695 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17699 #undef ARG0
17700 #undef ARG1
17702 /* Return true if SIZE is of the form C + Cx and C is a power of 2. */
17704 static bool
17705 is_simple_vla_size (poly_uint64 size)
17707 if (size.is_constant ()
17708 || !pow2p_hwi (size.coeffs[0]))
17709 return false;
17710 for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17711 if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17712 return false;
17713 return true;
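/* E.g. the poly_uint64 4 + 4x (coeffs {4, 4}) is a simple VLA size,
   whereas 4 (constant), 6 + 6x (leading coefficient not a power of 2)
   and 4 + 2x (unequal coefficients) are not.  */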
17716 /* Execute fold_vec_perm_cst unit tests. */
17718 static void
17719 test ()
17721 machine_mode vnx4si_mode = E_VOIDmode;
17722 machine_mode v4si_mode = E_VOIDmode;
17724 machine_mode vmode;
17725 FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
17727 /* Obtain modes corresponding to VNx4SI and V4SI,
17728 to call mixed mode tests below.
17729 FIXME: Is there a better way to do this? */
17730 if (GET_MODE_INNER (vmode) == SImode)
17732 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17733 if (is_simple_vla_size (nunits)
17734 && nunits.coeffs[0] == 4)
17735 vnx4si_mode = vmode;
17736 else if (known_eq (nunits, poly_uint64 (4)))
17737 v4si_mode = vmode;
17740 if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
17741 || !targetm.vector_mode_supported_p (vmode))
17742 continue;
17744 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17745 test_all_nunits (vmode);
17746 if (nunits.coeffs[0] >= 2)
17747 test_nunits_min_2 (vmode);
17748 if (nunits.coeffs[0] >= 4)
17749 test_nunits_min_4 (vmode);
17750 if (nunits.coeffs[0] >= 8)
17751 test_nunits_min_8 (vmode);
17753 if (nunits.coeffs[0] <= 4)
17754 test_nunits_max_4 (vmode);
17757 if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
17758 && targetm.vector_mode_supported_p (vnx4si_mode)
17759 && targetm.vector_mode_supported_p (v4si_mode))
17761 test_vnx4si_v4si (vnx4si_mode, v4si_mode);
17762 test_v4si_vnx4si (v4si_mode, vnx4si_mode);
17765 } // end of test_fold_vec_perm_cst namespace
17767 /* Verify that various binary operations on vectors are folded
17768 correctly. */
17770 static void
17771 test_vector_folding ()
17773 tree inner_type = integer_type_node;
17774 tree type = build_vector_type (inner_type, 4);
17775 tree zero = build_zero_cst (type);
17776 tree one = build_one_cst (type);
17777 tree index = build_index_vector (type, 0, 1);
17779 /* Verify equality tests that return a scalar boolean result. */
17780 tree res_type = boolean_type_node;
17781 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
17782 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
17783 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
17784 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
17785 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
17786 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17787 index, one)));
17788 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
17789 index, index)));
17790 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17791 index, index)));
17794 /* Verify folding of VEC_DUPLICATE_EXPRs. */
17796 static void
17797 test_vec_duplicate_folding ()
17799 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
17800 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
17801 /* This will be 1 if VEC_MODE isn't a vector mode. */
17802 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
17804 tree type = build_vector_type (ssizetype, nunits);
17805 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
17806 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
17807 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
17810 /* Run all of the selftests within this file. */
17812 void
17813 fold_const_cc_tests ()
17815 test_arithmetic_folding ();
17816 test_vector_folding ();
17817 test_vec_duplicate_folding ();
17818 test_fold_vec_perm_cst::test ();
17821 } // namespace selftest
17823 #endif /* CHECKING_P */