Handle BITINT_TYPE in build_{,minus_}one_cst [PR102989]
[official-gcc.git] / gcc / fold-const.cc
blob 5b481a247f2f23861f518c1ef11d8e3f62105f2b
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#define INCLUDE_ALGORITHM
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding inside an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding a C++ manifestly-constant-evaluated context;
   zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
                                            tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
        {
          /* X <= Y - 1 is equivalent to X < Y.  */
          if (cmp == LE_EXPR)
            code = LT_EXPR;
          /* X > Y - 1 is equivalent to X >= Y.  */
          if (cmp == GT_EXPR)
            code = GE_EXPR;
          /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
          if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
            {
              value_range r;
              get_range_query (cfun)->range_of_expr (r, exp0);
              if (r.undefined_p ())
                r.set_varying (TREE_TYPE (exp0));

              widest_int min = widest_int::from (r.lower_bound (),
                                                 TYPE_SIGN (TREE_TYPE (exp0)));
              if (min == wi::to_widest (exp1))
                code = MAX_EXPR;
            }
        }
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
        {
          /* X < Y + 1 is equivalent to X <= Y.  */
          if (cmp == LT_EXPR)
            code = LE_EXPR;
          /* X >= Y + 1 is equivalent to X > Y.  */
          if (cmp == GE_EXPR)
            code = GT_EXPR;
          /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
          if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
            {
              value_range r;
              get_range_query (cfun)->range_of_expr (r, exp0);
              if (r.undefined_p ())
                r.set_varying (TREE_TYPE (exp0));

              widest_int max = widest_int::from (r.upper_bound (),
                                                 TYPE_SIGN (TREE_TYPE (exp0)));
              if (max == wi::to_widest (exp1))
                code = MIN_EXPR;
            }
        }
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
        code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
        code = MAX_EXPR;
    }
  return code;
}
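
/* Worked example (editor's sketch, not part of the original sources):
   for the GENERIC expression x <= 9 ? x : 10 with integral X,

     tree_code code = minmax_from_comparison (LE_EXPR, x, nine, x, ten);

   returns MIN_EXPR (NINE and TEN being the INTEGER_CSTs 9 and 10),
   because X <= 9 is equivalent to X < 10, so the whole COND_EXPR is
   MIN_EXPR <x, 10>.  */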
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
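
/* Editor's illustration (an assumed usage, not from this file):

     tree a = build_int_cst (integer_type_node, 12);
     tree b = build_int_cst (integer_type_node, 4);
     tree q = div_if_zero_remainder (a, b);
     tree r = div_if_zero_remainder (b, a);

   Q is the INTEGER_CST 3 since 4 divides 12 exactly, while R is
   NULL_TREE because 4 % 12 != 0.  */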
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
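
/* For example (editor's note), for 32-bit int only INT_MIN has just
   the sign bit set, so may_negate_without_overflow_p returns false
   for the INTEGER_CST -2147483648 and true for every other signed
   32-bit constant.  */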
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        /* Steps don't prevent negation.  */
        unsigned int count = vector_cst_encoded_nelts (t);
        for (unsigned int i = 0; i < count; ++i)
          if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
          || HONOR_SIGNED_ZEROS (type)
          || (ANY_INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
             && !HONOR_SIGNED_ZEROS (type)
             && (! ANY_INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its operands
         does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && (wi::popcount
                     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && (wi::popcount
                        (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
          && negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == element_precision (type) - 1)
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        tree_vector_builder elts;
        elts.new_unary_operation (type, t, true);
        unsigned int count = elts.encoded_nelts ();
        for (unsigned int i = 0; i < count; ++i)
          {
            tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elt == NULL_TREE)
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
          && !HONOR_SIGNED_ZEROS (type))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
          && !HONOR_SIGNED_ZEROS (type))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
          && negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == element_precision (type) - 1)
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
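
/* Editor's example: when signed zeros are not honored, negate_expr on
   the GENERIC tree A - B folds to B - A via fold_negate_expr, while an
   operand with no simpler negation, say a bare VAR_DECL X, simply
   becomes NEGATE_EXPR <X> of the same type.  */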
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
            tree *minus_varp, tree *conp, tree *minus_conp,
            tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
                   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR
                       && (TREE_CODE (in) == PLUS_EXPR
                           || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
        *minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
        *minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      if (*conp)
        *minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
        *conp = *minus_conp, *minus_conp = 0;
      if (var)
        *minus_varp = var, var = 0;
      else if (*minus_varp)
        var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
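
/* Editor's worked example (a sketch): with CODE == PLUS_EXPR,
   splitting IN = X + 5 stores the INTEGER_CST 5 in *LITP and returns
   X, while splitting IN = 5 - X stores 5 in *LITP, X in *MINUS_VARP
   and returns null; associate_trees below can then recombine the
   pieces in any order.  */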
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
                enum tree_code code, const wide_int &arg1, const wide_int &arg2,
                signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
        return false;
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          tmp = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, tmp);
      else
        res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
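
/* Editor's sketch of a direct call (an assumed use, not from this
   file):

     wide_int r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, PLUS_EXPR, wi::to_wide (arg1),
                         wi::to_wide (arg2), TYPE_SIGN (type), &ovf)
         && ovf == wi::OVF_NONE)
       return wide_int_to_tree (type, r);

   i.e. fold ARG1 + ARG2 only if the operation is supported and did not
   overflow in TYPE's signedness.  */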
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
                const_tree arg1, const_tree arg2,
                signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
                     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
                     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
        res = wi::mul (wi::to_poly_wide (arg1),
                       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        res = wi::mul (wi::to_poly_wide (arg2),
                       wi::to_wide (arg1), sign, overflow);
      else
        return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
        res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
        return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
          || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
                         &res))
        return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
                 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
        return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
           || !poly_int_tree_p (arg2)
           || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
                         (((sign == SIGNED || overflowable == -1)
                           && overflow)
                          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
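
/* For instance (editor's illustration), with ARG1 and ARG2 the
   INTEGER_CSTs 6 and 7 of the same type, int_const_binop (MULT_EXPR,
   arg1, arg2) yields the INTEGER_CST 42, while division by an
   INTEGER_CST zero makes wide_int_binop fail and so returns
   NULL_TREE.  */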
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
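
/* E.g. (a + b) << c equals (a << c) + (b << c) in modular arithmetic,
   so LSHIFT_EXPR distributes over addition in operand 1, whereas
   c << (a + b) obeys no such identity, hence the opno == 1
   restriction (editor's note).  */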
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISSIGNALING_NAN (d1)
              || REAL_VALUE_ISSIGNALING_NAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        {
          /* Make the resulting NaN value a qNaN when flag_signaling_nans
             is off.  */
          d1.signalling = 0;
          t = build_real (type, d1);
          return t;
        }
      else if (REAL_VALUE_ISNAN (d2))
        {
          /* Make the resulting NaN value a qNaN when flag_signaling_nans
             is off.  */
          d2.signalling = 0;
          t = build_real (type, d2);
          return t;
        }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         both operands are not NaN but the result is NaN, and
         flag_trapping_math.  Such operations should raise an
         invalid operation exception.  */
      if (flag_trapping_math
          && MODE_HAS_NANS (mode)
          && REAL_VALUE_ISNAN (result)
          && !REAL_VALUE_ISNAN (d1)
          && !REAL_VALUE_ISNAN (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
            f2.data.high = w2.elt (1);
            f2.data.low = w2.ulow ();
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.cc:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.cc:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
          && VECTOR_CST_STEPPED_P (arg2))
        /* We can operate directly on the encoding if:

             a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
           implies
             (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

           Addition and subtraction are the supported operators
           for which this is true.  */
        step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
        /* We can operate directly on stepped encodings if:

             a3 - a2 == a2 - a1
           implies:
             (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

           which is true if (x -> x op c) distributes over addition.  */
        step_ok_p = distributes_over_addition_p (code, 1);
      else
        /* Similarly in reverse.  */
        step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          tree elt = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          tree elt = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }
  return NULL_TREE;
}
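
/* Editor's illustration (an assumed call, not from this file):

     tree one = build_real (type, dconst1);
     tree two = build_real (type, dconst2);
     tree sum = const_binop (PLUS_EXPR, one, two);

   SUM is the exact REAL_CST 3.0; had the result been inexact with
   flag_rounding_math set, the guards above would have returned
   NULL_TREE instead.  */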
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
          && CONSTANT_CLASS_P (arg2))
        return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
        {
          poly_offset_int res = (wi::to_poly_offset (arg1)
                                 - wi::to_poly_offset (arg2));
          return force_fit_type (type, res, 1,
                                 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
        }
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
          return NULL_TREE;

        out_nelts = in_nelts * 2;
        gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
                    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = (i < in_nelts
                        ? VECTOR_CST_ELT (arg1, i)
                        : VECTOR_CST_ELT (arg2, i - in_nelts));
            elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                      ? NOP_EXPR
                                      : code == VEC_PACK_FLOAT_EXPR
                                      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
                                      TREE_TYPE (type), elt);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
          return NULL_TREE;
        out_nelts = in_nelts / 2;
        gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
                    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        tree_vector_builder elts (type, out_nelts, 1);
        for (out = 0; out < out_nelts; out++)
          {
            unsigned int in = (out << scale) + ofs;
            tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg1, in));
            tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg2, in));

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            tree elt = const_binop (MULT_EXPR, t1, t2);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
        return wide_int_to_tree (type, -poly_int_cst_value (arg0) - 1);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elem;

          /* This can cope with stepped encodings because ~x == -1 - x.  */
          tree_vector_builder elements;
          elements.new_unary_operation (type, arg0, true);
          unsigned int i, count = elements.encoded_nelts ();
          for (i = 0; i < count; ++i)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements.quick_push (elem);
            }
          if (i == count)
            return elements.build ();
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
          return NULL_TREE;
        out_nelts = in_nelts / 2;
        gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        unsigned int offset = 0;
        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR
                                   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
          offset = out_nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else if (code == VEC_UNPACK_FLOAT_LO_EXPR
                 || code == VEC_UNPACK_FLOAT_HI_EXPR)
          subcode = FLOAT_EXPR;
        else
          subcode = FIX_TRUNC_EXPR;

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = fold_convert_const (subcode, TREE_TYPE (type),
                                           VECTOR_CST_ELT (arg0, i + offset));
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
        return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
1996 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1997 indicates which particular sizetype to create. */
1999 tree
2000 size_int_kind (poly_int64 number, enum size_type_kind kind)
2002 return build_int_cst (sizetype_tab[(int) kind], number);
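/* Illustrative usage (not part of the original source): callers normally
   reach this through the size_int / ssize_int / bitsize_int / sbitsize_int
   macros from tree.h, which fix KIND:

     tree bytes = size_int (16);     // sizetype constant 16
     tree bits = bitsize_int (128);  // bitsizetype constant 128

   Both expand to size_int_kind with the matching sizetype_tab entry. */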
2005 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2006 is a tree code. The type of the result is taken from the operands.
2007 Both must be equivalent integer types, ala int_binop_types_match_p.
2008 If the operands are constant, so is the result. */
2010 tree
2011 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2013 tree type = TREE_TYPE (arg0);
2015 if (arg0 == error_mark_node || arg1 == error_mark_node)
2016 return error_mark_node;
2018 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2019 TREE_TYPE (arg1)));
2021 /* Handle the special case of two poly_int constants faster. */
2022 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2024 /* And some specific cases even faster than that. */
2025 if (code == PLUS_EXPR)
2027 if (integer_zerop (arg0)
2028 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2029 return arg1;
2030 if (integer_zerop (arg1)
2031 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2032 return arg0;
2034 else if (code == MINUS_EXPR)
2036 if (integer_zerop (arg1)
2037 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2038 return arg0;
2040 else if (code == MULT_EXPR)
2042 if (integer_onep (arg0)
2043 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2044 return arg1;
2047 /* Handle the general case of two integer constants. For sizetype
2048 constant calculations we always want to know about overflow,
2049 even in the unsigned case. */
2050 tree res = int_const_binop (code, arg0, arg1, -1);
2051 if (res != NULL_TREE)
2052 return res;
2055 return fold_build2_loc (loc, code, type, arg0, arg1);
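/* A sketch of typical use (illustrative, assuming the size_binop macro
   from fold-const.h, which supplies UNKNOWN_LOCATION):

     tree sz = size_binop (PLUS_EXPR, size_int (8), size_int (4));

   folds to the sizetype constant 12. int_const_binop is called with
   overflowable -1, so even though sizetype is unsigned, wrap-around is
   recorded via TREE_OVERFLOW rather than silently ignored. */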
2058 /* Given two values, either both of sizetype or both of bitsizetype,
2059 compute the difference between the two values. Return the value
2060 in signed type corresponding to the type of the operands. */
2062 tree
2063 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2065 tree type = TREE_TYPE (arg0);
2066 tree ctype;
2068 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2069 TREE_TYPE (arg1)));
2071 /* If the type is already signed, just do the simple thing. */
2072 if (!TYPE_UNSIGNED (type))
2073 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2075 if (type == sizetype)
2076 ctype = ssizetype;
2077 else if (type == bitsizetype)
2078 ctype = sbitsizetype;
2079 else
2080 ctype = signed_type_for (type);
2082 /* If either operand is not a constant, do the conversions to the signed
2083 type and subtract. The hardware will do the right thing with any
2084 overflow in the subtraction. */
2085 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2086 return size_binop_loc (loc, MINUS_EXPR,
2087 fold_convert_loc (loc, ctype, arg0),
2088 fold_convert_loc (loc, ctype, arg1));
2090 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2091 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2092 overflow) and negate (which can't either). Special-case a result
2093 of zero while we're here. */
2094 if (tree_int_cst_equal (arg0, arg1))
2095 return build_int_cst (ctype, 0);
2096 else if (tree_int_cst_lt (arg1, arg0))
2097 return fold_convert_loc (loc, ctype,
2098 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2099 else
2100 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2101 fold_convert_loc (loc, ctype,
2102 size_binop_loc (loc,
2103 MINUS_EXPR,
2104 arg1, arg0)));
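/* Worked example (illustrative): for sizetype constants ARG0 = 4 and
   ARG1 = 12 this returns the ssizetype constant -8: since ARG1 > ARG0,
   the subtraction is done as ARG1 - ARG0 in the unsigned type (which
   cannot overflow), then converted to ssizetype and negated. */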
2107 /* A subroutine of fold_convert_const handling conversions of an
2108 INTEGER_CST to another integer type. */
2110 static tree
2111 fold_convert_const_int_from_int (tree type, const_tree arg1)
2113 /* Given an integer constant, make new constant with new type,
2114 appropriately sign-extended or truncated. Use widest_int
2115 so that any extension is done according to ARG1's type. */
2116 return force_fit_type (type, wi::to_widest (arg1),
2117 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2118 TREE_OVERFLOW (arg1));
2121 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2122 to an integer type. */
2124 static tree
2125 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2127 bool overflow = false;
2128 tree t;
2130 /* The following code implements the floating point to integer
2131 conversion rules required by the Java Language Specification,
2132 that IEEE NaNs are mapped to zero and values that overflow
2133 the target precision saturate, i.e. values greater than
2134 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2135 are mapped to INT_MIN. These semantics are allowed by the
2136 C and C++ standards that simply state that the behavior of
2137 FP-to-integer conversion is undefined upon overflow. */
2139 wide_int val;
2140 REAL_VALUE_TYPE r;
2141 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2143 switch (code)
2145 case FIX_TRUNC_EXPR:
2146 real_trunc (&r, VOIDmode, &x);
2147 break;
2149 default:
2150 gcc_unreachable ();
2153 /* If R is NaN, return zero and show we have an overflow. */
2154 if (REAL_VALUE_ISNAN (r))
2156 overflow = true;
2157 val = wi::zero (TYPE_PRECISION (type));
2160 /* See if R is less than the lower bound or greater than the
2161 upper bound. */
2163 if (! overflow)
2165 tree lt = TYPE_MIN_VALUE (type);
2166 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2167 if (real_less (&r, &l))
2169 overflow = true;
2170 val = wi::to_wide (lt);
2174 if (! overflow)
2176 tree ut = TYPE_MAX_VALUE (type);
2177 if (ut)
2179 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2180 if (real_less (&u, &r))
2182 overflow = true;
2183 val = wi::to_wide (ut);
2188 if (! overflow)
2189 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2191 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2192 return t;
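/* Example of the saturating semantics above (illustrative): folding
   (int) 1.0e30 with a 32-bit int yields INT_MAX, and folding a NaN
   yields 0; both results carry TREE_OVERFLOW so callers can diagnose
   the out-of-range conversion. */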
2195 /* A subroutine of fold_convert_const handling conversions of a
2196 FIXED_CST to an integer type. */
2198 static tree
2199 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2201 tree t;
2202 double_int temp, temp_trunc;
2203 scalar_mode mode;
2205 /* Right shift FIXED_CST to temp by fbit. */
2206 temp = TREE_FIXED_CST (arg1).data;
2207 mode = TREE_FIXED_CST (arg1).mode;
2208 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2210 temp = temp.rshift (GET_MODE_FBIT (mode),
2211 HOST_BITS_PER_DOUBLE_INT,
2212 SIGNED_FIXED_POINT_MODE_P (mode));
2214 /* Left shift temp to temp_trunc by fbit. */
2215 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2216 HOST_BITS_PER_DOUBLE_INT,
2217 SIGNED_FIXED_POINT_MODE_P (mode));
2219 else
2221 temp = double_int_zero;
2222 temp_trunc = double_int_zero;
2225 /* If FIXED_CST is negative, we need to round the value toward 0:
2226 if the fractional bits are nonzero, add 1 to temp. */
2227 if (SIGNED_FIXED_POINT_MODE_P (mode)
2228 && temp_trunc.is_negative ()
2229 && TREE_FIXED_CST (arg1).data != temp_trunc)
2230 temp += double_int_one;
2232 /* Given a fixed-point constant, make new constant with new type,
2233 appropriately sign-extended or truncated. */
2234 t = force_fit_type (type, temp, -1,
2235 (temp.is_negative ()
2236 && (TYPE_UNSIGNED (type)
2237 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2238 | TREE_OVERFLOW (arg1));
2240 return t;
2243 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2244 to another floating point type. */
2246 static tree
2247 fold_convert_const_real_from_real (tree type, const_tree arg1)
2249 REAL_VALUE_TYPE value;
2250 tree t;
2252 /* If the underlying modes are the same, simply treat it as a
2253 copy: rebuild with the TREE_REAL_CST information and the
2254 given type. */
2255 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2257 t = build_real (type, TREE_REAL_CST (arg1));
2258 return t;
2261 /* Don't perform the operation if flag_signaling_nans is on
2262 and the operand is a signaling NaN. */
2263 if (HONOR_SNANS (arg1)
2264 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2265 return NULL_TREE;
2267 /* With flag_rounding_math we should respect the current rounding mode
2268 unless the conversion is exact. */
2269 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2270 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2271 return NULL_TREE;
2273 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2274 t = build_real (type, value);
2276 /* If converting an infinity or NAN to a representation that doesn't
2277 have one, set the overflow bit so that we can produce some kind of
2278 error message at the appropriate point if necessary. It's not the
2279 most user-friendly message, but it's better than nothing. */
2280 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2281 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2282 TREE_OVERFLOW (t) = 1;
2283 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2284 && !MODE_HAS_NANS (TYPE_MODE (type)))
2285 TREE_OVERFLOW (t) = 1;
2286 /* Regular overflow: the conversion produced an infinity in a mode
2287 that can't represent infinities. */
2288 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2289 && REAL_VALUE_ISINF (value)
2290 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2291 TREE_OVERFLOW (t) = 1;
2292 else
2293 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 return t;
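/* Illustrative consequence: on IEEE targets, narrowing the double
   constant 1.0e300 to float overflows to +Inf, but because SFmode has
   infinities none of the checks above fire and only ARG1's
   TREE_OVERFLOW is propagated; the explicit checks matter only for
   modes lacking Inf or NaN. */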
2297 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2298 to a floating point type. */
2300 static tree
2301 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2303 REAL_VALUE_TYPE value;
2304 tree t;
2306 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2307 &TREE_FIXED_CST (arg1));
2308 t = build_real (type, value);
2310 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2311 return t;
2314 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2315 to another fixed-point type. */
2317 static tree
2318 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2320 FIXED_VALUE_TYPE value;
2321 tree t;
2322 bool overflow_p;
2324 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2325 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2326 t = build_fixed (type, value);
2328 /* Propagate overflow flags. */
2329 if (overflow_p | TREE_OVERFLOW (arg1))
2330 TREE_OVERFLOW (t) = 1;
2331 return t;
2334 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2335 to a fixed-point type. */
2337 static tree
2338 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2340 FIXED_VALUE_TYPE value;
2341 tree t;
2342 bool overflow_p;
2343 double_int di;
2345 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2347 di.low = TREE_INT_CST_ELT (arg1, 0);
2348 if (TREE_INT_CST_NUNITS (arg1) == 1)
2349 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2350 else
2351 di.high = TREE_INT_CST_ELT (arg1, 1);
2353 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2354 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2355 TYPE_SATURATING (type));
2356 t = build_fixed (type, value);
2358 /* Propagate overflow flags. */
2359 if (overflow_p | TREE_OVERFLOW (arg1))
2360 TREE_OVERFLOW (t) = 1;
2361 return t;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2374 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2381 TREE_OVERFLOW (t) = 1;
2382 return t;
2385 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2386 type TYPE. If no simplification can be done return NULL_TREE. */
2388 static tree
2389 fold_convert_const (enum tree_code code, tree type, tree arg1)
2391 tree arg_type = TREE_TYPE (arg1);
2392 if (arg_type == type)
2393 return arg1;
2395 /* We can't widen types, since the runtime value could overflow the
2396 original type before being extended to the new type. */
2397 if (POLY_INT_CST_P (arg1)
2398 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2399 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2400 return build_poly_int_cst (type,
2401 poly_wide_int::from (poly_int_cst_value (arg1),
2402 TYPE_PRECISION (type),
2403 TYPE_SIGN (arg_type)));
2405 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2406 || TREE_CODE (type) == OFFSET_TYPE)
2408 if (TREE_CODE (arg1) == INTEGER_CST)
2409 return fold_convert_const_int_from_int (type, arg1);
2410 else if (TREE_CODE (arg1) == REAL_CST)
2411 return fold_convert_const_int_from_real (code, type, arg1);
2412 else if (TREE_CODE (arg1) == FIXED_CST)
2413 return fold_convert_const_int_from_fixed (type, arg1);
2415 else if (SCALAR_FLOAT_TYPE_P (type))
2417 if (TREE_CODE (arg1) == INTEGER_CST)
2419 tree res = build_real_from_int_cst (type, arg1);
2420 /* Avoid the folding if flag_rounding_math is on and the
2421 conversion is not exact. */
2422 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2424 bool fail = false;
2425 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2426 TYPE_PRECISION (TREE_TYPE (arg1)));
2427 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2428 return NULL_TREE;
2430 return res;
2432 else if (TREE_CODE (arg1) == REAL_CST)
2433 return fold_convert_const_real_from_real (type, arg1);
2434 else if (TREE_CODE (arg1) == FIXED_CST)
2435 return fold_convert_const_real_from_fixed (type, arg1);
2437 else if (FIXED_POINT_TYPE_P (type))
2439 if (TREE_CODE (arg1) == FIXED_CST)
2440 return fold_convert_const_fixed_from_fixed (type, arg1);
2441 else if (TREE_CODE (arg1) == INTEGER_CST)
2442 return fold_convert_const_fixed_from_int (type, arg1);
2443 else if (TREE_CODE (arg1) == REAL_CST)
2444 return fold_convert_const_fixed_from_real (type, arg1);
2446 else if (VECTOR_TYPE_P (type))
2448 if (TREE_CODE (arg1) == VECTOR_CST
2449 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2451 tree elttype = TREE_TYPE (type);
2452 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2453 /* We can't handle steps directly when extending, since the
2454 values need to wrap at the original precision first. */
2455 bool step_ok_p
2456 = (INTEGRAL_TYPE_P (elttype)
2457 && INTEGRAL_TYPE_P (arg1_elttype)
2458 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2459 tree_vector_builder v;
2460 if (!v.new_unary_operation (type, arg1, step_ok_p))
2461 return NULL_TREE;
2462 unsigned int len = v.encoded_nelts ();
2463 for (unsigned int i = 0; i < len; ++i)
2465 tree elt = VECTOR_CST_ELT (arg1, i);
2466 tree cvt = fold_convert_const (code, elttype, elt);
2467 if (cvt == NULL_TREE)
2468 return NULL_TREE;
2469 v.quick_push (cvt);
2471 return v.build ();
2474 return NULL_TREE;
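/* Usage sketch (illustrative):

     fold_convert_const (NOP_EXPR, long_long_integer_type_node,
                         build_int_cst (integer_type_node, 42))

   returns a long long INTEGER_CST 42; a conversion this routine cannot
   fold, such as a non-constant ARG1, yields NULL_TREE for the caller
   to handle. */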
2477 /* Construct a vector of zero elements of vector type TYPE. */
2479 static tree
2480 build_zero_vector (tree type)
2482 tree t;
2484 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2485 return build_vector_from_val (type, t);
2488 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2490 bool
2491 fold_convertible_p (const_tree type, const_tree arg)
2493 const_tree orig = TREE_TYPE (arg);
2495 if (type == orig)
2496 return true;
2498 if (TREE_CODE (arg) == ERROR_MARK
2499 || TREE_CODE (type) == ERROR_MARK
2500 || TREE_CODE (orig) == ERROR_MARK)
2501 return false;
2503 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2504 return true;
2506 switch (TREE_CODE (type))
2508 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2509 case POINTER_TYPE: case REFERENCE_TYPE:
2510 case OFFSET_TYPE:
2511 return (INTEGRAL_TYPE_P (orig)
2512 || (POINTER_TYPE_P (orig)
2513 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2514 || TREE_CODE (orig) == OFFSET_TYPE);
2516 case REAL_TYPE:
2517 case FIXED_POINT_TYPE:
2518 case VOID_TYPE:
2519 return TREE_CODE (type) == TREE_CODE (orig);
2521 case VECTOR_TYPE:
2522 return (VECTOR_TYPE_P (orig)
2523 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2524 TYPE_VECTOR_SUBPARTS (orig))
2525 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2527 default:
2528 return false;
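/* For example (illustrative): this answers true for int -> long and
   for pointer -> equally wide integer, but false for double -> int,
   which needs FIX_TRUNC_EXPR rather than a plain NOP_EXPR. */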
2532 /* Convert expression ARG to type TYPE. Used by the middle-end for
2533 simple conversions in preference to calling the front-end's convert. */
2535 tree
2536 fold_convert_loc (location_t loc, tree type, tree arg)
2538 tree orig = TREE_TYPE (arg);
2539 tree tem;
2541 if (type == orig)
2542 return arg;
2544 if (TREE_CODE (arg) == ERROR_MARK
2545 || TREE_CODE (type) == ERROR_MARK
2546 || TREE_CODE (orig) == ERROR_MARK)
2547 return error_mark_node;
2549 switch (TREE_CODE (type))
2551 case POINTER_TYPE:
2552 case REFERENCE_TYPE:
2553 /* Handle conversions between pointers to different address spaces. */
2554 if (POINTER_TYPE_P (orig)
2555 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2556 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2557 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2558 /* fall through */
2560 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2561 case OFFSET_TYPE: case BITINT_TYPE:
2562 if (TREE_CODE (arg) == INTEGER_CST)
2564 tem = fold_convert_const (NOP_EXPR, type, arg);
2565 if (tem != NULL_TREE)
2566 return tem;
2568 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2569 || TREE_CODE (orig) == OFFSET_TYPE)
2570 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2571 if (TREE_CODE (orig) == COMPLEX_TYPE)
2572 return fold_convert_loc (loc, type,
2573 fold_build1_loc (loc, REALPART_EXPR,
2574 TREE_TYPE (orig), arg));
2575 gcc_assert (VECTOR_TYPE_P (orig)
2576 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2577 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2579 case REAL_TYPE:
2580 if (TREE_CODE (arg) == INTEGER_CST)
2582 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2583 if (tem != NULL_TREE)
2584 return tem;
2586 else if (TREE_CODE (arg) == REAL_CST)
2588 tem = fold_convert_const (NOP_EXPR, type, arg);
2589 if (tem != NULL_TREE)
2590 return tem;
2592 else if (TREE_CODE (arg) == FIXED_CST)
2594 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2595 if (tem != NULL_TREE)
2596 return tem;
2599 switch (TREE_CODE (orig))
2601 case INTEGER_TYPE: case BITINT_TYPE:
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2604 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2606 case REAL_TYPE:
2607 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2609 case FIXED_POINT_TYPE:
2610 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2612 case COMPLEX_TYPE:
2613 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2614 return fold_convert_loc (loc, type, tem);
2616 default:
2617 gcc_unreachable ();
2620 case FIXED_POINT_TYPE:
2621 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2622 || TREE_CODE (arg) == REAL_CST)
2624 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2625 if (tem != NULL_TREE)
2626 goto fold_convert_exit;
2629 switch (TREE_CODE (orig))
2631 case FIXED_POINT_TYPE:
2632 case INTEGER_TYPE:
2633 case ENUMERAL_TYPE:
2634 case BOOLEAN_TYPE:
2635 case REAL_TYPE:
2636 case BITINT_TYPE:
2637 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2639 case COMPLEX_TYPE:
2640 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2641 return fold_convert_loc (loc, type, tem);
2643 default:
2644 gcc_unreachable ();
2647 case COMPLEX_TYPE:
2648 switch (TREE_CODE (orig))
2650 case INTEGER_TYPE: case BITINT_TYPE:
2651 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2652 case POINTER_TYPE: case REFERENCE_TYPE:
2653 case REAL_TYPE:
2654 case FIXED_POINT_TYPE:
2655 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2656 fold_convert_loc (loc, TREE_TYPE (type), arg),
2657 fold_convert_loc (loc, TREE_TYPE (type),
2658 integer_zero_node));
2659 case COMPLEX_TYPE:
2661 tree rpart, ipart;
2663 if (TREE_CODE (arg) == COMPLEX_EXPR)
2665 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2666 TREE_OPERAND (arg, 0));
2667 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2668 TREE_OPERAND (arg, 1));
2669 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2672 arg = save_expr (arg);
2673 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2674 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2675 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2676 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2677 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2680 default:
2681 gcc_unreachable ();
2684 case VECTOR_TYPE:
2685 if (integer_zerop (arg))
2686 return build_zero_vector (type);
2687 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2688 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2689 || VECTOR_TYPE_P (orig));
2690 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2692 case VOID_TYPE:
2693 tem = fold_ignored_result (arg);
2694 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2696 default:
2697 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2698 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2699 gcc_unreachable ();
2701 fold_convert_exit:
2702 tem = protected_set_expr_location_unshare (tem, loc);
2703 return tem;
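/* Illustrative dispatch (a sketch, not exhaustive): converting an int
   expression I to double builds FLOAT_EXPR <I>; converting it to a
   complex type builds COMPLEX_EXPR <(T) I, (T) 0>; and a conversion
   the middle end considers invalid here (say, between vectors of
   different size) trips the gcc_asserts instead of failing quietly. */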
2706 /* Return false if expr can be assumed not to be an lvalue, true
2707 otherwise. */
2709 static bool
2710 maybe_lvalue_p (const_tree x)
2712 /* We only need to wrap lvalue tree codes. */
2713 switch (TREE_CODE (x))
2715 case VAR_DECL:
2716 case PARM_DECL:
2717 case RESULT_DECL:
2718 case LABEL_DECL:
2719 case FUNCTION_DECL:
2720 case SSA_NAME:
2721 case COMPOUND_LITERAL_EXPR:
2723 case COMPONENT_REF:
2724 case MEM_REF:
2725 case INDIRECT_REF:
2726 case ARRAY_REF:
2727 case ARRAY_RANGE_REF:
2728 case BIT_FIELD_REF:
2729 case OBJ_TYPE_REF:
2731 case REALPART_EXPR:
2732 case IMAGPART_EXPR:
2733 case PREINCREMENT_EXPR:
2734 case PREDECREMENT_EXPR:
2735 case SAVE_EXPR:
2736 case TRY_CATCH_EXPR:
2737 case WITH_CLEANUP_EXPR:
2738 case COMPOUND_EXPR:
2739 case MODIFY_EXPR:
2740 case TARGET_EXPR:
2741 case COND_EXPR:
2742 case BIND_EXPR:
2743 case VIEW_CONVERT_EXPR:
2744 break;
2746 default:
2747 /* Assume the worst for front-end tree codes. */
2748 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2749 break;
2750 return false;
2753 return true;
2756 /* Return an expr equal to X but certainly not valid as an lvalue. */
2758 tree
2759 non_lvalue_loc (location_t loc, tree x)
2761 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2762 us. */
2763 if (in_gimple_form)
2764 return x;
2766 if (! maybe_lvalue_p (x))
2767 return x;
2768 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2771 /* Given a tree comparison code, return the code that is the logical inverse.
2772 It is generally not safe to do this for floating-point comparisons, except
2773 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2774 ERROR_MARK in this case. */
2776 enum tree_code
2777 invert_tree_comparison (enum tree_code code, bool honor_nans)
2779 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2780 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2781 return ERROR_MARK;
2783 switch (code)
2785 case EQ_EXPR:
2786 return NE_EXPR;
2787 case NE_EXPR:
2788 return EQ_EXPR;
2789 case GT_EXPR:
2790 return honor_nans ? UNLE_EXPR : LE_EXPR;
2791 case GE_EXPR:
2792 return honor_nans ? UNLT_EXPR : LT_EXPR;
2793 case LT_EXPR:
2794 return honor_nans ? UNGE_EXPR : GE_EXPR;
2795 case LE_EXPR:
2796 return honor_nans ? UNGT_EXPR : GT_EXPR;
2797 case LTGT_EXPR:
2798 return UNEQ_EXPR;
2799 case UNEQ_EXPR:
2800 return LTGT_EXPR;
2801 case UNGT_EXPR:
2802 return LE_EXPR;
2803 case UNGE_EXPR:
2804 return LT_EXPR;
2805 case UNLT_EXPR:
2806 return GE_EXPR;
2807 case UNLE_EXPR:
2808 return GT_EXPR;
2809 case ORDERED_EXPR:
2810 return UNORDERED_EXPR;
2811 case UNORDERED_EXPR:
2812 return ORDERED_EXPR;
2813 default:
2814 gcc_unreachable ();
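/* For example (illustrative): with NaNs honored, the inverse of
   LT_EXPR is UNGE_EXPR, so the inverted test is still true when an
   operand is a NaN. If -ftrapping-math is also in effect, LT_EXPR is
   not inverted at all and ERROR_MARK is returned, since the inverse
   would trap on a different set of inputs. */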
2818 /* Similar, but return the comparison that results if the operands are
2819 swapped. This is safe for floating-point. */
2821 enum tree_code
2822 swap_tree_comparison (enum tree_code code)
2824 switch (code)
2826 case EQ_EXPR:
2827 case NE_EXPR:
2828 case ORDERED_EXPR:
2829 case UNORDERED_EXPR:
2830 case LTGT_EXPR:
2831 case UNEQ_EXPR:
2832 return code;
2833 case GT_EXPR:
2834 return LT_EXPR;
2835 case GE_EXPR:
2836 return LE_EXPR;
2837 case LT_EXPR:
2838 return GT_EXPR;
2839 case LE_EXPR:
2840 return GE_EXPR;
2841 case UNGT_EXPR:
2842 return UNLT_EXPR;
2843 case UNGE_EXPR:
2844 return UNLE_EXPR;
2845 case UNLT_EXPR:
2846 return UNGT_EXPR;
2847 case UNLE_EXPR:
2848 return UNGE_EXPR;
2849 default:
2850 gcc_unreachable ();
2855 /* Convert a comparison tree code from an enum tree_code representation
2856 into a compcode bit-based encoding. This function is the inverse of
2857 compcode_to_comparison. */
2859 static enum comparison_code
2860 comparison_to_compcode (enum tree_code code)
2862 switch (code)
2864 case LT_EXPR:
2865 return COMPCODE_LT;
2866 case EQ_EXPR:
2867 return COMPCODE_EQ;
2868 case LE_EXPR:
2869 return COMPCODE_LE;
2870 case GT_EXPR:
2871 return COMPCODE_GT;
2872 case NE_EXPR:
2873 return COMPCODE_NE;
2874 case GE_EXPR:
2875 return COMPCODE_GE;
2876 case ORDERED_EXPR:
2877 return COMPCODE_ORD;
2878 case UNORDERED_EXPR:
2879 return COMPCODE_UNORD;
2880 case UNLT_EXPR:
2881 return COMPCODE_UNLT;
2882 case UNEQ_EXPR:
2883 return COMPCODE_UNEQ;
2884 case UNLE_EXPR:
2885 return COMPCODE_UNLE;
2886 case UNGT_EXPR:
2887 return COMPCODE_UNGT;
2888 case LTGT_EXPR:
2889 return COMPCODE_LTGT;
2890 case UNGE_EXPR:
2891 return COMPCODE_UNGE;
2892 default:
2893 gcc_unreachable ();
2897 /* Convert a compcode bit-based encoding of a comparison operator back
2898 to GCC's enum tree_code representation. This function is the
2899 inverse of comparison_to_compcode. */
2901 static enum tree_code
2902 compcode_to_comparison (enum comparison_code code)
2904 switch (code)
2906 case COMPCODE_LT:
2907 return LT_EXPR;
2908 case COMPCODE_EQ:
2909 return EQ_EXPR;
2910 case COMPCODE_LE:
2911 return LE_EXPR;
2912 case COMPCODE_GT:
2913 return GT_EXPR;
2914 case COMPCODE_NE:
2915 return NE_EXPR;
2916 case COMPCODE_GE:
2917 return GE_EXPR;
2918 case COMPCODE_ORD:
2919 return ORDERED_EXPR;
2920 case COMPCODE_UNORD:
2921 return UNORDERED_EXPR;
2922 case COMPCODE_UNLT:
2923 return UNLT_EXPR;
2924 case COMPCODE_UNEQ:
2925 return UNEQ_EXPR;
2926 case COMPCODE_UNLE:
2927 return UNLE_EXPR;
2928 case COMPCODE_UNGT:
2929 return UNGT_EXPR;
2930 case COMPCODE_LTGT:
2931 return LTGT_EXPR;
2932 case COMPCODE_UNGE:
2933 return UNGE_EXPR;
2934 default:
2935 gcc_unreachable ();
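/* The encoding (see the comparison_code enum earlier in this file) is
   deliberately bit-based: one bit each for "less", "equal", "greater"
   and "unordered". Under that assumption, for instance,

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   which is what allows combine_comparisons below to AND and OR the
   codes directly. */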
2939 /* Return true if COND1 tests the opposite condition of COND2. */
2941 bool
2942 inverse_conditions_p (const_tree cond1, const_tree cond2)
2944 return (COMPARISON_CLASS_P (cond1)
2945 && COMPARISON_CLASS_P (cond2)
2946 && (invert_tree_comparison
2947 (TREE_CODE (cond1),
2948 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2949 && operand_equal_p (TREE_OPERAND (cond1, 0),
2950 TREE_OPERAND (cond2, 0), 0)
2951 && operand_equal_p (TREE_OPERAND (cond1, 1),
2952 TREE_OPERAND (cond2, 1), 0));
2955 /* Return a tree for the comparison which is the combination of
2956 doing the AND or OR (depending on CODE) of the two operations LCODE
2957 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2958 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2959 if this makes the transformation invalid. */
2961 tree
2962 combine_comparisons (location_t loc,
2963 enum tree_code code, enum tree_code lcode,
2964 enum tree_code rcode, tree truth_type,
2965 tree ll_arg, tree lr_arg)
2967 bool honor_nans = HONOR_NANS (ll_arg);
2968 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2969 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2970 int compcode;
2972 switch (code)
2974 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2975 compcode = lcompcode & rcompcode;
2976 break;
2978 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2979 compcode = lcompcode | rcompcode;
2980 break;
2982 default:
2983 return NULL_TREE;
2986 if (!honor_nans)
2988 /* Eliminate unordered comparisons, as well as LTGT and ORD
2989 which are not used unless the mode has NaNs. */
2990 compcode &= ~COMPCODE_UNORD;
2991 if (compcode == COMPCODE_LTGT)
2992 compcode = COMPCODE_NE;
2993 else if (compcode == COMPCODE_ORD)
2994 compcode = COMPCODE_TRUE;
2996 else if (flag_trapping_math)
2998 /* Check that the original operation and the optimized ones will trap
2999 under the same condition. */
3000 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3001 && (lcompcode != COMPCODE_EQ)
3002 && (lcompcode != COMPCODE_ORD);
3003 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3004 && (rcompcode != COMPCODE_EQ)
3005 && (rcompcode != COMPCODE_ORD);
3006 bool trap = (compcode & COMPCODE_UNORD) == 0
3007 && (compcode != COMPCODE_EQ)
3008 && (compcode != COMPCODE_ORD);
3010 /* In a short-circuited boolean expression the LHS might be
3011 such that the RHS, if evaluated, will never trap. For
3012 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3013 if neither x nor y is NaN. (This is a mixed blessing: for
3014 example, the expression above will never trap, hence
3015 optimizing it to x < y would be invalid). */
3016 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3017 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3018 rtrap = false;
3020 /* If the comparison was short-circuited, and only the RHS
3021 trapped, we may now generate a spurious trap. */
3022 if (rtrap && !ltrap
3023 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3024 return NULL_TREE;
3026 /* If we changed the conditions that cause a trap, we lose. */
3027 if ((ltrap || rtrap) != trap)
3028 return NULL_TREE;
3031 if (compcode == COMPCODE_TRUE)
3032 return constant_boolean_node (true, truth_type);
3033 else if (compcode == COMPCODE_FALSE)
3034 return constant_boolean_node (false, truth_type);
3035 else
3037 enum tree_code tcode;
3039 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3040 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
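/* Illustrative folds this enables, assuming identical operands:

     a < b || a == b   -->  a <= b     (compcode LT | EQ == LE)
     a < b && a > b    -->  false      (compcode LT & GT == FALSE)

   For floating point the trap analysis above may instead return
   NULL_TREE, whenever the rewrite would add or remove a trapping
   comparison under -ftrapping-math. */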
3044 /* Return nonzero if two operands (typically of the same tree node)
3045 are necessarily equal. FLAGS modifies behavior as follows:
3047 If OEP_ONLY_CONST is set, only return nonzero for constants.
3048 This function tests whether the operands are indistinguishable;
3049 it does not test whether they are equal using C's == operation.
3050 The distinction is important for IEEE floating point, because
3051 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3052 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3054 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3055 even though it may hold multiple values during a function.
3056 This is because a GCC tree node guarantees that nothing else is
3057 executed between the evaluation of its "operands" (which may often
3058 be evaluated in arbitrary order). Hence if the operands themselves
3059 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3060 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3061 unset means assuming isochronic (or instantaneous) tree equivalence.
3062 Unless comparing arbitrary expression trees, such as from different
3063 statements, this flag can usually be left unset.
3065 If OEP_PURE_SAME is set, then pure functions with identical arguments
3066 are considered the same. It is used when the caller has other ways
3067 to ensure that global memory is unchanged in between.
3069 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3070 not values of expressions.
3072 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3073 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3075 If OEP_BITWISE is set, then require the values to be bitwise identical
3076 rather than simply numerically equal. Do not take advantage of things
3077 like math-related flags or undefined behavior; only return true for
3078 values that are provably bitwise identical in all circumstances.
3080 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3081 any operand with side effects. This is unnecessarily conservative in the
3082 case we know that arg0 and arg1 are in disjoint code paths (such as in
3083 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3084 addresses with TREE_CONSTANT flag set so we know that &var == &var
3085 even if var is volatile. */
3087 bool
3088 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3089 unsigned int flags)
3091 bool r;
3092 if (verify_hash_value (arg0, arg1, flags, &r))
3093 return r;
3095 STRIP_ANY_LOCATION_WRAPPER (arg0);
3096 STRIP_ANY_LOCATION_WRAPPER (arg1);
3098 /* If either is ERROR_MARK, they aren't equal. */
3099 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3100 || TREE_TYPE (arg0) == error_mark_node
3101 || TREE_TYPE (arg1) == error_mark_node)
3102 return false;
3104 /* Similarly, if either does not have a type (like a template id),
3105 they aren't equal. */
3106 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3107 return false;
3109 /* Bitwise identity makes no sense if the values have different layouts. */
3110 if ((flags & OEP_BITWISE)
3111 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3112 return false;
3114 /* We cannot consider pointers to different address spaces equal. */
3115 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3116 && POINTER_TYPE_P (TREE_TYPE (arg1))
3117 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3118 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3119 return false;
3121 /* Check equality of integer constants before bailing out due to
3122 precision differences. */
3123 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3125 /* Address of INTEGER_CST is not defined; check that we did not forget
3126 to drop the OEP_ADDRESS_OF flags. */
3127 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3128 return tree_int_cst_equal (arg0, arg1);
3131 if (!(flags & OEP_ADDRESS_OF))
3133 /* If both types don't have the same signedness, then we can't consider
3134 them equal. We must check this before the STRIP_NOPS calls
3135 because they may change the signedness of the arguments. As pointers
3136 strictly don't have a signedness, require either two pointers or
3137 two non-pointers as well. */
3138 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3139 || POINTER_TYPE_P (TREE_TYPE (arg0))
3140 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3141 return false;
3143 /* If both types don't have the same precision, then it is not safe
3144 to strip NOPs. */
3145 if (element_precision (TREE_TYPE (arg0))
3146 != element_precision (TREE_TYPE (arg1)))
3147 return false;
3149 STRIP_NOPS (arg0);
3150 STRIP_NOPS (arg1);
3152 #if 0
3153 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3154 sanity check once the issue is solved. */
3155 else
3156 /* Addresses of conversions and SSA_NAMEs (and many other things)
3157 are not defined. Check that we did not forget to drop the
3158 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3159 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3160 && TREE_CODE (arg0) != SSA_NAME);
3161 #endif
3163 /* In case both args are comparisons but with different comparison
3164 code, try to swap the comparison operands of one arg to produce
3165 a match and compare that variant. */
3166 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3167 && COMPARISON_CLASS_P (arg0)
3168 && COMPARISON_CLASS_P (arg1))
3170 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3172 if (TREE_CODE (arg0) == swap_code)
3173 return operand_equal_p (TREE_OPERAND (arg0, 0),
3174 TREE_OPERAND (arg1, 1), flags)
3175 && operand_equal_p (TREE_OPERAND (arg0, 1),
3176 TREE_OPERAND (arg1, 0), flags);
3179 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3181 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3182 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3184 else if (flags & OEP_ADDRESS_OF)
3186 /* If we are interested in comparing addresses ignore
3187 MEM_REF wrappings of the base that can appear just for
3188 TBAA reasons. */
3189 if (TREE_CODE (arg0) == MEM_REF
3190 && DECL_P (arg1)
3191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3192 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3193 && integer_zerop (TREE_OPERAND (arg0, 1)))
3194 return true;
3195 else if (TREE_CODE (arg1) == MEM_REF
3196 && DECL_P (arg0)
3197 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3198 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3199 && integer_zerop (TREE_OPERAND (arg1, 1)))
3200 return true;
3201 return false;
3203 else
3204 return false;
3207 /* When not checking addresses, this is needed for conversions and for
3208 COMPONENT_REF. Might as well play it safe and always test this. */
3209 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3210 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3211 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3212 && !(flags & OEP_ADDRESS_OF)))
3213 return false;
3215 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3216 We don't care about side effects in that case because the SAVE_EXPR
3217 takes care of that for us. In all other cases, two expressions are
3218 equal if they have no side effects. If we have two identical
3219 expressions with side effects that should be treated the same due
3220 to the only side effects being identical SAVE_EXPR's, that will
3221 be detected in the recursive calls below.
3222 If we are taking an invariant address of two identical objects
3223 they are necessarily equal as well. */
3224 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3225 && (TREE_CODE (arg0) == SAVE_EXPR
3226 || (flags & OEP_MATCH_SIDE_EFFECTS)
3227 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3228 return true;
3230 /* Next handle constant cases, those for which we can return 1 even
3231 if ONLY_CONST is set. */
3232 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3233 switch (TREE_CODE (arg0))
3235 case INTEGER_CST:
3236 return tree_int_cst_equal (arg0, arg1);
3238 case FIXED_CST:
3239 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3240 TREE_FIXED_CST (arg1));
3242 case REAL_CST:
3243 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3244 return true;
3246 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3248 /* If we do not distinguish between signed and unsigned zero,
3249 consider them equal. */
3250 if (real_zerop (arg0) && real_zerop (arg1))
3251 return true;
3253 return false;
3255 case VECTOR_CST:
3257 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3258 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3259 return false;
3261 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3262 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3263 return false;
3265 unsigned int count = vector_cst_encoded_nelts (arg0);
3266 for (unsigned int i = 0; i < count; ++i)
3267 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3268 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3269 return false;
3270 return true;
3273 case COMPLEX_CST:
3274 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3275 flags)
3276 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3277 flags));
3279 case STRING_CST:
3280 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3281 && ! memcmp (TREE_STRING_POINTER (arg0),
3282 TREE_STRING_POINTER (arg1),
3283 TREE_STRING_LENGTH (arg0)));
3285 case ADDR_EXPR:
3286 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3287 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3288 flags | OEP_ADDRESS_OF
3289 | OEP_MATCH_SIDE_EFFECTS);
3290 case CONSTRUCTOR:
3291 /* In GIMPLE empty constructors are allowed in initializers of
3292 aggregates. */
3293 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3294 default:
3295 break;
3298 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3299 two instances of undefined behavior will give identical results. */
3300 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3301 return false;
3303 /* Define macros to test an operand from arg0 and arg1 for equality and a
3304 variant that allows null and views null as being different from any
3305 non-null value. In the latter case, if either is null, the both
3306 must be; otherwise, do the normal comparison. */
3307 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3308 TREE_OPERAND (arg1, N), flags)
3310 #define OP_SAME_WITH_NULL(N) \
3311 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3312 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3314 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3316 case tcc_unary:
3317 /* Two conversions are equal only if signedness and modes match. */
3318 switch (TREE_CODE (arg0))
3320 CASE_CONVERT:
3321 case FIX_TRUNC_EXPR:
3322 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3323 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3324 return false;
3325 break;
3326 default:
3327 break;
3330 return OP_SAME (0);
3333 case tcc_comparison:
3334 case tcc_binary:
3335 if (OP_SAME (0) && OP_SAME (1))
3336 return true;
3338 /* For commutative ops, allow the other order. */
3339 return (commutative_tree_code (TREE_CODE (arg0))
3340 && operand_equal_p (TREE_OPERAND (arg0, 0),
3341 TREE_OPERAND (arg1, 1), flags)
3342 && operand_equal_p (TREE_OPERAND (arg0, 1),
3343 TREE_OPERAND (arg1, 0), flags));
3345 case tcc_reference:
3346 /* If either of the pointer (or reference) expressions we are
3347 dereferencing contains a side effect, these cannot be equal,
3348 but their addresses can be. */
3349 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3350 && (TREE_SIDE_EFFECTS (arg0)
3351 || TREE_SIDE_EFFECTS (arg1)))
3352 return false;
3354 switch (TREE_CODE (arg0))
3356 case INDIRECT_REF:
3357 if (!(flags & OEP_ADDRESS_OF))
3359 if (TYPE_ALIGN (TREE_TYPE (arg0))
3360 != TYPE_ALIGN (TREE_TYPE (arg1)))
3361 return false;
3362 /* Verify that the access types are compatible. */
3363 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3364 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3365 return false;
3367 flags &= ~OEP_ADDRESS_OF;
3368 return OP_SAME (0);
3370 case IMAGPART_EXPR:
3371 /* Require the same offset. */
3372 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3373 TYPE_SIZE (TREE_TYPE (arg1)),
3374 flags & ~OEP_ADDRESS_OF))
3375 return false;
3377 /* Fallthru. */
3378 case REALPART_EXPR:
3379 case VIEW_CONVERT_EXPR:
3380 return OP_SAME (0);
3382 case TARGET_MEM_REF:
3383 case MEM_REF:
3384 if (!(flags & OEP_ADDRESS_OF))
3386 /* Require equal access sizes. */
3387 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3388 && (!TYPE_SIZE (TREE_TYPE (arg0))
3389 || !TYPE_SIZE (TREE_TYPE (arg1))
3390 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3391 TYPE_SIZE (TREE_TYPE (arg1)),
3392 flags)))
3393 return false;
3394 /* Verify that access happens in similar types. */
3395 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3396 return false;
3397 /* Verify that accesses are TBAA compatible. */
3398 if (!alias_ptr_types_compatible_p
3399 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3400 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3401 || (MR_DEPENDENCE_CLIQUE (arg0)
3402 != MR_DEPENDENCE_CLIQUE (arg1))
3403 || (MR_DEPENDENCE_BASE (arg0)
3404 != MR_DEPENDENCE_BASE (arg1)))
3405 return false;
3406 /* Verify that alignment is compatible. */
3407 if (TYPE_ALIGN (TREE_TYPE (arg0))
3408 != TYPE_ALIGN (TREE_TYPE (arg1)))
3409 return false;
3411 flags &= ~OEP_ADDRESS_OF;
3412 return (OP_SAME (0) && OP_SAME (1)
3413 /* TARGET_MEM_REFs require equal extra operands. */
3414 && (TREE_CODE (arg0) != TARGET_MEM_REF
3415 || (OP_SAME_WITH_NULL (2)
3416 && OP_SAME_WITH_NULL (3)
3417 && OP_SAME_WITH_NULL (4))));
3419 case ARRAY_REF:
3420 case ARRAY_RANGE_REF:
3421 if (!OP_SAME (0))
3422 return false;
3423 flags &= ~OEP_ADDRESS_OF;
3424 /* First compare the array index by value if it is constant, as we
3425 may have different types but the same value here. */
3426 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3427 TREE_OPERAND (arg1, 1))
3428 || OP_SAME (1))
3429 && OP_SAME_WITH_NULL (2)
3430 && OP_SAME_WITH_NULL (3)
3431 /* Compare low bound and element size as with OEP_ADDRESS_OF
3432 we have to account for the offset of the ref. */
3433 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3434 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3435 || (operand_equal_p (array_ref_low_bound
3436 (CONST_CAST_TREE (arg0)),
3437 array_ref_low_bound
3438 (CONST_CAST_TREE (arg1)), flags)
3439 && operand_equal_p (array_ref_element_size
3440 (CONST_CAST_TREE (arg0)),
3441 array_ref_element_size
3442 (CONST_CAST_TREE (arg1)),
3443 flags))));
3445 case COMPONENT_REF:
3446 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3447 may be NULL when we're called to compare MEM_EXPRs. */
3448 if (!OP_SAME_WITH_NULL (0))
3449 return false;
3451 bool compare_address = flags & OEP_ADDRESS_OF;
3453 /* Most of the time we only need to compare FIELD_DECLs for equality.
3454 However, when determining addresses, look at the actual offsets.
3455 These may match for unions and unshared record types. */
3456 flags &= ~OEP_ADDRESS_OF;
3457 if (!OP_SAME (1))
3459 if (compare_address
3460 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3462 tree field0 = TREE_OPERAND (arg0, 1);
3463 tree field1 = TREE_OPERAND (arg1, 1);
3465 /* Non-FIELD_DECL operands can appear in C++ templates. */
3466 if (TREE_CODE (field0) != FIELD_DECL
3467 || TREE_CODE (field1) != FIELD_DECL
3468 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3469 DECL_FIELD_OFFSET (field1), flags)
3470 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3471 DECL_FIELD_BIT_OFFSET (field1),
3472 flags))
3473 return false;
3475 else
3476 return false;
3479 return OP_SAME_WITH_NULL (2);
3481 case BIT_FIELD_REF:
3482 if (!OP_SAME (0))
3483 return false;
3484 flags &= ~OEP_ADDRESS_OF;
3485 return OP_SAME (1) && OP_SAME (2);
3487 default:
3488 return false;
3491 case tcc_expression:
3492 switch (TREE_CODE (arg0))
3494 case ADDR_EXPR:
3495 /* Be sure we pass the right ADDRESS_OF flag. */
3496 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3497 return operand_equal_p (TREE_OPERAND (arg0, 0),
3498 TREE_OPERAND (arg1, 0),
3499 flags | OEP_ADDRESS_OF);
3501 case TRUTH_NOT_EXPR:
3502 return OP_SAME (0);
3504 case TRUTH_ANDIF_EXPR:
3505 case TRUTH_ORIF_EXPR:
3506 return OP_SAME (0) && OP_SAME (1);
3508 case WIDEN_MULT_PLUS_EXPR:
3509 case WIDEN_MULT_MINUS_EXPR:
3510 if (!OP_SAME (2))
3511 return false;
3512 /* The multiplication operands are commutative. */
3513 /* FALLTHRU */
3515 case TRUTH_AND_EXPR:
3516 case TRUTH_OR_EXPR:
3517 case TRUTH_XOR_EXPR:
3518 if (OP_SAME (0) && OP_SAME (1))
3519 return true;
3521 /* Otherwise take into account this is a commutative operation. */
3522 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3523 TREE_OPERAND (arg1, 1), flags)
3524 && operand_equal_p (TREE_OPERAND (arg0, 1),
3525 TREE_OPERAND (arg1, 0), flags));
3527 case COND_EXPR:
3528 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3529 return false;
3530 flags &= ~OEP_ADDRESS_OF;
3531 return OP_SAME (0);
3533 case BIT_INSERT_EXPR:
3534 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3535 of op1. Check that they are the same. */
3536 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3537 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3538 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3539 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3540 return false;
3541 /* FALLTHRU */
3543 case VEC_COND_EXPR:
3544 case DOT_PROD_EXPR:
3545 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3547 case MODIFY_EXPR:
3548 case INIT_EXPR:
3549 case COMPOUND_EXPR:
3550 case PREDECREMENT_EXPR:
3551 case PREINCREMENT_EXPR:
3552 case POSTDECREMENT_EXPR:
3553 case POSTINCREMENT_EXPR:
3554 if (flags & OEP_LEXICOGRAPHIC)
3555 return OP_SAME (0) && OP_SAME (1);
3556 return false;
3558 case CLEANUP_POINT_EXPR:
3559 case EXPR_STMT:
3560 case SAVE_EXPR:
3561 if (flags & OEP_LEXICOGRAPHIC)
3562 return OP_SAME (0);
3563 return false;
3565 case OBJ_TYPE_REF:
3566 /* Virtual table reference. */
3567 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3568 OBJ_TYPE_REF_EXPR (arg1), flags))
3569 return false;
3570 flags &= ~OEP_ADDRESS_OF;
3571 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3572 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3573 return false;
3574 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3575 OBJ_TYPE_REF_OBJECT (arg1), flags))
3576 return false;
3577 if (virtual_method_call_p (arg0))
3579 if (!virtual_method_call_p (arg1))
3580 return false;
3581 return types_same_for_odr (obj_type_ref_class (arg0),
3582 obj_type_ref_class (arg1));
3584 return false;
3586 default:
3587 return false;
3590 case tcc_vl_exp:
3591 switch (TREE_CODE (arg0))
3593 case CALL_EXPR:
3594 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3595 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3596 /* If the CALL_EXPRs are not both internal or both normal
3597 function calls, then they are not equal. */
3598 return false;
3599 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3601 /* If the CALL_EXPRs call different internal functions, then they
3602 are not equal. */
3603 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3604 return false;
3606 else
3608 /* If the CALL_EXPRs call different functions, then they are not
3609 equal. */
3610 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3611 flags))
3612 return false;
3615 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3617 unsigned int cef = call_expr_flags (arg0);
3618 if (flags & OEP_PURE_SAME)
3619 cef &= ECF_CONST | ECF_PURE;
3620 else
3621 cef &= ECF_CONST;
3622 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3623 return false;
3626 /* Now see if all the arguments are the same. */
3628 const_call_expr_arg_iterator iter0, iter1;
3629 const_tree a0, a1;
3630 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3631 a1 = first_const_call_expr_arg (arg1, &iter1);
3632 a0 && a1;
3633 a0 = next_const_call_expr_arg (&iter0),
3634 a1 = next_const_call_expr_arg (&iter1))
3635 if (! operand_equal_p (a0, a1, flags))
3636 return false;
3638 /* If we get here and both argument lists are exhausted
3639 then the CALL_EXPRs are equal. */
3640 return ! (a0 || a1);
3642 default:
3643 return false;
3646 case tcc_declaration:
3647 /* Consider __builtin_sqrt equal to sqrt. */
3648 if (TREE_CODE (arg0) == FUNCTION_DECL)
3649 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3650 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3651 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3652 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3654 if (DECL_P (arg0)
3655 && (flags & OEP_DECL_NAME)
3656 && (flags & OEP_LEXICOGRAPHIC))
3658 /* Consider decls with the same name equal. The caller needs
3659 to make sure they refer to the same entity (such as a function
3660 formal parameter). */
3661 tree a0name = DECL_NAME (arg0);
3662 tree a1name = DECL_NAME (arg1);
3663 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3664 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3665 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3667 return false;
3669 case tcc_exceptional:
3670 if (TREE_CODE (arg0) == CONSTRUCTOR)
3672 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3673 return false;
3675 /* In GIMPLE constructors are used only to build vectors from
3676 elements. Individual elements in the constructor must be
3677 indexed in increasing order and form an initial sequence.
3679 We make no effort to compare constructors in GENERIC
3680 (see sem_variable::equals in ipa-icf, which can do so for
3681 constants). */
3682 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3683 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3684 return false;
3686 /* Be sure that the constructed vectors have the same representation.
3687 So far we have only tested that element precision and modes match.
3688 Vectors may be BLKmode, so also check that the number of
3689 parts matches. */
3690 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3691 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3692 return false;
3694 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3695 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3696 unsigned int len = vec_safe_length (v0);
3698 if (len != vec_safe_length (v1))
3699 return false;
3701 for (unsigned int i = 0; i < len; i++)
3703 constructor_elt *c0 = &(*v0)[i];
3704 constructor_elt *c1 = &(*v1)[i];
3706 if (!operand_equal_p (c0->value, c1->value, flags)
3707 /* In GIMPLE the indexes can be either NULL or matching i.
3708 Double check this so we won't get false
3709 positives for GENERIC. */
3710 || (c0->index
3711 && (TREE_CODE (c0->index) != INTEGER_CST
3712 || compare_tree_int (c0->index, i)))
3713 || (c1->index
3714 && (TREE_CODE (c1->index) != INTEGER_CST
3715 || compare_tree_int (c1->index, i))))
3716 return false;
3718 return true;
3720 else if (TREE_CODE (arg0) == STATEMENT_LIST
3721 && (flags & OEP_LEXICOGRAPHIC))
3723 /* Compare the STATEMENT_LISTs. */
3724 tree_stmt_iterator tsi1, tsi2;
3725 tree body1 = CONST_CAST_TREE (arg0);
3726 tree body2 = CONST_CAST_TREE (arg1);
3727 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3728 tsi_next (&tsi1), tsi_next (&tsi2))
3730 /* The lists don't have the same number of statements. */
3731 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3732 return false;
3733 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3734 return true;
3735 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3736 flags & (OEP_LEXICOGRAPHIC
3737 | OEP_NO_HASH_CHECK)))
3738 return false;
3741 return false;
3743 case tcc_statement:
3744 switch (TREE_CODE (arg0))
3746 case RETURN_EXPR:
3747 if (flags & OEP_LEXICOGRAPHIC)
3748 return OP_SAME_WITH_NULL (0);
3749 return false;
3750 case DEBUG_BEGIN_STMT:
3751 if (flags & OEP_LEXICOGRAPHIC)
3752 return true;
3753 return false;
3754 default:
3755 return false;
3758 default:
3759 return false;
3762 #undef OP_SAME
3763 #undef OP_SAME_WITH_NULL
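/* Usage sketch (illustrative): operand_equal_p (a, b, 0) reports
   whether two trees are interchangeable, e.g. two ARRAY_REFs of the
   same base and index without side effects compare equal; with
   OEP_ONLY_CONST, even a VAR_DECL compared against itself answers
   false, since only constants may match under that flag. */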
3766 /* Generate a hash value for an expression. This can be used iteratively
3767 by passing a previous result as the HSTATE argument. */
3769 void
3770 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3771 unsigned int flags)
3773 int i;
3774 enum tree_code code;
3775 enum tree_code_class tclass;
3777 if (t == NULL_TREE || t == error_mark_node)
3779 hstate.merge_hash (0);
3780 return;
3783 STRIP_ANY_LOCATION_WRAPPER (t);
3785 if (!(flags & OEP_ADDRESS_OF))
3786 STRIP_NOPS (t);
3788 code = TREE_CODE (t);
3790 switch (code)
3792 /* Alas, constants aren't shared, so we can't rely on pointer
3793 identity. */
3794 case VOID_CST:
3795 hstate.merge_hash (0);
3796 return;
3797 case INTEGER_CST:
3798 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3799 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3800 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3801 return;
3802 case REAL_CST:
3804 unsigned int val2;
3805 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3806 val2 = rvc_zero;
3807 else
3808 val2 = real_hash (TREE_REAL_CST_PTR (t));
3809 hstate.merge_hash (val2);
3810 return;
3812 case FIXED_CST:
3814 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3815 hstate.merge_hash (val2);
3816 return;
3818 case STRING_CST:
3819 hstate.add ((const void *) TREE_STRING_POINTER (t),
3820 TREE_STRING_LENGTH (t));
3821 return;
3822 case COMPLEX_CST:
3823 hash_operand (TREE_REALPART (t), hstate, flags);
3824 hash_operand (TREE_IMAGPART (t), hstate, flags);
3825 return;
3826 case VECTOR_CST:
3828 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3829 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3830 unsigned int count = vector_cst_encoded_nelts (t);
3831 for (unsigned int i = 0; i < count; ++i)
3832 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3833 return;
3835 case SSA_NAME:
3836 /* We can just compare by pointer. */
3837 hstate.add_hwi (SSA_NAME_VERSION (t));
3838 return;
3839 case PLACEHOLDER_EXPR:
3840 /* The node itself doesn't matter. */
3841 return;
3842 case BLOCK:
3843 case OMP_CLAUSE:
3844 /* Ignore. */
3845 return;
3846 case TREE_LIST:
3847 /* A list of expressions, for a CALL_EXPR or as the elements of a
3848 VECTOR_CST. */
3849 for (; t; t = TREE_CHAIN (t))
3850 hash_operand (TREE_VALUE (t), hstate, flags);
3851 return;
3852 case CONSTRUCTOR:
3854 unsigned HOST_WIDE_INT idx;
3855 tree field, value;
3856 flags &= ~OEP_ADDRESS_OF;
3857 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3858 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3860 /* In GIMPLE the indexes can be either NULL or matching i. */
3861 if (field == NULL_TREE)
3862 field = bitsize_int (idx);
3863 hash_operand (field, hstate, flags);
3864 hash_operand (value, hstate, flags);
3866 return;
3868 case STATEMENT_LIST:
3870 tree_stmt_iterator i;
3871 for (i = tsi_start (CONST_CAST_TREE (t));
3872 !tsi_end_p (i); tsi_next (&i))
3873 hash_operand (tsi_stmt (i), hstate, flags);
3874 return;
3876 case TREE_VEC:
3877 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3878 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3879 return;
3880 case IDENTIFIER_NODE:
3881 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3882 return;
3883 case FUNCTION_DECL:
3884 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3885 Otherwise nodes that compare equal according to operand_equal_p might
3886 get different hash codes. However, don't do this for machine specific
3887 or front end builtins, since the function code is overloaded in those
3888 cases. */
3889 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3890 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3892 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3893 code = TREE_CODE (t);
3895 /* FALL THROUGH */
3896 default:
3897 if (POLY_INT_CST_P (t))
3899 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3900 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3901 return;
3903 tclass = TREE_CODE_CLASS (code);
3905 if (tclass == tcc_declaration)
3907 /* DECLs have a unique ID.  */
3908 hstate.add_hwi (DECL_UID (t));
3910 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3912 /* For comparisons that can be swapped, use the lower
3913 tree code. */
3914 enum tree_code ccode = swap_tree_comparison (code);
3915 if (code < ccode)
3916 ccode = code;
3917 hstate.add_object (ccode);
3918 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3919 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3921 else if (CONVERT_EXPR_CODE_P (code))
3923 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3924 operand_equal_p. */
3925 enum tree_code ccode = NOP_EXPR;
3926 hstate.add_object (ccode);
3928 /* Don't hash the type, that can lead to having nodes which
3929 compare equal according to operand_equal_p, but which
3930 have different hash codes. Make sure to include signedness
3931 in the hash computation. */
3932 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3933 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3935 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
3936 else if (code == MEM_REF
3937 && (flags & OEP_ADDRESS_OF) != 0
3938 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3939 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3940 && integer_zerop (TREE_OPERAND (t, 1)))
3941 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3942 hstate, flags);
3943 /* Don't ICE on FE specific trees, or their arguments etc.
3944 during operand_equal_p hash verification. */
3945 else if (!IS_EXPR_CODE_CLASS (tclass))
3946 gcc_assert (flags & OEP_HASH_CHECK);
3947 else
3949 unsigned int sflags = flags;
3951 hstate.add_object (code);
3953 switch (code)
3955 case ADDR_EXPR:
3956 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3957 flags |= OEP_ADDRESS_OF;
3958 sflags = flags;
3959 break;
3961 case INDIRECT_REF:
3962 case MEM_REF:
3963 case TARGET_MEM_REF:
3964 flags &= ~OEP_ADDRESS_OF;
3965 sflags = flags;
3966 break;
3968 case COMPONENT_REF:
3969 if (sflags & OEP_ADDRESS_OF)
3971 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3972 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3973 hstate, flags & ~OEP_ADDRESS_OF);
3974 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3975 hstate, flags & ~OEP_ADDRESS_OF);
3976 return;
3978 break;
3979 case ARRAY_REF:
3980 case ARRAY_RANGE_REF:
3981 case BIT_FIELD_REF:
3982 sflags &= ~OEP_ADDRESS_OF;
3983 break;
3985 case COND_EXPR:
3986 flags &= ~OEP_ADDRESS_OF;
3987 break;
3989 case WIDEN_MULT_PLUS_EXPR:
3990 case WIDEN_MULT_MINUS_EXPR:
3992 /* The multiplication operands are commutative. */
3993 inchash::hash one, two;
3994 hash_operand (TREE_OPERAND (t, 0), one, flags);
3995 hash_operand (TREE_OPERAND (t, 1), two, flags);
3996 hstate.add_commutative (one, two);
3997 hash_operand (TREE_OPERAND (t, 2), two, flags);
3998 return;
4001 case CALL_EXPR:
4002 if (CALL_EXPR_FN (t) == NULL_TREE)
4003 hstate.add_int (CALL_EXPR_IFN (t));
4004 break;
4006 case TARGET_EXPR:
4007 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4008 Usually different TARGET_EXPRs should just use
4009 different temporaries in their slots. */
4010 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4011 return;
4013 case OBJ_TYPE_REF:
4014 /* Virtual table reference. */
4015 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4016 flags &= ~OEP_ADDRESS_OF;
4017 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4018 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4019 if (!virtual_method_call_p (t))
4020 return;
4021 if (tree c = obj_type_ref_class (t))
4023 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4024 /* We compute mangled names only when free_lang_data is run.
4025 In that case we can hash precisely. */
4026 if (TREE_CODE (c) == TYPE_DECL
4027 && DECL_ASSEMBLER_NAME_SET_P (c))
4028 hstate.add_object
4029 (IDENTIFIER_HASH_VALUE
4030 (DECL_ASSEMBLER_NAME (c)));
4032 return;
4033 default:
4034 break;
4037 /* Don't hash the type, that can lead to having nodes which
4038 compare equal according to operand_equal_p, but which
4039 have different hash codes. */
4040 if (code == NON_LVALUE_EXPR)
4042 /* Make sure to include signedness in the hash computation. */
4043 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4044 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4047 else if (commutative_tree_code (code))
4049 /* It's a commutative expression. We want to hash it the same
4050 however it appears. We do this by first hashing both operands
4051 and then rehashing based on the order of their independent
4052 hashes. */
4053 inchash::hash one, two;
4054 hash_operand (TREE_OPERAND (t, 0), one, flags);
4055 hash_operand (TREE_OPERAND (t, 1), two, flags);
4056 hstate.add_commutative (one, two);
4058 else
4059 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4060 hash_operand (TREE_OPERAND (t, i), hstate,
4061 i == 0 ? flags : sflags);
4063 return;
4067 bool
4068 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4069 unsigned int flags, bool *ret)
4071 /* When checking and unless comparing DECL names, verify that if
4072 the outermost operand_equal_p call returns non-zero then ARG0
4073 and ARG1 have the same hash value. */
4074 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4076 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4078 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4080 inchash::hash hstate0 (0), hstate1 (0);
4081 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4082 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4083 hashval_t h0 = hstate0.end ();
4084 hashval_t h1 = hstate1.end ();
4085 gcc_assert (h0 == h1);
4087 *ret = true;
4089 else
4090 *ret = false;
4092 return true;
4095 return false;
4099 static operand_compare default_compare_instance;
4101 /* Convenience wrapper around the operand_compare class, because usually
4102 we do not need to play with the valueizer. */
4104 bool
4105 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4107 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
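
/* A minimal usage sketch (T1 and T2 are hypothetical trees):

     if (operand_equal_p (t1, t2, 0))
       ...   // structurally equal as values

   Passing OEP_ADDRESS_OF in FLAGS instead compares the operands as
   addresses; see the ADDR_EXPR and MEM_REF handling above.  */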
4110 namespace inchash
4113 /* Generate a hash value for an expression. This can be used iteratively
4114 by passing a previous result as the HSTATE argument.
4116 This function is intended to produce the same hash for expressions which
4117 would compare equal using operand_equal_p. */
4118 void
4119 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4121 default_compare_instance.hash_operand (t, hstate, flags);
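
/* An illustrative sketch of the iterative use (OP0 and OP1 are
   hypothetical trees):

     inchash::hash h;
     inchash::add_expr (op0, h, 0);
     inchash::add_expr (op1, h, 0);
     hashval_t v = h.end ();

   Trees that operand_equal_p considers equal feed the same data into
   H and therefore yield the same V.  */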
4126 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4127 with a different signedness or a narrower precision. */
4129 static bool
4130 operand_equal_for_comparison_p (tree arg0, tree arg1)
4132 if (operand_equal_p (arg0, arg1, 0))
4133 return true;
4135 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4136 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4137 return false;
4139 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4140 and see if the inner values are the same. This removes any
4141 signedness comparison, which doesn't matter here. */
4142 tree op0 = arg0;
4143 tree op1 = arg1;
4144 STRIP_NOPS (op0);
4145 STRIP_NOPS (op1);
4146 if (operand_equal_p (op0, op1, 0))
4147 return true;
4149 /* Discard a single widening conversion from ARG1 and see if the inner
4150 value is the same as ARG0. */
4151 if (CONVERT_EXPR_P (arg1)
4152 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4153 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4154 < TYPE_PRECISION (TREE_TYPE (arg1))
4155 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4156 return true;
4158 return false;
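
/* For instance (hypothetical declarations): with short s, ARG0 = s
   and ARG1 = (int) s compare equal here, because the single widening
   conversion on ARG1 is discarded before the inner values are
   compared.  */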
4161 /* See if ARG is an expression that is either a comparison or is performing
4162 arithmetic on comparisons. The comparisons must only be comparing
4163 two different values, which will be stored in *CVAL1 and *CVAL2; if
4164 they are nonzero it means that some operands have already been found.
4165 No variables may be used anywhere else in the expression except in the
4166 comparisons.
4168 If this is true, return true. Otherwise, return false. */
4170 static bool
4171 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4173 enum tree_code code = TREE_CODE (arg);
4174 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4176 /* We can handle some of the tcc_expression cases here. */
4177 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4178 tclass = tcc_unary;
4179 else if (tclass == tcc_expression
4180 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4181 || code == COMPOUND_EXPR))
4182 tclass = tcc_binary;
4184 switch (tclass)
4186 case tcc_unary:
4187 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4189 case tcc_binary:
4190 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4191 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4193 case tcc_constant:
4194 return true;
4196 case tcc_expression:
4197 if (code == COND_EXPR)
4198 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4199 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4200 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4201 return false;
4203 case tcc_comparison:
4204 /* First see if we can handle the first operand, then the second. For
4205 the second operand, we know *CVAL1 can't be zero. It must be that
4206 one side of the comparison is each of the values; test for the
4207 case where this isn't true by failing if the two operands
4208 are the same. */
4210 if (operand_equal_p (TREE_OPERAND (arg, 0),
4211 TREE_OPERAND (arg, 1), 0))
4212 return false;
4214 if (*cval1 == 0)
4215 *cval1 = TREE_OPERAND (arg, 0);
4216 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4218 else if (*cval2 == 0)
4219 *cval2 = TREE_OPERAND (arg, 0);
4220 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4222 else
4223 return false;
4225 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4227 else if (*cval2 == 0)
4228 *cval2 = TREE_OPERAND (arg, 1);
4229 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4231 else
4232 return false;
4234 return true;
4236 default:
4237 return false;
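
/* For example (A, B and C hypothetical): a < b || a == b is accepted
   with *CVAL1 = a and *CVAL2 = b, since every comparison mentions
   only those two values, whereas a < b || a == c is rejected once C
   fails to match either recorded value.  */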
4241 /* ARG is a tree that is known to contain just arithmetic operations and
4242 comparisons. Evaluate the operations in the tree substituting NEW0 for
4243 any occurrence of OLD0 as an operand of a comparison and likewise for
4244 NEW1 and OLD1. */
4246 static tree
4247 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4248 tree old1, tree new1)
4250 tree type = TREE_TYPE (arg);
4251 enum tree_code code = TREE_CODE (arg);
4252 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4254 /* We can handle some of the tcc_expression cases here. */
4255 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4256 tclass = tcc_unary;
4257 else if (tclass == tcc_expression
4258 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4259 tclass = tcc_binary;
4261 switch (tclass)
4263 case tcc_unary:
4264 return fold_build1_loc (loc, code, type,
4265 eval_subst (loc, TREE_OPERAND (arg, 0),
4266 old0, new0, old1, new1));
4268 case tcc_binary:
4269 return fold_build2_loc (loc, code, type,
4270 eval_subst (loc, TREE_OPERAND (arg, 0),
4271 old0, new0, old1, new1),
4272 eval_subst (loc, TREE_OPERAND (arg, 1),
4273 old0, new0, old1, new1));
4275 case tcc_expression:
4276 switch (code)
4278 case SAVE_EXPR:
4279 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4280 old1, new1);
4282 case COMPOUND_EXPR:
4283 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4284 old1, new1);
4286 case COND_EXPR:
4287 return fold_build3_loc (loc, code, type,
4288 eval_subst (loc, TREE_OPERAND (arg, 0),
4289 old0, new0, old1, new1),
4290 eval_subst (loc, TREE_OPERAND (arg, 1),
4291 old0, new0, old1, new1),
4292 eval_subst (loc, TREE_OPERAND (arg, 2),
4293 old0, new0, old1, new1));
4294 default:
4295 break;
4297 /* Fall through - ??? */
4299 case tcc_comparison:
4301 tree arg0 = TREE_OPERAND (arg, 0);
4302 tree arg1 = TREE_OPERAND (arg, 1);
4304 /* We need to check both for exact equality and tree equality. The
4305 former will be true if the operand has a side-effect. In that
4306 case, we know the operand occurred exactly once. */
4308 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4309 arg0 = new0;
4310 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4311 arg0 = new1;
4313 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4314 arg1 = new0;
4315 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4316 arg1 = new1;
4318 return fold_build2_loc (loc, code, type, arg0, arg1);
4321 default:
4322 return arg;
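
/* An illustrative sketch (hypothetical trees): with OLD0 = a,
   NEW0 = x, OLD1 = b and NEW1 = y, the expression

     a < b && a == b

   is rebuilt as x < y && x == y, re-folding each comparison on the
   way out.  */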
4326 /* Return a tree for the case when the result of an expression is RESULT
4327 converted to TYPE and OMITTED was previously an operand of the expression
4328 but is now not needed (e.g., we folded OMITTED * 0).
4330 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4331 the conversion of RESULT to TYPE. */
4333 tree
4334 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4336 tree t = fold_convert_loc (loc, type, result);
4338 /* If the resulting operand is an empty statement, just return the omitted
4339 statement cast to void. */
4340 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4341 return build1_loc (loc, NOP_EXPR, void_type_node,
4342 fold_ignored_result (omitted));
4344 if (TREE_SIDE_EFFECTS (omitted))
4345 return build2_loc (loc, COMPOUND_EXPR, type,
4346 fold_ignored_result (omitted), t);
4348 return non_lvalue_loc (loc, t);
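
/* For instance (hypothetical call): when f () * 0 is folded, RESULT
   is 0 and OMITTED is f ().  Since the call has side effects, the
   result is built as the COMPOUND_EXPR (f (), 0) so that f is still
   evaluated.  */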
4351 /* Return a tree for the case when the result of an expression is RESULT
4352 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4353 of the expression but are now not needed.
4355 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4356 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4357 evaluated before OMITTED2. Otherwise, if neither has side effects,
4358 just do the conversion of RESULT to TYPE. */
4360 tree
4361 omit_two_operands_loc (location_t loc, tree type, tree result,
4362 tree omitted1, tree omitted2)
4364 tree t = fold_convert_loc (loc, type, result);
4366 if (TREE_SIDE_EFFECTS (omitted2))
4367 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4368 if (TREE_SIDE_EFFECTS (omitted1))
4369 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4371 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4375 /* Return a simplified tree node for the truth-negation of ARG. This
4376 never alters ARG itself. We assume that ARG is an operation that
4377 returns a truth value (0 or 1).
4379 FIXME: one would think we would fold the result, but it causes
4380 problems with the dominator optimizer. */
4382 static tree
4383 fold_truth_not_expr (location_t loc, tree arg)
4385 tree type = TREE_TYPE (arg);
4386 enum tree_code code = TREE_CODE (arg);
4387 location_t loc1, loc2;
4389 /* If this is a comparison, we can simply invert it, except for
4390 floating-point non-equality comparisons, in which case we just
4391 enclose a TRUTH_NOT_EXPR around what we have. */
4393 if (TREE_CODE_CLASS (code) == tcc_comparison)
4395 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4396 if (FLOAT_TYPE_P (op_type)
4397 && flag_trapping_math
4398 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4399 && code != NE_EXPR && code != EQ_EXPR)
4400 return NULL_TREE;
4402 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4403 if (code == ERROR_MARK)
4404 return NULL_TREE;
4406 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4407 TREE_OPERAND (arg, 1));
4408 copy_warning (ret, arg);
4409 return ret;
4412 switch (code)
4414 case INTEGER_CST:
4415 return constant_boolean_node (integer_zerop (arg), type);
4417 case TRUTH_AND_EXPR:
4418 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4419 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4420 return build2_loc (loc, TRUTH_OR_EXPR, type,
4421 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4422 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4424 case TRUTH_OR_EXPR:
4425 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4426 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4427 return build2_loc (loc, TRUTH_AND_EXPR, type,
4428 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4429 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4431 case TRUTH_XOR_EXPR:
4432 /* Here we can invert either operand. We invert the first operand
4433 unless the second operand is a TRUTH_NOT_EXPR in which case our
4434 result is the XOR of the first operand with the inside of the
4435 negation of the second operand. */
4437 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4438 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4439 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4440 else
4441 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4442 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4443 TREE_OPERAND (arg, 1));
4445 case TRUTH_ANDIF_EXPR:
4446 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4447 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4448 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4449 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4450 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4452 case TRUTH_ORIF_EXPR:
4453 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4454 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4455 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4456 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4457 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4459 case TRUTH_NOT_EXPR:
4460 return TREE_OPERAND (arg, 0);
4462 case COND_EXPR:
4464 tree arg1 = TREE_OPERAND (arg, 1);
4465 tree arg2 = TREE_OPERAND (arg, 2);
4467 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4468 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4470 /* A COND_EXPR may have a throw as one operand, which
4471 then has void type. Just leave void operands
4472 as they are. */
4473 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4474 VOID_TYPE_P (TREE_TYPE (arg1))
4475 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4476 VOID_TYPE_P (TREE_TYPE (arg2))
4477 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4480 case COMPOUND_EXPR:
4481 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4482 return build2_loc (loc, COMPOUND_EXPR, type,
4483 TREE_OPERAND (arg, 0),
4484 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4486 case NON_LVALUE_EXPR:
4487 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4488 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4490 CASE_CONVERT:
4491 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4492 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4494 /* fall through */
4496 case FLOAT_EXPR:
4497 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4498 return build1_loc (loc, TREE_CODE (arg), type,
4499 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4501 case BIT_AND_EXPR:
4502 if (!integer_onep (TREE_OPERAND (arg, 1)))
4503 return NULL_TREE;
4504 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4506 case SAVE_EXPR:
4507 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4509 case CLEANUP_POINT_EXPR:
4510 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4511 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4512 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4514 default:
4515 return NULL_TREE;
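
/* Some illustrative cases (hypothetical operands):

     !(a < b)   ->  a >= b     for integers
     !(a && b)  ->  !a || !b   (the TRUTH_AND_EXPR case)
     !(x < y)   ->  NULL_TREE  for floats under -ftrapping-math; the
                               caller then wraps a TRUTH_NOT_EXPR
                               around the comparison instead.  */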
4519 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4520 assume that ARG is an operation that returns a truth value (0 or 1
4521 for scalars, 0 or -1 for vectors). Return the folded expression if
4522 folding is successful. Otherwise, return NULL_TREE. */
4524 static tree
4525 fold_invert_truthvalue (location_t loc, tree arg)
4527 tree type = TREE_TYPE (arg);
4528 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4529 ? BIT_NOT_EXPR
4530 : TRUTH_NOT_EXPR,
4531 type, arg);
4534 /* Return a simplified tree node for the truth-negation of ARG. This
4535 never alters ARG itself. We assume that ARG is an operation that
4536 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4538 tree
4539 invert_truthvalue_loc (location_t loc, tree arg)
4541 if (TREE_CODE (arg) == ERROR_MARK)
4542 return arg;
4544 tree type = TREE_TYPE (arg);
4545 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4546 ? BIT_NOT_EXPR
4547 : TRUTH_NOT_EXPR,
4548 type, arg);
4551 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4552 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4553 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4554 is the original memory reference used to preserve the alias set of
4555 the access. */
4557 static tree
4558 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4559 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4560 int unsignedp, int reversep)
4562 tree result, bftype;
4564 /* Attempt not to lose the access path if possible. */
4565 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4567 tree ninner = TREE_OPERAND (orig_inner, 0);
4568 machine_mode nmode;
4569 poly_int64 nbitsize, nbitpos;
4570 tree noffset;
4571 int nunsignedp, nreversep, nvolatilep = 0;
4572 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4573 &noffset, &nmode, &nunsignedp,
4574 &nreversep, &nvolatilep);
4575 if (base == inner
4576 && noffset == NULL_TREE
4577 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4578 && !reversep
4579 && !nreversep
4580 && !nvolatilep)
4582 inner = ninner;
4583 bitpos -= nbitpos;
4587 alias_set_type iset = get_alias_set (orig_inner);
4588 if (iset == 0 && get_alias_set (inner) != iset)
4589 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4590 build_fold_addr_expr (inner),
4591 build_int_cst (ptr_type_node, 0));
4593 if (known_eq (bitpos, 0) && !reversep)
4595 tree size = TYPE_SIZE (TREE_TYPE (inner));
4596 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4597 || POINTER_TYPE_P (TREE_TYPE (inner)))
4598 && tree_fits_shwi_p (size)
4599 && tree_to_shwi (size) == bitsize)
4600 return fold_convert_loc (loc, type, inner);
4603 bftype = type;
4604 if (TYPE_PRECISION (bftype) != bitsize
4605 || TYPE_UNSIGNED (bftype) == !unsignedp)
4606 bftype = build_nonstandard_integer_type (bitsize, 0);
4608 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4609 bitsize_int (bitsize), bitsize_int (bitpos));
4610 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4612 if (bftype != type)
4613 result = fold_convert_loc (loc, type, result);
4615 return result;
4618 /* Optimize a bit-field compare.
4620 There are two cases: First is a compare against a constant and the
4621 second is a comparison of two items where the fields are at the same
4622 bit position relative to the start of a chunk (byte, halfword, word)
4623 large enough to contain it. In these cases we can avoid the shift
4624 implicit in bitfield extractions.
4626 For constants, we emit a compare of the shifted constant with the
4627 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4628 compared. For two fields at the same position, we do the ANDs with the
4629 similar mask and compare the result of the ANDs.
4631 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4632 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4633 are the left and right operands of the comparison, respectively.
4635 If the optimization described above can be done, we return the resulting
4636 tree. Otherwise we return zero. */
4638 static tree
4639 optimize_bit_field_compare (location_t loc, enum tree_code code,
4640 tree compare_type, tree lhs, tree rhs)
4642 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4643 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4644 tree type = TREE_TYPE (lhs);
4645 tree unsigned_type;
4646 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4647 machine_mode lmode, rmode;
4648 scalar_int_mode nmode;
4649 int lunsignedp, runsignedp;
4650 int lreversep, rreversep;
4651 int lvolatilep = 0, rvolatilep = 0;
4652 tree linner, rinner = NULL_TREE;
4653 tree mask;
4654 tree offset;
4656 /* Get all the information about the extractions being done. If the bit size
4657 is the same as the size of the underlying object, we aren't doing an
4658 extraction at all and so can do nothing. We also don't want to
4659 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4660 then will no longer be able to replace it. */
4661 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4662 &lunsignedp, &lreversep, &lvolatilep);
4663 if (linner == lhs
4664 || !known_size_p (plbitsize)
4665 || !plbitsize.is_constant (&lbitsize)
4666 || !plbitpos.is_constant (&lbitpos)
4667 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4668 || offset != 0
4669 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4670 || lvolatilep)
4671 return 0;
4673 if (const_p)
4674 rreversep = lreversep;
4675 else
4677 /* If this is not a constant, we can only do something if bit positions,
4678 sizes, signedness and storage order are the same. */
4679 rinner
4680 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4681 &runsignedp, &rreversep, &rvolatilep);
4683 if (rinner == rhs
4684 || maybe_ne (lbitpos, rbitpos)
4685 || maybe_ne (lbitsize, rbitsize)
4686 || lunsignedp != runsignedp
4687 || lreversep != rreversep
4688 || offset != 0
4689 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4690 || rvolatilep)
4691 return 0;
4694 /* Honor the C++ memory model and mimic what RTL expansion does. */
4695 poly_uint64 bitstart = 0;
4696 poly_uint64 bitend = 0;
4697 if (TREE_CODE (lhs) == COMPONENT_REF)
4699 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4700 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4701 return 0;
4704 /* See if we can find a mode to refer to this field. We should be able to,
4705 but fail if we can't. */
4706 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4707 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4708 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4709 TYPE_ALIGN (TREE_TYPE (rinner))),
4710 BITS_PER_WORD, false, &nmode))
4711 return 0;
4713 /* Set signed and unsigned types of the precision of this mode for the
4714 shifts below. */
4715 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4717 /* Compute the bit position and size for the new reference and our offset
4718 within it. If the new reference is the same size as the original, we
4719 won't optimize anything, so return zero. */
4720 nbitsize = GET_MODE_BITSIZE (nmode);
4721 nbitpos = lbitpos & ~ (nbitsize - 1);
4722 lbitpos -= nbitpos;
4723 if (nbitsize == lbitsize)
4724 return 0;
4726 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4727 lbitpos = nbitsize - lbitsize - lbitpos;
4729 /* Make the mask to be used against the extracted field. */
4730 mask = build_int_cst_type (unsigned_type, -1);
4731 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4732 mask = const_binop (RSHIFT_EXPR, mask,
4733 size_int (nbitsize - lbitsize - lbitpos));
4735 if (! const_p)
4737 if (nbitpos < 0)
4738 return 0;
4740 /* If not comparing with constant, just rework the comparison
4741 and return. */
4742 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4743 nbitsize, nbitpos, 1, lreversep);
4744 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4745 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4746 nbitsize, nbitpos, 1, rreversep);
4747 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4748 return fold_build2_loc (loc, code, compare_type, t1, t2);
4751 /* Otherwise, we are handling the constant case. See if the constant is too
4752 big for the field. Warn and return a tree for the known result if so
4753 (false for EQ_EXPR, true for NE_EXPR). We do this not only for its own
4754 sake, but to avoid having to test for this error case below; if we didn't, we might generate wrong code.
4756 For unsigned fields, the constant shifted right by the field length should
4757 be all zero. For signed fields, the high-order bits should agree with
4758 the sign bit. */
4760 if (lunsignedp)
4762 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4764 warning (0, "comparison is always %d due to width of bit-field",
4765 code == NE_EXPR);
4766 return constant_boolean_node (code == NE_EXPR, compare_type);
4769 else
4771 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4772 if (tem != 0 && tem != -1)
4774 warning (0, "comparison is always %d due to width of bit-field",
4775 code == NE_EXPR);
4776 return constant_boolean_node (code == NE_EXPR, compare_type);
4780 if (nbitpos < 0)
4781 return 0;
4783 /* Single-bit compares should always be against zero. */
4784 if (lbitsize == 1 && ! integer_zerop (rhs))
4786 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4787 rhs = build_int_cst (type, 0);
4790 /* Make a new bitfield reference, shift the constant over the
4791 appropriate number of bits and mask it with the computed mask
4792 (in case this was a signed field). If we changed it, make a new one. */
4793 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4794 nbitsize, nbitpos, 1, lreversep);
4796 rhs = const_binop (BIT_AND_EXPR,
4797 const_binop (LSHIFT_EXPR,
4798 fold_convert_loc (loc, unsigned_type, rhs),
4799 size_int (lbitpos)),
4800 mask);
4802 lhs = build2_loc (loc, code, compare_type,
4803 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4804 return lhs;
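
/* A sketch of the constant case (hypothetical layout):

     struct S { unsigned a : 3; unsigned b : 5; } s;
     ... s.b == 7 ...

   can become, with B occupying bits 3..7 of the byte-sized unit W,

     (w & 0xf8) == (7 << 3)

   i.e. a masked compare of the containing unit against the shifted
   constant, avoiding the shift implicit in extracting s.b.  */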
4807 /* Subroutine for fold_truth_andor_1: decode a field reference.
4809 If EXP is a comparison reference, we return the innermost reference.
4811 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4812 set to the starting bit number.
4814 If the innermost field can be completely contained in a mode-sized
4815 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4817 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4818 otherwise it is not changed.
4820 *PUNSIGNEDP is set to the signedness of the field.
4822 *PREVERSEP is set to the storage order of the field.
4824 *PMASK is set to the mask used. This is either contained in a
4825 BIT_AND_EXPR or derived from the width of the field.
4827 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4829 Return 0 if this is not a component reference or is one that we can't
4830 do anything with. */
4832 static tree
4833 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4834 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4835 int *punsignedp, int *preversep, int *pvolatilep,
4836 tree *pmask, tree *pand_mask)
4838 tree exp = *exp_;
4839 tree outer_type = 0;
4840 tree and_mask = 0;
4841 tree mask, inner, offset;
4842 tree unsigned_type;
4843 unsigned int precision;
4845 /* All the optimizations using this function assume integer fields.
4846 There are problems with FP fields since the type_for_size call
4847 below can fail for, e.g., XFmode. */
4848 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4849 return NULL_TREE;
4851 /* We are interested in the bare arrangement of bits, so strip everything
4852 that doesn't affect the machine mode. However, record the type of the
4853 outermost expression if it may matter below. */
4854 if (CONVERT_EXPR_P (exp)
4855 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4856 outer_type = TREE_TYPE (exp);
4857 STRIP_NOPS (exp);
4859 if (TREE_CODE (exp) == BIT_AND_EXPR)
4861 and_mask = TREE_OPERAND (exp, 1);
4862 exp = TREE_OPERAND (exp, 0);
4863 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4864 if (TREE_CODE (and_mask) != INTEGER_CST)
4865 return NULL_TREE;
4868 poly_int64 poly_bitsize, poly_bitpos;
4869 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4870 pmode, punsignedp, preversep, pvolatilep);
4871 if ((inner == exp && and_mask == 0)
4872 || !poly_bitsize.is_constant (pbitsize)
4873 || !poly_bitpos.is_constant (pbitpos)
4874 || *pbitsize < 0
4875 || offset != 0
4876 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4877 /* Reject out-of-bound accesses (PR79731). */
4878 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4879 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4880 *pbitpos + *pbitsize) < 0))
4881 return NULL_TREE;
4883 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4884 if (unsigned_type == NULL_TREE)
4885 return NULL_TREE;
4887 *exp_ = exp;
4889 /* If the number of bits in the reference is the same as the bitsize of
4890 the outer type, then the outer type gives the signedness. Otherwise
4891 (in case of a small bitfield) the signedness is unchanged. */
4892 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4893 *punsignedp = TYPE_UNSIGNED (outer_type);
4895 /* Compute the mask to access the bitfield. */
4896 precision = TYPE_PRECISION (unsigned_type);
4898 mask = build_int_cst_type (unsigned_type, -1);
4900 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4901 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4903 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4904 if (and_mask != 0)
4905 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4906 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4908 *pmask = mask;
4909 *pand_mask = and_mask;
4910 return inner;
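
/* For instance (hypothetical field): for EXP of the form s.f & 3,
   where S.f is an unsigned 8-bit field starting at bit 16, this
   returns the containing object S and sets *PBITSIZE = 8,
   *PBITPOS = 16, *PAND_MASK = 3 and *PMASK = 3 (the 0xff field mask
   merged with the BIT_AND_EXPR mask).  */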
4913 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4914 bit positions and MASK is SIGNED. */
4916 static bool
4917 all_ones_mask_p (const_tree mask, unsigned int size)
4919 tree type = TREE_TYPE (mask);
4920 unsigned int precision = TYPE_PRECISION (type);
4922 /* If this function returns true when the type of the mask is
4923 UNSIGNED, then there will be errors. In particular see
4924 gcc.c-torture/execute/990326-1.c. There does not appear to be
4925 any documentation paper trail as to why this is so. But the
4926 pre-wide-int code worked with that restriction and it has been
4927 preserved here. */
4928 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4929 return false;
4931 return wi::mask (size, false, precision) == wi::to_wide (mask);
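
/* E.g. in a signed 32-bit type, the value 0xf is a mask of SIZE == 4
   low-order ones, so this returns true; 0x1e is not, and any mask of
   unsigned type is rejected outright per the comment above.  */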
4934 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4935 represents the sign bit of EXP's type. If EXP represents a sign
4936 or zero extension, also test VAL against the unextended type.
4937 The return value is the (sub)expression whose sign bit is VAL,
4938 or NULL_TREE otherwise. */
4940 tree
4941 sign_bit_p (tree exp, const_tree val)
4943 int width;
4944 tree t;
4946 /* Tree EXP must have an integral type. */
4947 t = TREE_TYPE (exp);
4948 if (! INTEGRAL_TYPE_P (t))
4949 return NULL_TREE;
4951 /* Tree VAL must be an integer constant. */
4952 if (TREE_CODE (val) != INTEGER_CST
4953 || TREE_OVERFLOW (val))
4954 return NULL_TREE;
4956 width = TYPE_PRECISION (t);
4957 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4958 return exp;
4960 /* Handle extension from a narrower type. */
4961 if (TREE_CODE (exp) == NOP_EXPR
4962 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4963 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4965 return NULL_TREE;
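
/* For example (hypothetical 32-bit int X): VAL == 0x80000000 is
   exactly the sign bit of X's type, so sign_bit_p (x, val) returns X.
   For EXP == (int) c with an 8-bit signed C, VAL == 0x80 is tested
   against the narrower 8-bit type, and C is returned.  */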
4968 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4969 operand is simple enough to be evaluated unconditionally. */
4971 static bool
4972 simple_operand_p (const_tree exp)
4974 /* Strip any conversions that don't change the machine mode. */
4975 STRIP_NOPS (exp);
4977 return (CONSTANT_CLASS_P (exp)
4978 || TREE_CODE (exp) == SSA_NAME
4979 || (DECL_P (exp)
4980 && ! TREE_ADDRESSABLE (exp)
4981 && ! TREE_THIS_VOLATILE (exp)
4982 && ! DECL_NONLOCAL (exp)
4983 /* Don't regard global variables as simple. They may be
4984 allocated in ways unknown to the compiler (shared memory,
4985 #pragma weak, etc). */
4986 && ! TREE_PUBLIC (exp)
4987 && ! DECL_EXTERNAL (exp)
4988 /* Weakrefs are not safe to be read, since they can be NULL.
4989 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4990 have DECL_WEAK flag set. */
4991 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4992 /* Loading a static variable is unduly expensive, but global
4993 registers aren't expensive. */
4994 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4997 /* Determine if an operand is simple enough to be evaluated unconditionally.
4998 In addition to simple_operand_p, we assume that comparisons, conversions,
4999 and logic-not operations are simple, if their operands are simple, too. */
5001 bool
5002 simple_condition_p (tree exp)
5004 enum tree_code code;
5006 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5007 return false;
5009 while (CONVERT_EXPR_P (exp))
5010 exp = TREE_OPERAND (exp, 0);
5012 code = TREE_CODE (exp);
5014 if (TREE_CODE_CLASS (code) == tcc_comparison)
5015 return (simple_operand_p (TREE_OPERAND (exp, 0))
5016 && simple_operand_p (TREE_OPERAND (exp, 1)));
5018 if (code == TRUTH_NOT_EXPR)
5019 return simple_condition_p (TREE_OPERAND (exp, 0));
5021 return simple_operand_p (exp);
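
/* For illustration (hypothetical operands): a < b is simple when A
   and B are SSA names or non-volatile locals, and so are !(a < b)
   and (int) (a < b); but *p < b is not, since the load could trap,
   and neither is a condition reading a global that may change behind
   the compiler's back.  */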
5025 /* The following functions are subroutines to fold_range_test and allow it to
5026 try to change a logical combination of comparisons into a range test.
5028 For example, both
5029 X == 2 || X == 3 || X == 4 || X == 5
5031 X >= 2 && X <= 5
5032 are converted to
5033 (unsigned) (X - 2) <= 3
5035 We describe each set of comparisons as being either inside or outside
5036 a range, using a variable named like IN_P, and then describe the
5037 range with a lower and upper bound. If one of the bounds is omitted,
5038 it represents either the highest or lowest value of the type.
5040 In the comments below, we represent a range by two numbers in brackets
5041 preceded by a "+" to designate being inside that range, or a "-" to
5042 designate being outside that range, so the condition can be inverted by
5043 flipping the prefix. An omitted bound is represented by a "-". For
5044 example, "- [-, 10]" means being outside the range starting at the lowest
5045 possible value and ending at 10, in other words, being greater than 10.
5046 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5047 always false.
5049 We set up things so that the missing bounds are handled in a consistent
5050 manner so neither a missing bound nor "true" and "false" need to be
5051 handled using a special case. */
5053 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5054 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5055 and UPPER1_P are nonzero if the respective argument is an upper bound
5056 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5057 must be specified for a comparison. ARG1 will be converted to ARG0's
5058 type if both are specified. */
5060 static tree
5061 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5062 tree arg1, int upper1_p)
5064 tree tem;
5065 int result;
5066 int sgn0, sgn1;
5068 /* If neither arg represents infinity, do the normal operation.
5069 Else, if not a comparison, return infinity. Else handle the special
5070 comparison rules. Note that most of the cases below won't occur, but
5071 are handled for consistency. */
5073 if (arg0 != 0 && arg1 != 0)
5075 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5076 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5077 STRIP_NOPS (tem);
5078 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5081 if (TREE_CODE_CLASS (code) != tcc_comparison)
5082 return 0;
5084 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5085 for neither. In real maths, we cannot assume open ended ranges are
5086 the same. But, this is computer arithmetic, where numbers are finite.
5087 We can therefore stand in for any missing bound with a value Z
5088 greater than any representable number. This permits
5089 us to treat unbounded ranges as equal. */
5090 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5091 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5092 switch (code)
5094 case EQ_EXPR:
5095 result = sgn0 == sgn1;
5096 break;
5097 case NE_EXPR:
5098 result = sgn0 != sgn1;
5099 break;
5100 case LT_EXPR:
5101 result = sgn0 < sgn1;
5102 break;
5103 case LE_EXPR:
5104 result = sgn0 <= sgn1;
5105 break;
5106 case GT_EXPR:
5107 result = sgn0 > sgn1;
5108 break;
5109 case GE_EXPR:
5110 result = sgn0 >= sgn1;
5111 break;
5112 default:
5113 gcc_unreachable ();
5116 return constant_boolean_node (result, type);
5119 /* Helper routine for make_range. Perform one step for it, returning the
5120 new expression if the loop should continue or NULL_TREE if it should
5121 stop. */
5123 tree
5124 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5125 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5126 bool *strict_overflow_p)
5128 tree arg0_type = TREE_TYPE (arg0);
5129 tree n_low, n_high, low = *p_low, high = *p_high;
5130 int in_p = *p_in_p, n_in_p;
5132 switch (code)
5134 case TRUTH_NOT_EXPR:
5135 /* We can only do something if the range is testing for zero. */
5136 if (low == NULL_TREE || high == NULL_TREE
5137 || ! integer_zerop (low) || ! integer_zerop (high))
5138 return NULL_TREE;
5139 *p_in_p = ! in_p;
5140 return arg0;
5142 case EQ_EXPR: case NE_EXPR:
5143 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5144 /* We can only do something if the range is testing for zero
5145 and if the second operand is an integer constant. Note that
5146 saying something is "in" the range we make is done by
5147 complementing IN_P since it will set in the initial case of
5148 being not equal to zero; "out" is leaving it alone. */
5149 if (low == NULL_TREE || high == NULL_TREE
5150 || ! integer_zerop (low) || ! integer_zerop (high)
5151 || TREE_CODE (arg1) != INTEGER_CST)
5152 return NULL_TREE;
5154 switch (code)
5156 case NE_EXPR: /* - [c, c] */
5157 low = high = arg1;
5158 break;
5159 case EQ_EXPR: /* + [c, c] */
5160 in_p = ! in_p, low = high = arg1;
5161 break;
5162 case GT_EXPR: /* - [-, c] */
5163 low = 0, high = arg1;
5164 break;
5165 case GE_EXPR: /* + [c, -] */
5166 in_p = ! in_p, low = arg1, high = 0;
5167 break;
5168 case LT_EXPR: /* - [c, -] */
5169 low = arg1, high = 0;
5170 break;
5171 case LE_EXPR: /* + [-, c] */
5172 in_p = ! in_p, low = 0, high = arg1;
5173 break;
5174 default:
5175 gcc_unreachable ();
5178 /* If this is an unsigned comparison, we also know that EXP is
5179 greater than or equal to zero. We base the range tests we make
5180 on that fact, so we record it here so we can parse existing
5181 range tests. We test arg0_type since often the return type
5182 of, e.g. EQ_EXPR, is boolean. */
5183 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5185 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5186 in_p, low, high, 1,
5187 build_int_cst (arg0_type, 0),
5188 NULL_TREE))
5189 return NULL_TREE;
5191 in_p = n_in_p, low = n_low, high = n_high;
5193 /* If the high bound is missing, but we have a nonzero low
5194 bound, reverse the range so it goes from zero to the low bound
5195 minus 1. */
5196 if (high == 0 && low && ! integer_zerop (low))
5198 in_p = ! in_p;
5199 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5200 build_int_cst (TREE_TYPE (low), 1), 0);
5201 low = build_int_cst (arg0_type, 0);
5205 *p_low = low;
5206 *p_high = high;
5207 *p_in_p = in_p;
5208 return arg0;
5210 case NEGATE_EXPR:
5211 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5212 low and high are non-NULL, then normalize will DTRT. */
5213 if (!TYPE_UNSIGNED (arg0_type)
5214 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5216 if (low == NULL_TREE)
5217 low = TYPE_MIN_VALUE (arg0_type);
5218 if (high == NULL_TREE)
5219 high = TYPE_MAX_VALUE (arg0_type);
5222 /* (-x) IN [a,b] -> x in [-b, -a] */
5223 n_low = range_binop (MINUS_EXPR, exp_type,
5224 build_int_cst (exp_type, 0),
5225 0, high, 1);
5226 n_high = range_binop (MINUS_EXPR, exp_type,
5227 build_int_cst (exp_type, 0),
5228 0, low, 0);
5229 if (n_high != 0 && TREE_OVERFLOW (n_high))
5230 return NULL_TREE;
5231 goto normalize;
5233 case BIT_NOT_EXPR:
5234 /* ~ X -> -X - 1 */
5235 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5236 build_int_cst (exp_type, 1));
5238 case PLUS_EXPR:
5239 case MINUS_EXPR:
5240 if (TREE_CODE (arg1) != INTEGER_CST)
5241 return NULL_TREE;
5243 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5244 move a constant to the other side. */
5245 if (!TYPE_UNSIGNED (arg0_type)
5246 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5247 return NULL_TREE;
5249 /* If EXP is signed, any overflow in the computation is undefined,
5250 so we don't worry about it so long as our computations on
5251 the bounds don't overflow. For unsigned, overflow is defined
5252 and this is exactly the right thing. */
5253 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5254 arg0_type, low, 0, arg1, 0);
5255 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5256 arg0_type, high, 1, arg1, 0);
5257 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5258 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5259 return NULL_TREE;
5261 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5262 *strict_overflow_p = true;
5264 normalize:
5265 /* Check for an unsigned range which has wrapped around the maximum
5266 value thus making n_high < n_low, and normalize it. */
5267 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5269 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5270 build_int_cst (TREE_TYPE (n_high), 1), 0);
5271 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5272 build_int_cst (TREE_TYPE (n_low), 1), 0);
5274 /* If the range is of the form +/- [ x+1, x ], we won't
5275 be able to normalize it. But then, it represents the
5276 whole range or the empty set, so make it
5277 +/- [ -, - ]. */
5278 if (tree_int_cst_equal (n_low, low)
5279 && tree_int_cst_equal (n_high, high))
5280 low = high = 0;
5281 else
5282 in_p = ! in_p;
5284 else
5285 low = n_low, high = n_high;
5287 *p_low = low;
5288 *p_high = high;
5289 *p_in_p = in_p;
5290 return arg0;
5292 CASE_CONVERT:
5293 case NON_LVALUE_EXPR:
5294 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5295 return NULL_TREE;
5297 if (! INTEGRAL_TYPE_P (arg0_type)
5298 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5299 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5300 return NULL_TREE;
5302 n_low = low, n_high = high;
5304 if (n_low != 0)
5305 n_low = fold_convert_loc (loc, arg0_type, n_low);
5307 if (n_high != 0)
5308 n_high = fold_convert_loc (loc, arg0_type, n_high);
5310 /* If we're converting arg0 from an unsigned type, to exp,
5311 a signed type, we will be doing the comparison as unsigned.
5312 The tests above have already verified that LOW and HIGH
5313 are both positive.
5315 So we have to ensure that we will handle large unsigned
5316 values the same way that the current signed bounds treat
5317 negative values. */
5319 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5321 tree high_positive;
5322 tree equiv_type;
5323 /* For fixed-point modes, we need to pass the saturating flag
5324 as the 2nd parameter. */
5325 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5326 equiv_type
5327 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5328 TYPE_SATURATING (arg0_type));
5329 else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5330 equiv_type = arg0_type;
5331 else
5332 equiv_type
5333 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5335 /* A range without an upper bound is, naturally, unbounded.
5336 Since convert would have cropped a very large value, use
5337 the max value for the destination type. */
5338 high_positive
5339 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5340 : TYPE_MAX_VALUE (arg0_type);
5342 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5343 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5344 fold_convert_loc (loc, arg0_type,
5345 high_positive),
5346 build_int_cst (arg0_type, 1));
5348 /* If the low bound is specified, "and" the range with the
5349 range for which the original unsigned value will be
5350 positive. */
5351 if (low != 0)
5353 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5354 1, fold_convert_loc (loc, arg0_type,
5355 integer_zero_node),
5356 high_positive))
5357 return NULL_TREE;
5359 in_p = (n_in_p == in_p);
5361 else
5363 /* Otherwise, "or" the range with the range of the input
5364 that will be interpreted as negative. */
5365 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5366 1, fold_convert_loc (loc, arg0_type,
5367 integer_zero_node),
5368 high_positive))
5369 return NULL_TREE;
5371 in_p = (in_p != n_in_p);
5375 /* Otherwise, if we are converting arg0 from signed type, to exp,
5376 an unsigned type, we will do the comparison as signed. If
5377 high is non-NULL, we punt above if it doesn't fit in the signed
5378 type, so if we get through here, +[-, high] or +[low, high] are
5379 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5380 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5381 high is not, the +[low, -] range is equivalent to union of
5382 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5383 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5384 low being 0, which should be treated as [-, -]. */
5385 else if (TYPE_UNSIGNED (exp_type)
5386 && !TYPE_UNSIGNED (arg0_type)
5387 && low
5388 && !high)
5390 if (integer_zerop (low))
5391 n_low = NULL_TREE;
5392 else
5394 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5395 n_low, build_int_cst (arg0_type, -1));
5396 n_low = build_zero_cst (arg0_type);
5397 in_p = !in_p;
5401 *p_low = n_low;
5402 *p_high = n_high;
5403 *p_in_p = in_p;
5404 return arg0;
5406 default:
5407 return NULL_TREE;
5411 /* Given EXP, a logical expression, set the range it is testing into
5412 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5413 actually being tested. *PLOW and *PHIGH will be made of the same
5414 type as the returned expression. If EXP is not a comparison, we
5415 will most likely not be returning a useful value and range. Set
5416 *STRICT_OVERFLOW_P to true if the return value is only valid
5417 because signed overflow is undefined; otherwise, do not change
5418 *STRICT_OVERFLOW_P. */
5420 tree
5421 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5422 bool *strict_overflow_p)
5424 enum tree_code code;
5425 tree arg0, arg1 = NULL_TREE;
5426 tree exp_type, nexp;
5427 int in_p;
5428 tree low, high;
5429 location_t loc = EXPR_LOCATION (exp);
5431 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5432 and see if we can refine the range. Some of the cases below may not
5433 happen, but it doesn't seem worth worrying about this. We "continue"
5434 the outer loop when we've changed something; otherwise we "break"
5435 the switch, which will "break" the while. */
5437 in_p = 0;
5438 low = high = build_int_cst (TREE_TYPE (exp), 0);
5440 while (1)
5442 code = TREE_CODE (exp);
5443 exp_type = TREE_TYPE (exp);
5444 arg0 = NULL_TREE;
5446 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5448 if (TREE_OPERAND_LENGTH (exp) > 0)
5449 arg0 = TREE_OPERAND (exp, 0);
5450 if (TREE_CODE_CLASS (code) == tcc_binary
5451 || TREE_CODE_CLASS (code) == tcc_comparison
5452 || (TREE_CODE_CLASS (code) == tcc_expression
5453 && TREE_OPERAND_LENGTH (exp) > 1))
5454 arg1 = TREE_OPERAND (exp, 1);
5456 if (arg0 == NULL_TREE)
5457 break;
5459 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5460 &high, &in_p, strict_overflow_p);
5461 if (nexp == NULL_TREE)
5462 break;
5463 exp = nexp;
5466 /* If EXP is a constant, we can evaluate whether this is true or false. */
5467 if (TREE_CODE (exp) == INTEGER_CST)
5469 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5470 exp, 0, low, 0))
5471 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5472 exp, 1, high, 1)));
5473 low = high = 0;
5474 exp = 0;
5477 *pin_p = in_p, *plow = low, *phigh = high;
5478 return exp;
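
/* For instance (hypothetical integer X): for EXP = x > 10 this
   returns X with *PIN_P = 0, *PLOW = NULL and *PHIGH = 10, i.e. the
   "- [-, 10]" form from the notation described above: outside the
   range from the lowest value to 10.  */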
5481 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5482 a bitwise check i.e. when
5483 LOW == 0xXX...X00...0
5484 HIGH == 0xXX...X11...1
5485 Return corresponding mask in MASK and stem in VALUE. */
5487 static bool
5488 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5489 tree *value)
5491 if (TREE_CODE (low) != INTEGER_CST
5492 || TREE_CODE (high) != INTEGER_CST)
5493 return false;
5495 unsigned prec = TYPE_PRECISION (type);
5496 wide_int lo = wi::to_wide (low, prec);
5497 wide_int hi = wi::to_wide (high, prec);
5499 wide_int end_mask = lo ^ hi;
5500 if ((end_mask & (end_mask + 1)) != 0
5501 || (lo & end_mask) != 0)
5502 return false;
5504 wide_int stem_mask = ~end_mask;
5505 wide_int stem = lo & stem_mask;
5506 if (stem != (hi & stem_mask))
5507 return false;
5509 *mask = wide_int_to_tree (type, stem_mask);
5510 *value = wide_int_to_tree (type, stem);
5512 return true;
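
/* E.g. (hypothetical 8-bit unsigned bounds) LOW == 0x20 and
   HIGH == 0x3f: end_mask is 0x1f, so *MASK becomes 0xe0 and *VALUE
   becomes 0x20, turning LOW <= x && x <= HIGH into
   (x & 0xe0) == 0x20.  */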
5515 /* Helper routine for build_range_check and match.pd. Return the type to
5516 perform the check or NULL if it shouldn't be optimized. */
5518 tree
5519 range_check_type (tree etype)
5521 /* First make sure that arithmetic in this type is valid, then make sure
5522 that it wraps around. */
5523 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5524 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5526 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5528 tree utype, minv, maxv;
5530 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5531 for the type in question, as we rely on this here. */
5532 utype = unsigned_type_for (etype);
5533 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5534 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5535 build_int_cst (TREE_TYPE (maxv), 1), 1);
5536 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5538 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5539 minv, 1, maxv, 1)))
5540 etype = utype;
5541 else
5542 return NULL_TREE;
5544 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5545 etype = unsigned_type_for (etype);
5546 return etype;
5549 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5550 type, TYPE, return an expression to test if EXP is in (or out of, depending
5551 on IN_P) the range. Return 0 if the test couldn't be created. */
5553 tree
5554 build_range_check (location_t loc, tree type, tree exp, int in_p,
5555 tree low, tree high)
5557 tree etype = TREE_TYPE (exp), mask, value;
5559 /* Disable this optimization for function pointer expressions
5560 on targets that require function pointer canonicalization. */
5561 if (targetm.have_canonicalize_funcptr_for_compare ()
5562 && POINTER_TYPE_P (etype)
5563 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5564 return NULL_TREE;
5566 if (! in_p)
5568 value = build_range_check (loc, type, exp, 1, low, high);
5569 if (value != 0)
5570 return invert_truthvalue_loc (loc, value);
5572 return 0;
5575 if (low == 0 && high == 0)
5576 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5578 if (low == 0)
5579 return fold_build2_loc (loc, LE_EXPR, type, exp,
5580 fold_convert_loc (loc, etype, high));
5582 if (high == 0)
5583 return fold_build2_loc (loc, GE_EXPR, type, exp,
5584 fold_convert_loc (loc, etype, low));
5586 if (operand_equal_p (low, high, 0))
5587 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5588 fold_convert_loc (loc, etype, low));
5590 if (TREE_CODE (exp) == BIT_AND_EXPR
5591 && maskable_range_p (low, high, etype, &mask, &value))
5592 return fold_build2_loc (loc, EQ_EXPR, type,
5593 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5594 exp, mask),
5595 value);
5597 if (integer_zerop (low))
5599 if (! TYPE_UNSIGNED (etype))
5601 etype = unsigned_type_for (etype);
5602 high = fold_convert_loc (loc, etype, high);
5603 exp = fold_convert_loc (loc, etype, exp);
5605 return build_range_check (loc, type, exp, 1, 0, high);
5608 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5609 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5611 int prec = TYPE_PRECISION (etype);
5613 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5615 if (TYPE_UNSIGNED (etype))
5617 tree signed_etype = signed_type_for (etype);
5618 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5619 etype
5620 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5621 else
5622 etype = signed_etype;
5623 exp = fold_convert_loc (loc, etype, exp);
5625 return fold_build2_loc (loc, GT_EXPR, type, exp,
5626 build_int_cst (etype, 0));
5630 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5631 This requires wrap-around arithmetic for the type of the expression. */
5632 etype = range_check_type (etype);
5633 if (etype == NULL_TREE)
5634 return NULL_TREE;
5636 high = fold_convert_loc (loc, etype, high);
5637 low = fold_convert_loc (loc, etype, low);
5638 exp = fold_convert_loc (loc, etype, exp);
5640 value = const_binop (MINUS_EXPR, high, low);
5642 if (value != 0 && !TREE_OVERFLOW (value))
5643 return build_range_check (loc, type,
5644 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5645 1, build_int_cst (etype, 0), value);
5647 return 0;
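/* A worked example, assuming a signed 8-bit char: for the test
   'a' <= c && c <= 'z' we reach the final case with LOW == 97 and
   HIGH == 122, subtract LOW in the unsigned variant of the type, and the
   recursive call reduces the whole check to the single unsigned
   comparison (unsigned char) c - 97 <= 25, with the subtraction done in
   the unsigned type so that it may wrap.  */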
5650 /* Return the predecessor of VAL in its type, handling the infinite case. */
5652 static tree
5653 range_predecessor (tree val)
5655 tree type = TREE_TYPE (val);
5657 if (INTEGRAL_TYPE_P (type)
5658 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5659 return 0;
5660 else
5661 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5662 build_int_cst (TREE_TYPE (val), 1), 0);
5665 /* Return the successor of VAL in its type, handling the infinite case. */
5667 static tree
5668 range_successor (tree val)
5670 tree type = TREE_TYPE (val);
5672 if (INTEGRAL_TYPE_P (type)
5673 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5674 return 0;
5675 else
5676 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5677 build_int_cst (TREE_TYPE (val), 1), 0);
5680 /* Given two ranges, see if we can merge them into one. Return 1 if we
5681 can, 0 if we can't. Set the output range into the specified parameters. */
5683 bool
5684 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5685 tree high0, int in1_p, tree low1, tree high1)
5687 bool no_overlap;
5688 int subset;
5689 int temp;
5690 tree tem;
5691 int in_p;
5692 tree low, high;
5693 int lowequal = ((low0 == 0 && low1 == 0)
5694 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5695 low0, 0, low1, 0)));
5696 int highequal = ((high0 == 0 && high1 == 0)
5697 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5698 high0, 1, high1, 1)));
5700 /* Make range 0 be the range that starts first, or ends last if they
5701 start at the same value. Swap them if it isn't. */
5702 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5703 low0, 0, low1, 0))
5704 || (lowequal
5705 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5706 high1, 1, high0, 1))))
5708 temp = in0_p, in0_p = in1_p, in1_p = temp;
5709 tem = low0, low0 = low1, low1 = tem;
5710 tem = high0, high0 = high1, high1 = tem;
5713 /* If the second range is != high1, where high1 is the maximum value
5714 of its type, try merging with the < high1 range first. */
5715 if (low1
5716 && high1
5717 && TREE_CODE (low1) == INTEGER_CST
5718 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5719 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5720 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5721 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5722 && operand_equal_p (low1, high1, 0))
5724 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5725 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5726 !in1_p, NULL_TREE, range_predecessor (low1)))
5727 return true;
5728 /* Similarly, if the second range is != low1, where low1 is the minimum
5729 value of its type, try merging with the > low1 range first. */
5730 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5731 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5732 !in1_p, range_successor (low1), NULL_TREE))
5733 return true;
5736 /* Now flag two cases, whether the ranges are disjoint or whether the
5737 second range is totally subsumed in the first. Note that the tests
5738 below are simplified by the ones above. */
5739 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5740 high0, 1, low1, 0));
5741 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5742 high1, 1, high0, 1));
5744 /* We now have four cases, depending on whether we are including or
5745 excluding the two ranges. */
5746 if (in0_p && in1_p)
5748 /* If they don't overlap, the result is false. If the second range
5749 is a subset it is the result. Otherwise, the range is from the start
5750 of the second to the end of the first. */
5751 if (no_overlap)
5752 in_p = 0, low = high = 0;
5753 else if (subset)
5754 in_p = 1, low = low1, high = high1;
5755 else
5756 in_p = 1, low = low1, high = high0;
5759 else if (in0_p && ! in1_p)
5761 /* If they don't overlap, the result is the first range. If they are
5762 equal, the result is false. If the second range is a subset of the
5763 first, and the ranges begin at the same place, we go from just after
5764 the end of the second range to the end of the first. If the second
5765 range is not a subset of the first, or if it is a subset and both
5766 ranges end at the same place, the range starts at the start of the
5767 first range and ends just before the second range.
5768 Otherwise, we can't describe this as a single range. */
5769 if (no_overlap)
5770 in_p = 1, low = low0, high = high0;
5771 else if (lowequal && highequal)
5772 in_p = 0, low = high = 0;
5773 else if (subset && lowequal)
5775 low = range_successor (high1);
5776 high = high0;
5777 in_p = 1;
5778 if (low == 0)
5780 /* We are in the weird situation where high0 > high1 but
5781 high1 has no successor. Punt. */
5782 return 0;
5785 else if (! subset || highequal)
5787 low = low0;
5788 high = range_predecessor (low1);
5789 in_p = 1;
5790 if (high == 0)
5792 /* low0 < low1 but low1 has no predecessor. Punt. */
5793 return 0;
5796 else
5797 return 0;
5800 else if (! in0_p && in1_p)
5802 /* If they don't overlap, the result is the second range. If the second
5803 is a subset of the first, the result is false. Otherwise,
5804 the range starts just after the first range and ends at the
5805 end of the second. */
5806 if (no_overlap)
5807 in_p = 1, low = low1, high = high1;
5808 else if (subset || highequal)
5809 in_p = 0, low = high = 0;
5810 else
5812 low = range_successor (high0);
5813 high = high1;
5814 in_p = 1;
5815 if (low == 0)
5817 /* high1 > high0 but high0 has no successor. Punt. */
5818 return 0;
5823 else
5825 /* The case where we are excluding both ranges. Here the complex case
5826 is if they don't overlap. In that case, the only time we have a
5827 range is if they are adjacent. If the second is a subset of the
5828 first, the result is the first. Otherwise, the range to exclude
5829 starts at the beginning of the first range and ends at the end of the
5830 second. */
5831 if (no_overlap)
5833 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5834 range_successor (high0),
5835 1, low1, 0)))
5836 in_p = 0, low = low0, high = high1;
5837 else
5839 /* Canonicalize - [min, x] into - [-, x]. */
5840 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5841 switch (TREE_CODE (TREE_TYPE (low0)))
5843 case ENUMERAL_TYPE:
5844 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5845 GET_MODE_BITSIZE
5846 (TYPE_MODE (TREE_TYPE (low0)))))
5847 break;
5848 /* FALLTHROUGH */
5849 case INTEGER_TYPE:
5850 if (tree_int_cst_equal (low0,
5851 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5852 low0 = 0;
5853 break;
5854 case POINTER_TYPE:
5855 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5856 && integer_zerop (low0))
5857 low0 = 0;
5858 break;
5859 default:
5860 break;
5863 /* Canonicalize - [x, max] into - [x, -]. */
5864 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5865 switch (TREE_CODE (TREE_TYPE (high1)))
5867 case ENUMERAL_TYPE:
5868 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5869 GET_MODE_BITSIZE
5870 (TYPE_MODE (TREE_TYPE (high1)))))
5871 break;
5872 /* FALLTHROUGH */
5873 case INTEGER_TYPE:
5874 if (tree_int_cst_equal (high1,
5875 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5876 high1 = 0;
5877 break;
5878 case POINTER_TYPE:
5879 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5880 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5881 high1, 1,
5882 build_int_cst (TREE_TYPE (high1), 1),
5883 1)))
5884 high1 = 0;
5885 break;
5886 default:
5887 break;
5890 /* The ranges might also be adjacent between the maximum and
5891 minimum values of the given type. For
5892 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5893 return + [x + 1, y - 1]. */
5894 if (low0 == 0 && high1 == 0)
5896 low = range_successor (high0);
5897 high = range_predecessor (low1);
5898 if (low == 0 || high == 0)
5899 return 0;
5901 in_p = 1;
5903 else
5904 return 0;
5907 else if (subset)
5908 in_p = 0, low = low0, high = high0;
5909 else
5910 in_p = 0, low = low0, high = high1;
5913 *pin_p = in_p, *plow = low, *phigh = high;
5914 return 1;
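/* Some worked examples with illustrative values: merging + [2, 10] with
   + [5, 20] (both IN_P set) yields the intersection + [5, 10]; merging
   + [2, 10] with - [5, 20] yields + [2, 4]; and merging the two excluded
   ranges - [0, 3] and - [4, 9], which are adjacent, yields - [0, 9].  */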
5918 /* Subroutine of fold, looking inside expressions of the form
5919 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5920 are the three operands of the COND_EXPR. This function is
5921 being used also to optimize A op B ? C : A, by reversing the
5922 comparison first.
5924 Return a folded expression whose code is not a COND_EXPR
5925 anymore, or NULL_TREE if no folding opportunity is found. */
5927 static tree
5928 fold_cond_expr_with_comparison (location_t loc, tree type,
5929 enum tree_code comp_code,
5930 tree arg00, tree arg01, tree arg1, tree arg2)
5932 tree arg1_type = TREE_TYPE (arg1);
5933 tree tem;
5935 STRIP_NOPS (arg1);
5936 STRIP_NOPS (arg2);
5938 /* If we have A op 0 ? A : -A, consider applying the following
5939 transformations:
5941 A == 0? A : -A same as -A
5942 A != 0? A : -A same as A
5943 A >= 0? A : -A same as abs (A)
5944 A > 0? A : -A same as abs (A)
5945 A <= 0? A : -A same as -abs (A)
5946 A < 0? A : -A same as -abs (A)
5948 None of these transformations work for modes with signed
5949 zeros. If A is +/-0, the first two transformations will
5950 change the sign of the result (from +0 to -0, or vice
5951 versa). The last four will fix the sign of the result,
5952 even though the original expressions could be positive or
5953 negative, depending on the sign of A.
5955 Note that all these transformations are correct if A is
5956 NaN, since the two alternatives (A and -A) are also NaNs. */
5957 if (!HONOR_SIGNED_ZEROS (type)
5958 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5959 ? real_zerop (arg01)
5960 : integer_zerop (arg01))
5961 && ((TREE_CODE (arg2) == NEGATE_EXPR
5962 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5963 /* In the case that A is of the form X-Y, '-A' (arg2) may
5964 have already been folded to Y-X, check for that. */
5965 || (TREE_CODE (arg1) == MINUS_EXPR
5966 && TREE_CODE (arg2) == MINUS_EXPR
5967 && operand_equal_p (TREE_OPERAND (arg1, 0),
5968 TREE_OPERAND (arg2, 1), 0)
5969 && operand_equal_p (TREE_OPERAND (arg1, 1),
5970 TREE_OPERAND (arg2, 0), 0))))
5971 switch (comp_code)
5973 case EQ_EXPR:
5974 case UNEQ_EXPR:
5975 tem = fold_convert_loc (loc, arg1_type, arg1);
5976 return fold_convert_loc (loc, type, negate_expr (tem));
5977 case NE_EXPR:
5978 case LTGT_EXPR:
5979 return fold_convert_loc (loc, type, arg1);
5980 case UNGE_EXPR:
5981 case UNGT_EXPR:
5982 if (flag_trapping_math)
5983 break;
5984 /* Fall through. */
5985 case GE_EXPR:
5986 case GT_EXPR:
5987 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5988 break;
5989 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5990 return fold_convert_loc (loc, type, tem);
5991 case UNLE_EXPR:
5992 case UNLT_EXPR:
5993 if (flag_trapping_math)
5994 break;
5995 /* FALLTHRU */
5996 case LE_EXPR:
5997 case LT_EXPR:
5998 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5999 break;
6000 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6001 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6003 /* A <= 0 ? A : -A for A == INT_MIN is valid, but -abs(INT_MIN)
6004 is not: it invokes undefined behavior both in abs and in the
6005 negation of the result. So, use ABSU_EXPR instead. */
6006 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6007 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6008 tem = negate_expr (tem);
6009 return fold_convert_loc (loc, type, tem);
6011 else
6013 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6014 return negate_expr (fold_convert_loc (loc, type, tem));
6016 default:
6017 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6018 break;
6021 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6022 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6023 both transformations are correct when A is NaN: A != 0
6024 is then true, and A == 0 is false. */
6026 if (!HONOR_SIGNED_ZEROS (type)
6027 && integer_zerop (arg01) && integer_zerop (arg2))
6029 if (comp_code == NE_EXPR)
6030 return fold_convert_loc (loc, type, arg1);
6031 else if (comp_code == EQ_EXPR)
6032 return build_zero_cst (type);
6035 /* Try some transformations of A op B ? A : B.
6037 A == B? A : B same as B
6038 A != B? A : B same as A
6039 A >= B? A : B same as max (A, B)
6040 A > B? A : B same as max (B, A)
6041 A <= B? A : B same as min (A, B)
6042 A < B? A : B same as min (B, A)
6044 As above, these transformations don't work in the presence
6045 of signed zeros. For example, if A and B are zeros of
6046 opposite sign, the first two transformations will change
6047 the sign of the result. In the last four, the original
6048 expressions give different results for (A=+0, B=-0) and
6049 (A=-0, B=+0), but the transformed expressions do not.
6051 The first two transformations are correct if either A or B
6052 is a NaN. In the first transformation, the condition will
6053 be false, and B will indeed be chosen. In the case of the
6054 second transformation, the condition A != B will be true,
6055 and A will be chosen.
6057 The conversions to max() and min() are not correct if B is
6058 a number and A is not. The conditions in the original
6059 expressions will be false, so all four give B. The min()
6060 and max() versions would give a NaN instead. */
6061 if (!HONOR_SIGNED_ZEROS (type)
6062 && operand_equal_for_comparison_p (arg01, arg2)
6063 /* Avoid these transformations if the COND_EXPR may be used
6064 as an lvalue in the C++ front-end. PR c++/19199. */
6065 && (in_gimple_form
6066 || VECTOR_TYPE_P (type)
6067 || (! lang_GNU_CXX ()
6068 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6069 || ! maybe_lvalue_p (arg1)
6070 || ! maybe_lvalue_p (arg2)))
6072 tree comp_op0 = arg00;
6073 tree comp_op1 = arg01;
6074 tree comp_type = TREE_TYPE (comp_op0);
6076 switch (comp_code)
6078 case EQ_EXPR:
6079 return fold_convert_loc (loc, type, arg2);
6080 case NE_EXPR:
6081 return fold_convert_loc (loc, type, arg1);
6082 case LE_EXPR:
6083 case LT_EXPR:
6084 case UNLE_EXPR:
6085 case UNLT_EXPR:
6086 /* In C++ a ?: expression can be an lvalue, so put the
6087 operand which will be used if they are equal first
6088 so that we can convert this back to the
6089 corresponding COND_EXPR. */
6090 if (!HONOR_NANS (arg1))
6092 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6093 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6094 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6095 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6096 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6097 comp_op1, comp_op0);
6098 return fold_convert_loc (loc, type, tem);
6100 break;
6101 case GE_EXPR:
6102 case GT_EXPR:
6103 case UNGE_EXPR:
6104 case UNGT_EXPR:
6105 if (!HONOR_NANS (arg1))
6107 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6108 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6109 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6110 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6111 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6112 comp_op1, comp_op0);
6113 return fold_convert_loc (loc, type, tem);
6115 break;
6116 case UNEQ_EXPR:
6117 if (!HONOR_NANS (arg1))
6118 return fold_convert_loc (loc, type, arg2);
6119 break;
6120 case LTGT_EXPR:
6121 if (!HONOR_NANS (arg1))
6122 return fold_convert_loc (loc, type, arg1);
6123 break;
6124 default:
6125 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6126 break;
6130 return NULL_TREE;
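/* For example, with A and B of type double and neither NaNs nor signed
   zeros honored (e.g. under -ffinite-math-only -fno-signed-zeros),
   A < B ? A : B folds to MIN_EXPR <B, A>; B is placed first because it
   is the value selected when A == B compares false, which keeps the fold
   convertible back to the COND_EXPR for the C++ lvalue case described
   above.  */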
6135 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6136 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6137 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6138 false) >= 2)
6139 #endif
6141 /* EXP is some logical combination of boolean tests. See if we can
6142 merge it into some range test. Return the new tree if so. */
6144 static tree
6145 fold_range_test (location_t loc, enum tree_code code, tree type,
6146 tree op0, tree op1)
6148 int or_op = (code == TRUTH_ORIF_EXPR
6149 || code == TRUTH_OR_EXPR);
6150 int in0_p, in1_p, in_p;
6151 tree low0, low1, low, high0, high1, high;
6152 bool strict_overflow_p = false;
6153 tree tem, lhs, rhs;
6154 const char * const warnmsg = G_("assuming signed overflow does not occur "
6155 "when simplifying range test");
6157 if (!INTEGRAL_TYPE_P (type))
6158 return 0;
6160 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6161 /* If op0 is known true or false and this is a short-circuiting
6162 operation, we must not merge with op1 since that makes side effects
6163 unconditional. So special-case this. */
6164 if (!lhs
6165 && ((code == TRUTH_ORIF_EXPR && in0_p)
6166 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6167 return op0;
6168 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6170 /* If this is an OR operation, invert both sides; we will invert
6171 again at the end. */
6172 if (or_op)
6173 in0_p = ! in0_p, in1_p = ! in1_p;
6175 /* If both expressions are the same, if we can merge the ranges, and we
6176 can build the range test, return it or it inverted. If one of the
6177 ranges is always true or always false, consider it to be the same
6178 expression as the other. */
6179 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6180 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6181 in1_p, low1, high1)
6182 && (tem = (build_range_check (loc, type,
6183 lhs != 0 ? lhs
6184 : rhs != 0 ? rhs : integer_zero_node,
6185 in_p, low, high))) != 0)
6187 if (strict_overflow_p)
6188 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6189 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6192 /* On machines where the branch cost is expensive, if this is a
6193 short-circuited branch and the underlying object on both sides
6194 is the same, make a non-short-circuit operation. */
6195 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6196 if (param_logical_op_non_short_circuit != -1)
6197 logical_op_non_short_circuit
6198 = param_logical_op_non_short_circuit;
6199 if (logical_op_non_short_circuit
6200 && !sanitize_coverage_p ()
6201 && lhs != 0 && rhs != 0
6202 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6203 && operand_equal_p (lhs, rhs, 0))
6205 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6206 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6207 which cases we can't do this. */
6208 if (simple_operand_p (lhs))
6209 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6210 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6211 type, op0, op1);
6213 else if (!lang_hooks.decls.global_bindings_p ()
6214 && !CONTAINS_PLACEHOLDER_P (lhs))
6216 tree common = save_expr (lhs);
6218 if ((lhs = build_range_check (loc, type, common,
6219 or_op ? ! in0_p : in0_p,
6220 low0, high0)) != 0
6221 && (rhs = build_range_check (loc, type, common,
6222 or_op ? ! in1_p : in1_p,
6223 low1, high1)) != 0)
6225 if (strict_overflow_p)
6226 fold_overflow_warning (warnmsg,
6227 WARN_STRICT_OVERFLOW_COMPARISON);
6228 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6229 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6230 type, lhs, rhs);
6235 return 0;
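/* For example, for c == 4 || c == 5 both operands become the single-point
   ranges + [4, 4] and + [5, 5]; the OR inverts them, merge_ranges
   combines - [4, 4] and - [5, 5] into the adjacent - [4, 5], and the
   final inversion yields the range check for 4 <= c && c <= 5, i.e.
   (unsigned) (c - 4) <= 1.  */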
6238 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
6239 P-bit value. Arrange things so the extra bits will be set to zero if and
6240 only if C is sign-extended to its full width. If MASK is nonzero,
6241 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6243 static tree
6244 unextend (tree c, int p, int unsignedp, tree mask)
6246 tree type = TREE_TYPE (c);
6247 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6248 tree temp;
6250 if (p == modesize || unsignedp)
6251 return c;
6253 /* We work by getting just the sign bit into the low-order bit, then
6254 into the high-order bit, then sign-extend. We then XOR that value
6255 with C. */
6256 temp = build_int_cst (TREE_TYPE (c),
6257 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6259 /* We must use a signed type in order to get an arithmetic right shift.
6260 However, we must also avoid introducing accidental overflows, so that
6261 a subsequent call to integer_zerop will work. Hence we must
6262 do the type conversion here. At this point, the constant is either
6263 zero or one, and the conversion to a signed type can never overflow.
6264 We could get an overflow if this conversion is done anywhere else. */
6265 if (TYPE_UNSIGNED (type))
6266 temp = fold_convert (signed_type_for (type), temp);
6268 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6269 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6270 if (mask != 0)
6271 temp = const_binop (BIT_AND_EXPR, temp,
6272 fold_convert (TREE_TYPE (c), mask));
6273 /* If necessary, convert the type back to match the type of C. */
6274 if (TYPE_UNSIGNED (type))
6275 temp = fold_convert (type, temp);
6277 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
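/* A worked example with MODESIZE == 32, P == 8 and MASK zero: for
   C == 0xffffff80, the sign-extended -128, the sign bit is 1, TEMP
   becomes 0xffffff00 and the XOR yields 0x00000080, i.e. all extra bits
   clear; for C == 0x00000080, which is not sign-extended, the same TEMP
   makes the XOR come out as 0xffffff80, with the extra bits set.  */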
6280 /* For an expression that has the form
6281 (A && B) || ~B
6282 or
6283 (A || B) && ~B,
6284 we can drop one of the inner expressions and simplify to
6285 A || ~B
6286 or
6287 A && ~B
6288 LOC is the location of the resulting expression. OP is the inner
6289 logical operation; the left-hand side in the examples above, while CMPOP
6290 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6291 removing a condition that guards another, as in
6292 (A != NULL && A->...) || A == NULL
6293 which we must not transform. If RHS_ONLY is true, only eliminate the
6294 right-most operand of the inner logical operation. */
6296 static tree
6297 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6298 bool rhs_only)
6300 tree type = TREE_TYPE (cmpop);
6301 enum tree_code code = TREE_CODE (cmpop);
6302 enum tree_code truthop_code = TREE_CODE (op);
6303 tree lhs = TREE_OPERAND (op, 0);
6304 tree rhs = TREE_OPERAND (op, 1);
6305 tree orig_lhs = lhs, orig_rhs = rhs;
6306 enum tree_code rhs_code = TREE_CODE (rhs);
6307 enum tree_code lhs_code = TREE_CODE (lhs);
6308 enum tree_code inv_code;
6310 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6311 return NULL_TREE;
6313 if (TREE_CODE_CLASS (code) != tcc_comparison)
6314 return NULL_TREE;
6316 if (rhs_code == truthop_code)
6318 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6319 if (newrhs != NULL_TREE)
6321 rhs = newrhs;
6322 rhs_code = TREE_CODE (rhs);
6325 if (lhs_code == truthop_code && !rhs_only)
6327 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6328 if (newlhs != NULL_TREE)
6330 lhs = newlhs;
6331 lhs_code = TREE_CODE (lhs);
6335 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6336 if (inv_code == rhs_code
6337 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6338 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6339 return lhs;
6340 if (!rhs_only && inv_code == lhs_code
6341 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6342 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6343 return rhs;
6344 if (rhs != orig_rhs || lhs != orig_lhs)
6345 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6346 lhs, rhs);
6347 return NULL_TREE;
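/* For example, in (a < 10 && b > 5) || b <= 5 the inverse of the CMPOP
   b <= 5 is b > 5, which matches the right-hand side of the inner
   TRUTH_AND_EXPR, so a < 10 alone is returned and the whole expression
   simplifies to a < 10 || b <= 5.  */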
6350 /* Find ways of folding logical expressions of LHS and RHS:
6351 Try to merge two comparisons to the same innermost item.
6352 Look for range tests like "ch >= '0' && ch <= '9'".
6353 Look for combinations of simple terms on machines with expensive branches
6354 and evaluate the RHS unconditionally.
6356 For example, if we have p->a == 2 && p->b == 4 and we can make an
6357 object large enough to span both A and B, we can do this with a comparison
6358 against the object ANDed with the a mask.
6360 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6361 operations to do this with one comparison.
6363 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6364 function and the one above.
6366 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6367 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6369 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6370 two operands.
6372 We return the simplified tree or 0 if no optimization is possible. */
6374 static tree
6375 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6376 tree lhs, tree rhs)
6378 /* If this is the "or" of two comparisons, we can do something if
6379 the comparisons are NE_EXPR. If this is the "and", we can do something
6380 if the comparisons are EQ_EXPR. I.e.,
6381 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6383 WANTED_CODE is this operation code. For single bit fields, we can
6384 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6385 comparison for one-bit fields. */
6387 enum tree_code wanted_code;
6388 enum tree_code lcode, rcode;
6389 tree ll_arg, lr_arg, rl_arg, rr_arg;
6390 tree ll_inner, lr_inner, rl_inner, rr_inner;
6391 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6392 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6393 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6394 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6395 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6396 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6397 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6398 scalar_int_mode lnmode, rnmode;
6399 tree ll_mask, lr_mask, rl_mask, rr_mask;
6400 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6401 tree l_const, r_const;
6402 tree lntype, rntype, result;
6403 HOST_WIDE_INT first_bit, end_bit;
6404 int volatilep;
6406 /* Start by getting the comparison codes. Fail if anything is volatile.
6407 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6408 it were surrounded with a NE_EXPR. */
6410 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6411 return 0;
6413 lcode = TREE_CODE (lhs);
6414 rcode = TREE_CODE (rhs);
6416 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6418 lhs = build2 (NE_EXPR, truth_type, lhs,
6419 build_int_cst (TREE_TYPE (lhs), 0));
6420 lcode = NE_EXPR;
6423 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6425 rhs = build2 (NE_EXPR, truth_type, rhs,
6426 build_int_cst (TREE_TYPE (rhs), 0));
6427 rcode = NE_EXPR;
6430 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6431 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6432 return 0;
6434 ll_arg = TREE_OPERAND (lhs, 0);
6435 lr_arg = TREE_OPERAND (lhs, 1);
6436 rl_arg = TREE_OPERAND (rhs, 0);
6437 rr_arg = TREE_OPERAND (rhs, 1);
6439 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
6440 if (simple_operand_p (ll_arg)
6441 && simple_operand_p (lr_arg))
6443 if (operand_equal_p (ll_arg, rl_arg, 0)
6444 && operand_equal_p (lr_arg, rr_arg, 0))
6446 result = combine_comparisons (loc, code, lcode, rcode,
6447 truth_type, ll_arg, lr_arg);
6448 if (result)
6449 return result;
6451 else if (operand_equal_p (ll_arg, rr_arg, 0)
6452 && operand_equal_p (lr_arg, rl_arg, 0))
6454 result = combine_comparisons (loc, code, lcode,
6455 swap_tree_comparison (rcode),
6456 truth_type, ll_arg, lr_arg);
6457 if (result)
6458 return result;
6462 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6463 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6465 /* If the RHS can be evaluated unconditionally and its operands are
6466 simple, it wins to evaluate the RHS unconditionally on machines
6467 with expensive branches. In this case, this isn't a comparison
6468 that can be merged. */
6470 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6471 false) >= 2
6472 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6473 && simple_operand_p (rl_arg)
6474 && simple_operand_p (rr_arg))
6476 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6477 if (code == TRUTH_OR_EXPR
6478 && lcode == NE_EXPR && integer_zerop (lr_arg)
6479 && rcode == NE_EXPR && integer_zerop (rr_arg)
6480 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6481 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6482 return build2_loc (loc, NE_EXPR, truth_type,
6483 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6484 ll_arg, rl_arg),
6485 build_int_cst (TREE_TYPE (ll_arg), 0));
6487 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6488 if (code == TRUTH_AND_EXPR
6489 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6490 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6491 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6492 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6493 return build2_loc (loc, EQ_EXPR, truth_type,
6494 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6495 ll_arg, rl_arg),
6496 build_int_cst (TREE_TYPE (ll_arg), 0));
6499 /* See if the comparisons can be merged. Then get all the parameters for
6500 each side. */
6502 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6503 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6504 return 0;
6506 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6507 volatilep = 0;
6508 ll_inner = decode_field_reference (loc, &ll_arg,
6509 &ll_bitsize, &ll_bitpos, &ll_mode,
6510 &ll_unsignedp, &ll_reversep, &volatilep,
6511 &ll_mask, &ll_and_mask);
6512 lr_inner = decode_field_reference (loc, &lr_arg,
6513 &lr_bitsize, &lr_bitpos, &lr_mode,
6514 &lr_unsignedp, &lr_reversep, &volatilep,
6515 &lr_mask, &lr_and_mask);
6516 rl_inner = decode_field_reference (loc, &rl_arg,
6517 &rl_bitsize, &rl_bitpos, &rl_mode,
6518 &rl_unsignedp, &rl_reversep, &volatilep,
6519 &rl_mask, &rl_and_mask);
6520 rr_inner = decode_field_reference (loc, &rr_arg,
6521 &rr_bitsize, &rr_bitpos, &rr_mode,
6522 &rr_unsignedp, &rr_reversep, &volatilep,
6523 &rr_mask, &rr_and_mask);
6525 /* The inner operation on the lhs of each comparison must be the
6526 same if we are to be able to do anything.
6527 Then see if we have constants. If not, the same must be true for
6528 the rhs's. */
6529 if (volatilep
6530 || ll_reversep != rl_reversep
6531 || ll_inner == 0 || rl_inner == 0
6532 || ! operand_equal_p (ll_inner, rl_inner, 0))
6533 return 0;
6535 if (TREE_CODE (lr_arg) == INTEGER_CST
6536 && TREE_CODE (rr_arg) == INTEGER_CST)
6538 l_const = lr_arg, r_const = rr_arg;
6539 lr_reversep = ll_reversep;
6541 else if (lr_reversep != rr_reversep
6542 || lr_inner == 0 || rr_inner == 0
6543 || ! operand_equal_p (lr_inner, rr_inner, 0))
6544 return 0;
6545 else
6546 l_const = r_const = 0;
6548 /* If either comparison code is not correct for our logical operation,
6549 fail. However, we can convert a one-bit comparison against zero into
6550 the opposite comparison against that bit being set in the field. */
6552 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6553 if (lcode != wanted_code)
6555 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6557 /* Make the left operand unsigned, since we are only interested
6558 in the value of one bit. Otherwise we are doing the wrong
6559 thing below. */
6560 ll_unsignedp = 1;
6561 l_const = ll_mask;
6563 else
6564 return 0;
6567 /* This is analogous to the code for l_const above. */
6568 if (rcode != wanted_code)
6570 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6572 rl_unsignedp = 1;
6573 r_const = rl_mask;
6575 else
6576 return 0;
6579 /* See if we can find a mode that contains both fields being compared on
6580 the left. If we can't, fail. Otherwise, update all constants and masks
6581 to be relative to a field of that size. */
6582 first_bit = MIN (ll_bitpos, rl_bitpos);
6583 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6584 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6585 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6586 volatilep, &lnmode))
6587 return 0;
6589 lnbitsize = GET_MODE_BITSIZE (lnmode);
6590 lnbitpos = first_bit & ~ (lnbitsize - 1);
6591 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6592 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6594 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6596 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6597 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6600 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6601 size_int (xll_bitpos));
6602 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6603 size_int (xrl_bitpos));
6604 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6605 return 0;
6607 if (l_const)
6609 l_const = fold_convert_loc (loc, lntype, l_const);
6610 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6611 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6612 if (l_const == NULL_TREE)
6613 return 0;
6614 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6615 fold_build1_loc (loc, BIT_NOT_EXPR,
6616 lntype, ll_mask))))
6618 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6620 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6623 if (r_const)
6625 r_const = fold_convert_loc (loc, lntype, r_const);
6626 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6627 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6628 if (r_const == NULL_TREE)
6629 return 0;
6630 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6631 fold_build1_loc (loc, BIT_NOT_EXPR,
6632 lntype, rl_mask))))
6634 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6636 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6640 /* If the right sides are not constant, do the same for it. Also,
6641 disallow this optimization if a size, signedness or storage order
6642 mismatch occurs between the left and right sides. */
6643 if (l_const == 0)
6645 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6646 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6647 || ll_reversep != lr_reversep
6648 /* Make sure the two fields on the right
6649 correspond to the left without being swapped. */
6650 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6651 return 0;
6653 first_bit = MIN (lr_bitpos, rr_bitpos);
6654 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6655 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6656 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6657 volatilep, &rnmode))
6658 return 0;
6660 rnbitsize = GET_MODE_BITSIZE (rnmode);
6661 rnbitpos = first_bit & ~ (rnbitsize - 1);
6662 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6663 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6665 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6667 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6668 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6671 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6672 rntype, lr_mask),
6673 size_int (xlr_bitpos));
6674 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6675 rntype, rr_mask),
6676 size_int (xrr_bitpos));
6677 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6678 return 0;
6680 /* Make a mask that corresponds to both fields being compared.
6681 Do this for both items being compared. If the operands are the
6682 same size and the bits being compared are in the same position
6683 then we can do this by masking both and comparing the masked
6684 results. */
6685 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6686 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6687 if (lnbitsize == rnbitsize
6688 && xll_bitpos == xlr_bitpos
6689 && lnbitpos >= 0
6690 && rnbitpos >= 0)
6692 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6693 lntype, lnbitsize, lnbitpos,
6694 ll_unsignedp || rl_unsignedp, ll_reversep);
6695 if (! all_ones_mask_p (ll_mask, lnbitsize))
6696 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6698 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6699 rntype, rnbitsize, rnbitpos,
6700 lr_unsignedp || rr_unsignedp, lr_reversep);
6701 if (! all_ones_mask_p (lr_mask, rnbitsize))
6702 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6704 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6707 /* There is still another way we can do something: If both pairs of
6708 fields being compared are adjacent, we may be able to make a wider
6709 field containing them both.
6711 Note that we still must mask the lhs/rhs expressions. Furthermore,
6712 the mask must be shifted to account for the shift done by
6713 make_bit_field_ref. */
6714 if (((ll_bitsize + ll_bitpos == rl_bitpos
6715 && lr_bitsize + lr_bitpos == rr_bitpos)
6716 || (ll_bitpos == rl_bitpos + rl_bitsize
6717 && lr_bitpos == rr_bitpos + rr_bitsize))
6718 && ll_bitpos >= 0
6719 && rl_bitpos >= 0
6720 && lr_bitpos >= 0
6721 && rr_bitpos >= 0)
6723 tree type;
6725 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6726 ll_bitsize + rl_bitsize,
6727 MIN (ll_bitpos, rl_bitpos),
6728 ll_unsignedp, ll_reversep);
6729 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6730 lr_bitsize + rr_bitsize,
6731 MIN (lr_bitpos, rr_bitpos),
6732 lr_unsignedp, lr_reversep);
6734 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6735 size_int (MIN (xll_bitpos, xrl_bitpos)));
6736 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6737 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6738 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6739 return 0;
6741 /* Convert to the smaller type before masking out unwanted bits. */
6742 type = lntype;
6743 if (lntype != rntype)
6745 if (lnbitsize > rnbitsize)
6747 lhs = fold_convert_loc (loc, rntype, lhs);
6748 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6749 type = rntype;
6751 else if (lnbitsize < rnbitsize)
6753 rhs = fold_convert_loc (loc, lntype, rhs);
6754 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6755 type = lntype;
6759 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6760 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6762 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6763 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6765 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6768 return 0;
6771 /* Handle the case of comparisons with constants. If there is something in
6772 common between the masks, those bits of the constants must be the same.
6773 If not, the condition is always false. Test for this to avoid generating
6774 incorrect code below. */
6775 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6776 if (! integer_zerop (result)
6777 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6778 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6780 if (wanted_code == NE_EXPR)
6782 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6783 return constant_boolean_node (true, truth_type);
6785 else
6787 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6788 return constant_boolean_node (false, truth_type);
6792 if (lnbitpos < 0)
6793 return 0;
6795 /* Construct the expression we will return. First get the component
6796 reference we will make. Unless the mask is all ones the width of
6797 that field, perform the mask operation. Then compare with the
6798 merged constant. */
6799 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6800 lntype, lnbitsize, lnbitpos,
6801 ll_unsignedp || rl_unsignedp, ll_reversep);
6803 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6804 if (! all_ones_mask_p (ll_mask, lnbitsize))
6805 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6807 return build2_loc (loc, wanted_code, truth_type, result,
6808 const_binop (BIT_IOR_EXPR, l_const, r_const));
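/* A worked example, assuming a little-endian target and the layout
     struct S { unsigned char a; unsigned char b; } *p;
   for p->a == 2 && p->b == 4 both fields fit in one 16-bit mode, so the
   result is a single bit-field load of both bytes compared against the
   merged constant 0x0402 (l_const shifted to bit position 0, IOR'ed with
   r_const shifted to bit position 8).  */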
6811 /* T is an integer expression that is being multiplied or divided by,
6812 or taken modulo, a constant C (CODE says which operation and what kind
6813 of division or modulus). See if we can eliminate that operation by folding it with
6814 other operations already in T. WIDE_TYPE, if non-null, is a type that
6815 should be used for the computation if wider than our type.
6817 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6818 (X * 2) + (Y * 4). We must, however, be assured that either the original
6819 expression would not overflow or that overflow is undefined for the type
6820 in the language in question.
6822 If we return a non-null expression, it is an equivalent form of the
6823 original computation, but need not be in the original type.
6825 We set *STRICT_OVERFLOW_P to true if the return value depends on
6826 signed overflow being undefined. Otherwise we do not change
6827 *STRICT_OVERFLOW_P. */
6829 static tree
6830 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6831 bool *strict_overflow_p)
6833 /* To avoid exponential search depth, refuse to allow recursion past
6834 three levels. Beyond that (1) it's highly unlikely that we'll find
6835 something interesting and (2) we've probably processed it before
6836 when we built the inner expression. */
6838 static int depth;
6839 tree ret;
6841 if (depth > 3)
6842 return NULL;
6844 depth++;
6845 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6846 depth--;
6848 return ret;
6851 static tree
6852 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6853 bool *strict_overflow_p)
6855 tree type = TREE_TYPE (t);
6856 enum tree_code tcode = TREE_CODE (t);
6857 tree ctype = type;
6858 if (wide_type)
6860 if (TREE_CODE (type) == BITINT_TYPE
6861 || TREE_CODE (wide_type) == BITINT_TYPE)
6863 if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6864 ctype = wide_type;
6866 else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6867 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6868 ctype = wide_type;
6870 tree t1, t2;
6871 bool same_p = tcode == code;
6872 tree op0 = NULL_TREE, op1 = NULL_TREE;
6873 bool sub_strict_overflow_p;
6875 /* Don't deal with constants of zero here; they confuse the code below. */
6876 if (integer_zerop (c))
6877 return NULL_TREE;
6879 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6880 op0 = TREE_OPERAND (t, 0);
6882 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6883 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6885 /* Note that we need not handle conditional operations here since fold
6886 already handles those cases. So just do arithmetic here. */
6887 switch (tcode)
6889 case INTEGER_CST:
6890 /* For a constant, we can always simplify if we are a multiply
6891 or (for divide and modulus) if it is a multiple of our constant. */
6892 if (code == MULT_EXPR
6893 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6894 TYPE_SIGN (type)))
6896 tree tem = const_binop (code, fold_convert (ctype, t),
6897 fold_convert (ctype, c));
6898 /* If the multiplication overflowed, we lost information on it.
6899 See PR68142 and PR69845. */
6900 if (TREE_OVERFLOW (tem))
6901 return NULL_TREE;
6902 return tem;
6904 break;
6906 CASE_CONVERT: case NON_LVALUE_EXPR:
6907 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6908 break;
6909 /* If op0 is an expression ... */
6910 if ((COMPARISON_CLASS_P (op0)
6911 || UNARY_CLASS_P (op0)
6912 || BINARY_CLASS_P (op0)
6913 || VL_EXP_CLASS_P (op0)
6914 || EXPRESSION_CLASS_P (op0))
6915 /* ... and has wrapping overflow, and its type is smaller
6916 than ctype, then we cannot pass through as widening. */
6917 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6918 && (TYPE_PRECISION (ctype)
6919 > TYPE_PRECISION (TREE_TYPE (op0))))
6920 /* ... or this is a truncation (t is narrower than op0),
6921 then we cannot pass through this narrowing. */
6922 || (TYPE_PRECISION (type)
6923 < TYPE_PRECISION (TREE_TYPE (op0)))
6924 /* ... or signedness changes for division or modulus,
6925 then we cannot pass through this conversion. */
6926 || (code != MULT_EXPR
6927 && (TYPE_UNSIGNED (ctype)
6928 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6929 /* ... or has undefined overflow while the converted to
6930 type has not, we cannot do the operation in the inner type
6931 as that would introduce undefined overflow. */
6932 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6933 && !TYPE_OVERFLOW_UNDEFINED (type))))
6934 break;
6936 /* Pass the constant down and see if we can make a simplification. If
6937 we can, replace this expression with the inner simplification for
6938 possible later conversion to our or some other type. */
6939 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6940 && TREE_CODE (t2) == INTEGER_CST
6941 && !TREE_OVERFLOW (t2)
6942 && (t1 = extract_muldiv (op0, t2, code,
6943 code == MULT_EXPR ? ctype : NULL_TREE,
6944 strict_overflow_p)) != 0)
6945 return t1;
6946 break;
6948 case ABS_EXPR:
6949 /* If widening the type changes it from signed to unsigned, then we
6950 must avoid building ABS_EXPR itself as unsigned. */
6951 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6953 tree cstype = (*signed_type_for) (ctype);
6954 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6955 != 0)
6957 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6958 return fold_convert (ctype, t1);
6960 break;
6962 /* If the constant is negative, we cannot simplify this. */
6963 if (tree_int_cst_sgn (c) == -1)
6964 break;
6965 /* FALLTHROUGH */
6966 case NEGATE_EXPR:
6967 /* For division and modulus, type can't be unsigned, as e.g.
6968 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6969 For signed types, even with wrapping overflow, this is fine. */
6970 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6971 break;
6972 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6973 != 0)
6974 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6975 break;
6977 case MIN_EXPR: case MAX_EXPR:
6978 /* If widening the type changes the signedness, then we can't perform
6979 this optimization as that changes the result. */
6980 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6981 break;
6983 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6984 sub_strict_overflow_p = false;
6985 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6986 &sub_strict_overflow_p)) != 0
6987 && (t2 = extract_muldiv (op1, c, code, wide_type,
6988 &sub_strict_overflow_p)) != 0)
6990 if (tree_int_cst_sgn (c) < 0)
6991 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6992 if (sub_strict_overflow_p)
6993 *strict_overflow_p = true;
6994 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6995 fold_convert (ctype, t2));
6997 break;
6999 case LSHIFT_EXPR: case RSHIFT_EXPR:
7000 /* If the second operand is constant, this is a multiplication
7001 or floor division, by a power of two, so we can treat it that
7002 way unless the multiplier or divisor overflows. Signed
7003 left-shift overflow is implementation-defined rather than
7004 undefined in C90, so do not convert signed left shift into
7005 multiplication. */
7006 if (TREE_CODE (op1) == INTEGER_CST
7007 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7008 /* const_binop may not detect overflow correctly,
7009 so check for it explicitly here. */
7010 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7011 wi::to_wide (op1))
7012 && (t1 = fold_convert (ctype,
7013 const_binop (LSHIFT_EXPR, size_one_node,
7014 op1))) != 0
7015 && !TREE_OVERFLOW (t1))
7016 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7017 ? MULT_EXPR : FLOOR_DIV_EXPR,
7018 ctype,
7019 fold_convert (ctype, op0),
7020 t1),
7021 c, code, wide_type, strict_overflow_p);
7022 break;
7024 case PLUS_EXPR: case MINUS_EXPR:
7025 /* See if we can eliminate the operation on both sides. If we can, we
7026 can return a new PLUS or MINUS. If we can't, the only remaining
7027 cases where we can do anything are if the second operand is a
7028 constant. */
7029 sub_strict_overflow_p = false;
7030 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7031 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7032 if (t1 != 0 && t2 != 0
7033 && TYPE_OVERFLOW_WRAPS (ctype)
7034 && (code == MULT_EXPR
7035 /* If not multiplication, we can only do this if both operands
7036 are divisible by c. */
7037 || (multiple_of_p (ctype, op0, c)
7038 && multiple_of_p (ctype, op1, c))))
7040 if (sub_strict_overflow_p)
7041 *strict_overflow_p = true;
7042 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7043 fold_convert (ctype, t2));
7046 /* If this was a subtraction, negate OP1 and set it to be an addition.
7047 This simplifies the logic below. */
7048 if (tcode == MINUS_EXPR)
7050 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7051 /* If OP1 was not easily negatable, the constant may be OP0. */
7052 if (TREE_CODE (op0) == INTEGER_CST)
7054 std::swap (op0, op1);
7055 std::swap (t1, t2);
7059 if (TREE_CODE (op1) != INTEGER_CST)
7060 break;
7062 /* If either OP1 or C are negative, this optimization is not safe for
7063 some of the division and remainder types while for others we need
7064 to change the code. */
7065 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7067 if (code == CEIL_DIV_EXPR)
7068 code = FLOOR_DIV_EXPR;
7069 else if (code == FLOOR_DIV_EXPR)
7070 code = CEIL_DIV_EXPR;
7071 else if (code != MULT_EXPR
7072 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7073 break;
7076 /* If it's a multiply or a division/modulus operation of a multiple
7077 of our constant, do the operation and verify it doesn't overflow. */
7078 if (code == MULT_EXPR
7079 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7080 TYPE_SIGN (type)))
7082 op1 = const_binop (code, fold_convert (ctype, op1),
7083 fold_convert (ctype, c));
7084 /* We allow the constant to overflow with wrapping semantics. */
7085 if (op1 == 0
7086 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7087 break;
7089 else
7090 break;
7092 /* If we have an unsigned type, we cannot widen the operation since it
7093 will change the result if the original computation overflowed. */
7094 if (TYPE_UNSIGNED (ctype) && ctype != type)
7095 break;
7097 /* The last case is if we are a multiply. In that case, we can
7098 apply the distributive law to commute the multiply and addition
7099 if the multiplication of the constants doesn't overflow
7100 and overflow is defined. With undefined overflow
7101 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7102 But fold_plusminus_mult_expr would factor back any power-of-two
7103 value so do not distribute in the first place in this case. */
7104 if (code == MULT_EXPR
7105 && TYPE_OVERFLOW_WRAPS (ctype)
7106 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7107 return fold_build2 (tcode, ctype,
7108 fold_build2 (code, ctype,
7109 fold_convert (ctype, op0),
7110 fold_convert (ctype, c)),
7111 op1);
7113 break;
7115 case MULT_EXPR:
7116 /* We have a special case here if we are doing something like
7117 (C * 8) % 4 since we know that's zero. */
7118 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7119 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7120 /* If the multiplication can overflow we cannot optimize this. */
7121 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7122 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7123 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7124 TYPE_SIGN (type)))
7126 *strict_overflow_p = true;
7127 return omit_one_operand (type, integer_zero_node, op0);
7130 /* ... fall through ... */
7132 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7133 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7134 /* If we can extract our operation from the LHS, do so and return a
7135 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7136 do something only if the second operand is a constant. */
7137 if (same_p
7138 && TYPE_OVERFLOW_WRAPS (ctype)
7139 && (t1 = extract_muldiv (op0, c, code, wide_type,
7140 strict_overflow_p)) != 0)
7141 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7142 fold_convert (ctype, op1));
7143 else if (tcode == MULT_EXPR && code == MULT_EXPR
7144 && TYPE_OVERFLOW_WRAPS (ctype)
7145 && (t1 = extract_muldiv (op1, c, code, wide_type,
7146 strict_overflow_p)) != 0)
7147 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7148 fold_convert (ctype, t1));
7149 else if (TREE_CODE (op1) != INTEGER_CST)
7150 return 0;
7152 /* If these are the same operation types, we can associate them
7153 assuming no overflow. */
7154 if (tcode == code)
7156 bool overflow_p = false;
7157 wi::overflow_type overflow_mul;
7158 signop sign = TYPE_SIGN (ctype);
7159 unsigned prec = TYPE_PRECISION (ctype);
7160 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7161 wi::to_wide (c, prec),
7162 sign, &overflow_mul);
7163 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7164 if (overflow_mul
7165 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7166 overflow_p = true;
7167 if (!overflow_p)
7168 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7169 wide_int_to_tree (ctype, mul));
7172 /* If these operations "cancel" each other, we have the main
7173 optimizations of this pass, which occur when either constant is a
7174 multiple of the other, in which case we replace this with an
7175 operation of either CODE or TCODE.
7177 If we have an unsigned type, we cannot do this since it will change
7178 the result if the original computation overflowed. */
7179 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7180 && !TYPE_OVERFLOW_SANITIZED (ctype)
7181 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7182 || (tcode == MULT_EXPR
7183 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7184 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7185 && code != MULT_EXPR)))
7187 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7188 TYPE_SIGN (type)))
7190 *strict_overflow_p = true;
7191 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7192 fold_convert (ctype,
7193 const_binop (TRUNC_DIV_EXPR,
7194 op1, c)));
7196 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7197 TYPE_SIGN (type)))
7199 *strict_overflow_p = true;
7200 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7201 fold_convert (ctype,
7202 const_binop (TRUNC_DIV_EXPR,
7203 c, op1)));
7206 break;
7208 default:
7209 break;
7212 return 0;
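/* Two worked examples with illustrative values: for unsigned x,
   extract_muldiv of t = (x << 3) by c = 4 with MULT_EXPR first rewrites
   the shift as x * 8 and then associates the two constants into x * 32.
   For signed x with undefined overflow, extract_muldiv of t = (x * 8) by
   c = 4 with EXACT_DIV_EXPR takes the "cancel" path above, returning
   x * 2 and setting *STRICT_OVERFLOW_P.  */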
7215 /* Return a node which has the indicated constant VALUE (either 0 or
7216 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7217 and is of the indicated TYPE. */
7219 tree
7220 constant_boolean_node (bool value, tree type)
7222 if (type == integer_type_node)
7223 return value ? integer_one_node : integer_zero_node;
7224 else if (type == boolean_type_node)
7225 return value ? boolean_true_node : boolean_false_node;
7226 else if (VECTOR_TYPE_P (type))
7227 return build_vector_from_val (type,
7228 build_int_cst (TREE_TYPE (type),
7229 value ? -1 : 0));
7230 else
7231 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7235 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7236 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7237 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7238 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7239 COND is the first argument to CODE; otherwise (as in the example
7240 given here), it is the second argument. TYPE is the type of the
7241 original expression. Return NULL_TREE if no simplification is
7242 possible. */
7244 static tree
7245 fold_binary_op_with_conditional_arg (location_t loc,
7246 enum tree_code code,
7247 tree type, tree op0, tree op1,
7248 tree cond, tree arg, int cond_first_p)
7250 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7251 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7252 tree test, true_value, false_value;
7253 tree lhs = NULL_TREE;
7254 tree rhs = NULL_TREE;
7255 enum tree_code cond_code = COND_EXPR;
7257 /* Do not move possibly trapping operations into the conditional as this
7258 pessimizes code and causes gimplification issues when applied late. */
7259 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7260 ANY_INTEGRAL_TYPE_P (type)
7261 && TYPE_OVERFLOW_TRAPS (type), op1))
7262 return NULL_TREE;
7264 if (TREE_CODE (cond) == COND_EXPR
7265 || TREE_CODE (cond) == VEC_COND_EXPR)
7267 test = TREE_OPERAND (cond, 0);
7268 true_value = TREE_OPERAND (cond, 1);
7269 false_value = TREE_OPERAND (cond, 2);
7270 /* If this operand is an expression that throws (and hence has void
7271 type), it does not make sense to try to perform a logical or
7272 arithmetic operation involving it. */
7273 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7274 lhs = true_value;
7275 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7276 rhs = false_value;
7278 else if (!(TREE_CODE (type) != VECTOR_TYPE
7279 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7281 tree testtype = TREE_TYPE (cond);
7282 test = cond;
7283 true_value = constant_boolean_node (true, testtype);
7284 false_value = constant_boolean_node (false, testtype);
7286 else
7287 /* Detect the case of mixing vector and scalar types - bail out. */
7288 return NULL_TREE;
7290 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7291 cond_code = VEC_COND_EXPR;
7293 /* This transformation is only worthwhile if we don't have to wrap ARG
7294 in a SAVE_EXPR and the operation can be simplified without recursing
7295 on at least one of the branches once it's pushed inside the COND_EXPR. */
7296 if (!TREE_CONSTANT (arg)
7297 && (TREE_SIDE_EFFECTS (arg)
7298 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7299 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7300 return NULL_TREE;
7302 arg = fold_convert_loc (loc, arg_type, arg);
7303 if (lhs == 0)
7305 true_value = fold_convert_loc (loc, cond_type, true_value);
7306 if (cond_first_p)
7307 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7308 else
7309 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7311 if (rhs == 0)
7313 false_value = fold_convert_loc (loc, cond_type, false_value);
7314 if (cond_first_p)
7315 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7316 else
7317 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7320 /* Check that we have simplified at least one of the branches. */
7321 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7322 return NULL_TREE;
7324 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7328 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7330 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7331 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7332 if ARG - ZERO_ARG is the same as ARG.
7334 If ARG is NULL, check for any value of type TYPE.
7336 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7337 and finite. The problematic cases are when X is zero, and its mode
7338 has signed zeros. In the case of rounding towards -infinity,
7339 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7340 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7342 bool
7343 fold_real_zero_addition_p (const_tree type, const_tree arg,
7344 const_tree zero_arg, int negate)
7346 if (!real_zerop (zero_arg))
7347 return false;
7349 /* Don't allow the fold with -fsignaling-nans. */
7350 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7351 return false;
7353 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7354 if (!HONOR_SIGNED_ZEROS (type))
7355 return true;
7357 /* There is no case that is safe for all rounding modes. */
7358 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7359 return false;
7361 /* In a vector or complex, we would need to check the sign of all zeros. */
7362 if (TREE_CODE (zero_arg) == VECTOR_CST)
7363 zero_arg = uniform_vector_p (zero_arg);
7364 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7365 return false;
7367 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7368 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7369 negate = !negate;
7371 /* The mode has signed zeros, and we have to honor their sign.
7372 In this situation, there are only two cases we can return true for.
7373 (i) X - 0 is the same as X with default rounding.
7374 (ii) X + 0 is X when X can't possibly be -0.0. */
7375 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
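/* Editor's note: a standalone demonstration (not part of fold-const.cc) of
   why fold_real_zero_addition_p rejects X + 0.0 when signed zeros are
   honored but can accept X - 0.0 under default rounding.  */

#include <cstdio>
#include <cmath>

int
main ()
{
  double x = -0.0;
  /* -0.0 + 0.0 is +0.0 under round-to-nearest, so X + 0.0 does not
     preserve X when X is -0.0 ...  */
  std::printf ("x + 0.0 -> %s zero\n",
	       std::signbit (x + 0.0) ? "minus" : "plus");
  /* ... whereas -0.0 - 0.0 stays -0.0, so X - 0.0 does preserve X.  */
  std::printf ("x - 0.0 -> %s zero\n",
	       std::signbit (x - 0.0) ? "minus" : "plus");
  return 0;
}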
7378 /* Subroutine of match.pd that optimizes comparisons of a division by
7379 a nonzero integer constant against an integer constant, i.e.
7380 X/C1 op C2.
7382 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7383 GE_EXPR or LE_EXPR. C1 and C2 must each be an INTEGER_CST. */
7385 enum tree_code
7386 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7387 tree *hi, bool *neg_overflow)
7389 tree prod, tmp, type = TREE_TYPE (c1);
7390 signop sign = TYPE_SIGN (type);
7391 wi::overflow_type overflow;
7393 /* We have to do this the hard way to detect unsigned overflow.
7394 prod = int_const_binop (MULT_EXPR, c1, c2); */
7395 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7396 prod = force_fit_type (type, val, -1, overflow);
7397 *neg_overflow = false;
7399 if (sign == UNSIGNED)
7401 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7402 *lo = prod;
7404 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7405 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7406 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7408 else if (tree_int_cst_sgn (c1) >= 0)
7410 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7411 switch (tree_int_cst_sgn (c2))
7413 case -1:
7414 *neg_overflow = true;
7415 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7416 *hi = prod;
7417 break;
7419 case 0:
7420 *lo = fold_negate_const (tmp, type);
7421 *hi = tmp;
7422 break;
7424 case 1:
7425 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7426 *lo = prod;
7427 break;
7429 default:
7430 gcc_unreachable ();
7433 else
7435 /* A negative divisor reverses the relational operators. */
7436 code = swap_tree_comparison (code);
7438 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7439 switch (tree_int_cst_sgn (c2))
7441 case -1:
7442 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7443 *lo = prod;
7444 break;
7446 case 0:
7447 *hi = fold_negate_const (tmp, type);
7448 *lo = tmp;
7449 break;
7451 case 1:
7452 *neg_overflow = true;
7453 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7454 *hi = prod;
7455 break;
7457 default:
7458 gcc_unreachable ();
7462 if (code != EQ_EXPR && code != NE_EXPR)
7463 return code;
7465 if (TREE_OVERFLOW (*lo)
7466 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7467 *lo = NULL_TREE;
7468 if (TREE_OVERFLOW (*hi)
7469 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7470 *hi = NULL_TREE;
7472 return code;
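/* Editor's note: a standalone demonstration (not part of fold-const.cc) of
   the range fold_div_compare derives.  For unsigned division, X / C1 == C2
   is equivalent to X in [C1*C2, C1*C2 + (C1-1)]; with C1 = 3 and C2 = 5
   that is [15, 17].  */

#include <cstdio>

int
main ()
{
  const unsigned c1 = 3, c2 = 5;
  unsigned lo = c1 * c2;	/* prod */
  unsigned hi = lo + (c1 - 1);	/* prod + (c1 - 1) */
  for (unsigned x = lo - 1; x <= hi + 1; x++)
    std::printf ("x=%2u: x/c1==c2 is %d, lo<=x<=hi is %d\n",
		 x, x / c1 == c2, lo <= x && x <= hi);
  return 0;
}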
7475 /* Test whether it is preferable to swap two operands, ARG0 and
7476 ARG1, for example because ARG0 is an integer constant and ARG1
7477 isn't. */
7479 bool
7480 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7482 if (CONSTANT_CLASS_P (arg1))
7483 return false;
7484 if (CONSTANT_CLASS_P (arg0))
7485 return true;
7487 STRIP_NOPS (arg0);
7488 STRIP_NOPS (arg1);
7490 if (TREE_CONSTANT (arg1))
7491 return false;
7492 if (TREE_CONSTANT (arg0))
7493 return true;
7495 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7496 for commutative and comparison operators. Ensuring a canonical
7497 form allows the optimizers to find additional redundancies without
7498 having to explicitly check for both orderings. */
7499 if (TREE_CODE (arg0) == SSA_NAME
7500 && TREE_CODE (arg1) == SSA_NAME
7501 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7502 return true;
7504 /* Put SSA_NAMEs last. */
7505 if (TREE_CODE (arg1) == SSA_NAME)
7506 return false;
7507 if (TREE_CODE (arg0) == SSA_NAME)
7508 return true;
7510 /* Put variables last. */
7511 if (DECL_P (arg1))
7512 return false;
7513 if (DECL_P (arg0))
7514 return true;
7516 return false;
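/* Editor's note: an illustrative sketch, not part of fold-const.cc.  It
   assumes GCC's internal APIs and shows the usual way callers apply
   tree_swap_operands_p; canonicalize_operands is a hypothetical helper.  */

static void
canonicalize_operands (tree &op0, tree &op1)
{
  /* After this, constants sit in OP1 and SSA_NAMEs are ordered by
     version number, the canonical form the folders and match.pd
     patterns expect for commutative codes.  */
  if (tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);
}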
7520 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7521 means A >= Y && A != MAX, but in this case we know that
7522 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7524 static tree
7525 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7527 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7529 if (TREE_CODE (bound) == LT_EXPR)
7530 a = TREE_OPERAND (bound, 0);
7531 else if (TREE_CODE (bound) == GT_EXPR)
7532 a = TREE_OPERAND (bound, 1);
7533 else
7534 return NULL_TREE;
7536 typea = TREE_TYPE (a);
7537 if (!INTEGRAL_TYPE_P (typea)
7538 && !POINTER_TYPE_P (typea))
7539 return NULL_TREE;
7541 if (TREE_CODE (ineq) == LT_EXPR)
7543 a1 = TREE_OPERAND (ineq, 1);
7544 y = TREE_OPERAND (ineq, 0);
7546 else if (TREE_CODE (ineq) == GT_EXPR)
7548 a1 = TREE_OPERAND (ineq, 0);
7549 y = TREE_OPERAND (ineq, 1);
7551 else
7552 return NULL_TREE;
7554 if (TREE_TYPE (a1) != typea)
7555 return NULL_TREE;
7557 if (POINTER_TYPE_P (typea))
7559 /* Convert the pointers to integers before taking the difference. */
7560 tree ta = fold_convert_loc (loc, ssizetype, a);
7561 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7562 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7564 else
7565 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7567 if (!diff || !integer_onep (diff))
7568 return NULL_TREE;
7570 return fold_build2_loc (loc, GE_EXPR, type, a, y);
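/* Editor's note: a standalone exhaustive check (not part of fold-const.cc)
   of the identity behind fold_to_nonsharp_ineq_using_bound: as long as
   A + 1 cannot wrap (guaranteed above by A < X <= MAX), A + 1 > Y is the
   same as A >= Y.  */

#include <cstdio>

int
main ()
{
  for (int a = 0; a < 255; a++)		/* a < 255, so a + 1 cannot wrap */
    for (int y = 0; y < 256; y++)
      if ((a + 1 > y) != (a >= y))
	{
	  std::printf ("counterexample: a=%d y=%d\n", a, y);
	  return 1;
	}
  std::printf ("A + 1 > Y matches A >= Y on all tested values\n");
  return 0;
}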
7573 /* Fold a sum or difference of at least one multiplication.
7574 Returns the folded tree or NULL if no simplification could be made. */
7576 static tree
7577 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7578 tree arg0, tree arg1)
7580 tree arg00, arg01, arg10, arg11;
7581 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7583 /* (A * C) +- (B * C) -> (A+-B) * C.
7584 (A * C) +- A -> A * (C+-1).
7585 We are most concerned about the case where C is a constant,
7586 but other combinations show up during loop reduction. Since
7587 it is not difficult, try all four possibilities. */
7589 if (TREE_CODE (arg0) == MULT_EXPR)
7591 arg00 = TREE_OPERAND (arg0, 0);
7592 arg01 = TREE_OPERAND (arg0, 1);
7594 else if (TREE_CODE (arg0) == INTEGER_CST)
7596 arg00 = build_one_cst (type);
7597 arg01 = arg0;
7599 else
7601 /* We cannot generate constant 1 for fract. */
7602 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7603 return NULL_TREE;
7604 arg00 = arg0;
7605 arg01 = build_one_cst (type);
7607 if (TREE_CODE (arg1) == MULT_EXPR)
7609 arg10 = TREE_OPERAND (arg1, 0);
7610 arg11 = TREE_OPERAND (arg1, 1);
7612 else if (TREE_CODE (arg1) == INTEGER_CST)
7614 arg10 = build_one_cst (type);
7615 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7616 the purpose of this canonicalization. */
7617 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7618 && negate_expr_p (arg1)
7619 && code == PLUS_EXPR)
7621 arg11 = negate_expr (arg1);
7622 code = MINUS_EXPR;
7624 else
7625 arg11 = arg1;
7627 else
7629 /* We cannot generate constant 1 for fract. */
7630 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7631 return NULL_TREE;
7632 arg10 = arg1;
7633 arg11 = build_one_cst (type);
7635 same = NULL_TREE;
7637 /* Prefer factoring a common non-constant. */
7638 if (operand_equal_p (arg00, arg10, 0))
7639 same = arg00, alt0 = arg01, alt1 = arg11;
7640 else if (operand_equal_p (arg01, arg11, 0))
7641 same = arg01, alt0 = arg00, alt1 = arg10;
7642 else if (operand_equal_p (arg00, arg11, 0))
7643 same = arg00, alt0 = arg01, alt1 = arg10;
7644 else if (operand_equal_p (arg01, arg10, 0))
7645 same = arg01, alt0 = arg00, alt1 = arg11;
7647 /* No identical multiplicands; see if we can find a common
7648 power-of-two factor in non-power-of-two multiplies. This
7649 can help in multi-dimensional array access. */
7650 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7652 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7653 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7654 HOST_WIDE_INT tmp;
7655 bool swap = false;
7656 tree maybe_same;
7658 /* Move min of absolute values to int11. */
7659 if (absu_hwi (int01) < absu_hwi (int11))
7661 tmp = int01, int01 = int11, int11 = tmp;
7662 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7663 maybe_same = arg01;
7664 swap = true;
7666 else
7667 maybe_same = arg11;
7669 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7670 if (factor > 1
7671 && pow2p_hwi (factor)
7672 && (int01 & (factor - 1)) == 0
7673 /* The remainder should not be a constant, otherwise we
7674 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7675 increase the number of multiplications necessary. */
7676 && TREE_CODE (arg10) != INTEGER_CST)
7678 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7679 build_int_cst (TREE_TYPE (arg00),
7680 int01 / int11));
7681 alt1 = arg10;
7682 same = maybe_same;
7683 if (swap)
7684 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7688 if (!same)
7689 return NULL_TREE;
7691 if (! ANY_INTEGRAL_TYPE_P (type)
7692 || TYPE_OVERFLOW_WRAPS (type)
7693 /* We are neither factoring zero nor minus one. */
7694 || TREE_CODE (same) == INTEGER_CST)
7695 return fold_build2_loc (loc, MULT_EXPR, type,
7696 fold_build2_loc (loc, code, type,
7697 fold_convert_loc (loc, type, alt0),
7698 fold_convert_loc (loc, type, alt1)),
7699 fold_convert_loc (loc, type, same));
7701 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7702 same may be minus one and thus the multiplication may overflow. Perform
7703 the sum operation in an unsigned type. */
7704 tree utype = unsigned_type_for (type);
7705 tree tem = fold_build2_loc (loc, code, utype,
7706 fold_convert_loc (loc, utype, alt0),
7707 fold_convert_loc (loc, utype, alt1));
7708 /* If the sum evaluated to a constant that is not -INF, the multiplication
7709 cannot overflow. */
7710 if (TREE_CODE (tem) == INTEGER_CST
7711 && (wi::to_wide (tem)
7712 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7713 return fold_build2_loc (loc, MULT_EXPR, type,
7714 fold_convert (type, tem), same);
7716 /* Do not resort to unsigned multiplication because
7717 we lose the no-overflow property of the expression. */
7718 return NULL_TREE;
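/* Editor's note: a standalone demonstration (not part of fold-const.cc) of
   the two factorings performed above, on unsigned values where overflow
   wraps and the transformations are unconditionally safe.  */

#include <cstdio>

int
main ()
{
  unsigned a = 7, b = 9, c = 5;
  /* (A * C) + (B * C) -> (A + B) * C.  */
  std::printf ("%u == %u\n", a * c + b * c, (a + b) * c);
  /* Common power-of-two factor: i * 8 + j * 4 -> (i * 2 + j) * 4.  */
  unsigned i = 3, j = 11;
  std::printf ("%u == %u\n", i * 8 + j * 4, (i * 2 + j) * 4);
  return 0;
}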
7721 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7722 specified by EXPR into the buffer PTR of length LEN bytes.
7723 Return the number of bytes placed in the buffer, or zero
7724 upon failure. */
7726 static int
7727 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7729 tree type = TREE_TYPE (expr);
7730 int total_bytes;
7731 if (TREE_CODE (type) == BITINT_TYPE)
7733 struct bitint_info info;
7734 gcc_assert (targetm.c.bitint_type_info (TYPE_PRECISION (type),
7735 &info));
7736 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7737 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7739 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7740 /* More work is needed when adding _BitInt support to PDP endian
7741 if limb is smaller than word, or if _BitInt limb ordering doesn't
7742 match target endianness here. */
7743 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7744 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7745 || (GET_MODE_SIZE (limb_mode)
7746 >= UNITS_PER_WORD)));
7748 else
7749 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7751 else
7752 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7753 int byte, offset, word, words;
7754 unsigned char value;
7756 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7757 return 0;
7758 if (off == -1)
7759 off = 0;
7761 if (ptr == NULL)
7762 /* Dry run. */
7763 return MIN (len, total_bytes - off);
7765 words = total_bytes / UNITS_PER_WORD;
7767 for (byte = 0; byte < total_bytes; byte++)
7769 int bitpos = byte * BITS_PER_UNIT;
7770 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7771 number of bytes. */
7772 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7774 if (total_bytes > UNITS_PER_WORD)
7776 word = byte / UNITS_PER_WORD;
7777 if (WORDS_BIG_ENDIAN)
7778 word = (words - 1) - word;
7779 offset = word * UNITS_PER_WORD;
7780 if (BYTES_BIG_ENDIAN)
7781 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7782 else
7783 offset += byte % UNITS_PER_WORD;
7785 else
7786 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7787 if (offset >= off && offset - off < len)
7788 ptr[offset - off] = value;
7790 return MIN (len, total_bytes - off);
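/* Editor's note: a standalone sketch (not part of fold-const.cc) of the
   byte-placement logic above for the common case where the whole value
   fits in one word (total_bytes <= UNITS_PER_WORD): byte B of the value
   lands at offset B, or at total_bytes - 1 - B on a big-endian target.  */

#include <cstdio>

static void
encode_bytes (unsigned long long v, unsigned char *ptr, int total_bytes,
	      bool bytes_big_endian)
{
  for (int byte = 0; byte < total_bytes; byte++)
    {
      unsigned char value = (v >> (byte * 8)) & 0xff;
      int offset = bytes_big_endian ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
}

int
main ()
{
  unsigned char buf[4];
  encode_bytes (0x11223344, buf, 4, true);
  std::printf ("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);
  return 0;
}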
7794 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7795 specified by EXPR into the buffer PTR of length LEN bytes.
7796 Return the number of bytes placed in the buffer, or zero
7797 upon failure. */
7799 static int
7800 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7802 tree type = TREE_TYPE (expr);
7803 scalar_mode mode = SCALAR_TYPE_MODE (type);
7804 int total_bytes = GET_MODE_SIZE (mode);
7805 FIXED_VALUE_TYPE value;
7806 tree i_value, i_type;
7808 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7809 return 0;
7811 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7813 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7814 return 0;
7816 value = TREE_FIXED_CST (expr);
7817 i_value = double_int_to_tree (i_type, value.data);
7819 return native_encode_int (i_value, ptr, len, off);
7823 /* Subroutine of native_encode_expr. Encode the REAL_CST
7824 specified by EXPR into the buffer PTR of length LEN bytes.
7825 Return the number of bytes placed in the buffer, or zero
7826 upon failure. */
7828 static int
7829 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7831 tree type = TREE_TYPE (expr);
7832 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7833 int byte, offset, word, words, bitpos;
7834 unsigned char value;
7836 /* There are always 32 bits in each long, no matter the size of
7837 the host's long. We handle floating point representations with
7838 up to 192 bits. */
7839 long tmp[6];
7841 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7842 return 0;
7843 if (off == -1)
7844 off = 0;
7846 if (ptr == NULL)
7847 /* Dry run. */
7848 return MIN (len, total_bytes - off);
7850 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7852 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7854 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7855 bitpos += BITS_PER_UNIT)
7857 byte = (bitpos / BITS_PER_UNIT) & 3;
7858 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7860 if (UNITS_PER_WORD < 4)
7862 word = byte / UNITS_PER_WORD;
7863 if (WORDS_BIG_ENDIAN)
7864 word = (words - 1) - word;
7865 offset = word * UNITS_PER_WORD;
7866 if (BYTES_BIG_ENDIAN)
7867 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7868 else
7869 offset += byte % UNITS_PER_WORD;
7871 else
7873 offset = byte;
7874 if (BYTES_BIG_ENDIAN)
7876 /* Reverse bytes within each long, or within the entire float
7877 if it's smaller than a long (for HFmode). */
7878 offset = MIN (3, total_bytes - 1) - offset;
7879 gcc_assert (offset >= 0);
7882 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7883 if (offset >= off
7884 && offset - off < len)
7885 ptr[offset - off] = value;
7887 return MIN (len, total_bytes - off);
7890 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7891 specified by EXPR into the buffer PTR of length LEN bytes.
7892 Return the number of bytes placed in the buffer, or zero
7893 upon failure. */
7895 static int
7896 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7898 int rsize, isize;
7899 tree part;
7901 part = TREE_REALPART (expr);
7902 rsize = native_encode_expr (part, ptr, len, off);
7903 if (off == -1 && rsize == 0)
7904 return 0;
7905 part = TREE_IMAGPART (expr);
7906 if (off != -1)
7907 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7908 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7909 len - rsize, off);
7910 if (off == -1 && isize != rsize)
7911 return 0;
7912 return rsize + isize;
7915 /* Like native_encode_vector, but only encode the first COUNT elements.
7916 The other arguments are as for native_encode_vector. */
7918 static int
7919 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7920 int off, unsigned HOST_WIDE_INT count)
7922 tree itype = TREE_TYPE (TREE_TYPE (expr));
7923 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7924 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7926 /* This is the only case in which elements can be smaller than a byte.
7927 Element 0 is always in the lsb of the containing byte. */
7928 unsigned int elt_bits = TYPE_PRECISION (itype);
7929 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7930 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7931 return 0;
7933 if (off == -1)
7934 off = 0;
7936 /* Zero the buffer and then set bits later where necessary. */
7937 int extract_bytes = MIN (len, total_bytes - off);
7938 if (ptr)
7939 memset (ptr, 0, extract_bytes);
7941 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7942 unsigned int first_elt = off * elts_per_byte;
7943 unsigned int extract_elts = extract_bytes * elts_per_byte;
7944 for (unsigned int i = 0; i < extract_elts; ++i)
7946 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7947 if (TREE_CODE (elt) != INTEGER_CST)
7948 return 0;
7950 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7952 unsigned int bit = i * elt_bits;
7953 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7956 return extract_bytes;
7959 int offset = 0;
7960 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7961 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7963 if (off >= size)
7965 off -= size;
7966 continue;
7968 tree elem = VECTOR_CST_ELT (expr, i);
7969 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7970 len - offset, off);
7971 if ((off == -1 && res != size) || res == 0)
7972 return 0;
7973 offset += res;
7974 if (offset >= len)
7975 return (off == -1 && i < count - 1) ? 0 : offset;
7976 if (off != -1)
7977 off = 0;
7979 return offset;
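/* Editor's note: a standalone demonstration (not part of fold-const.cc) of
   the sub-byte packing used above for boolean vectors: element I occupies
   bit I * ELT_BITS, with element 0 in the least significant bit.  */

#include <cstdio>

int
main ()
{
  const unsigned elt_bits = 1;
  bool elts[8] = { 1, 0, 1, 1, 0, 0, 0, 1 };
  unsigned char byte = 0;
  for (unsigned i = 0; i < 8; i++)
    if (elts[i])
      {
	unsigned bit = i * elt_bits;
	byte |= 1 << (bit % 8);
      }
  std::printf ("0x%02x\n", byte);	/* 0x8d: bits 0, 2, 3 and 7 set.  */
  return 0;
}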
7982 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7983 specified by EXPR into the buffer PTR of length LEN bytes.
7984 Return the number of bytes placed in the buffer, or zero
7985 upon failure. */
7987 static int
7988 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7990 unsigned HOST_WIDE_INT count;
7991 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7992 return 0;
7993 return native_encode_vector_part (expr, ptr, len, off, count);
7997 /* Subroutine of native_encode_expr. Encode the STRING_CST
7998 specified by EXPR into the buffer PTR of length LEN bytes.
7999 Return the number of bytes placed in the buffer, or zero
8000 upon failure. */
8002 static int
8003 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8005 tree type = TREE_TYPE (expr);
8007 /* Wide-char strings are encoded in target byte order, so natively
8008 encoding them is trivial. */
8009 if (BITS_PER_UNIT != CHAR_BIT
8010 || TREE_CODE (type) != ARRAY_TYPE
8011 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8012 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8013 return 0;
8015 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8016 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8017 return 0;
8018 if (off == -1)
8019 off = 0;
8020 len = MIN (total_bytes - off, len);
8021 if (ptr == NULL)
8022 /* Dry run. */;
8023 else
8025 int written = 0;
8026 if (off < TREE_STRING_LENGTH (expr))
8028 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8029 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8031 memset (ptr + written, 0, len - written);
8033 return len;
8037 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8038 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8039 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8040 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8041 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8042 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8043 Return the number of bytes placed in the buffer, or zero upon failure. */
8045 int
8046 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8048 /* We don't support starting at a negative offset, and -1 is special. */
8049 if (off < -1)
8050 return 0;
8052 switch (TREE_CODE (expr))
8054 case INTEGER_CST:
8055 return native_encode_int (expr, ptr, len, off);
8057 case REAL_CST:
8058 return native_encode_real (expr, ptr, len, off);
8060 case FIXED_CST:
8061 return native_encode_fixed (expr, ptr, len, off);
8063 case COMPLEX_CST:
8064 return native_encode_complex (expr, ptr, len, off);
8066 case VECTOR_CST:
8067 return native_encode_vector (expr, ptr, len, off);
8069 case STRING_CST:
8070 return native_encode_string (expr, ptr, len, off);
8072 default:
8073 return 0;
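/* Editor's note: an illustrative sketch, not part of fold-const.cc.  It
   assumes GCC's internal APIs as defined in this file and shows the
   encode/interpret round trip used to fold bit reinterpretations;
   reinterpret_constant is a hypothetical helper.  */

static tree
reinterpret_constant (tree expr, tree new_type)
{
  unsigned char buf[64];
  /* OFF == -1 requests the entire constant; zero signals failure.  */
  int len = native_encode_expr (expr, buf, sizeof buf, -1);
  if (len == 0)
    return NULL_TREE;
  return native_interpret_expr (new_type, buf, len);
}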
8077 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8078 and larger than or equal to FIELDSIZE bytes, with underlying mode
8079 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8080 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8082 tree
8083 find_bitfield_repr_type (int fieldsize, int len)
8085 machine_mode mode;
8086 for (int pass = 0; pass < 2; pass++)
8088 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8089 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8090 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8091 && known_eq (GET_MODE_PRECISION (mode),
8092 GET_MODE_BITSIZE (mode))
8093 && known_le (GET_MODE_SIZE (mode), len))
8095 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8096 if (ret && TYPE_MODE (ret) == mode)
8097 return ret;
8101 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8102 if (int_n_enabled_p[i]
8103 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8104 && int_n_trees[i].unsigned_type)
8106 tree ret = int_n_trees[i].unsigned_type;
8107 mode = TYPE_MODE (ret);
8108 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8109 && known_eq (GET_MODE_PRECISION (mode),
8110 GET_MODE_BITSIZE (mode))
8111 && known_le (GET_MODE_SIZE (mode), len))
8112 return ret;
8115 return NULL_TREE;
8118 /* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8119 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8120 must be non-NULL and OFF zero), then in addition to filling the
8121 bytes pointed to by PTR with the value, also clear any bits pointed
8122 to by MASK that are known to be initialized; keep the other mask bits
8123 as is, e.g. for uninitialized padding bits or uninitialized fields. */
8125 int
8126 native_encode_initializer (tree init, unsigned char *ptr, int len,
8127 int off, unsigned char *mask)
8129 int r;
8131 /* We don't support starting at a negative offset, and -1 is special. */
8132 if (off < -1 || init == NULL_TREE)
8133 return 0;
8135 gcc_assert (mask == NULL || (off == 0 && ptr));
8137 STRIP_NOPS (init);
8138 switch (TREE_CODE (init))
8140 case VIEW_CONVERT_EXPR:
8141 case NON_LVALUE_EXPR:
8142 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8143 mask);
8144 default:
8145 r = native_encode_expr (init, ptr, len, off);
8146 if (mask)
8147 memset (mask, 0, r);
8148 return r;
8149 case CONSTRUCTOR:
8150 tree type = TREE_TYPE (init);
8151 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8152 if (total_bytes < 0)
8153 return 0;
8154 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8155 return 0;
8156 int o = off == -1 ? 0 : off;
8157 if (TREE_CODE (type) == ARRAY_TYPE)
8159 tree min_index;
8160 unsigned HOST_WIDE_INT cnt;
8161 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8162 constructor_elt *ce;
8164 if (!TYPE_DOMAIN (type)
8165 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8166 return 0;
8168 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8169 if (fieldsize <= 0)
8170 return 0;
8172 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8173 if (ptr)
8174 memset (ptr, '\0', MIN (total_bytes - off, len));
8176 for (cnt = 0; ; cnt++)
8178 tree val = NULL_TREE, index = NULL_TREE;
8179 HOST_WIDE_INT pos = curpos, count = 0;
8180 bool full = false;
8181 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8183 val = ce->value;
8184 index = ce->index;
8186 else if (mask == NULL
8187 || CONSTRUCTOR_NO_CLEARING (init)
8188 || curpos >= total_bytes)
8189 break;
8190 else
8191 pos = total_bytes;
8193 if (index && TREE_CODE (index) == RANGE_EXPR)
8195 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8196 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8197 return 0;
8198 offset_int wpos
8199 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8200 - wi::to_offset (min_index),
8201 TYPE_PRECISION (sizetype));
8202 wpos *= fieldsize;
8203 if (!wi::fits_shwi_p (wpos))
8204 return 0;
8205 pos = wpos.to_shwi ();
8206 offset_int wcount
8207 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8208 - wi::to_offset (TREE_OPERAND (index, 0)),
8209 TYPE_PRECISION (sizetype));
8210 if (!wi::fits_shwi_p (wcount))
8211 return 0;
8212 count = wcount.to_shwi ();
8214 else if (index)
8216 if (TREE_CODE (index) != INTEGER_CST)
8217 return 0;
8218 offset_int wpos
8219 = wi::sext (wi::to_offset (index)
8220 - wi::to_offset (min_index),
8221 TYPE_PRECISION (sizetype));
8222 wpos *= fieldsize;
8223 if (!wi::fits_shwi_p (wpos))
8224 return 0;
8225 pos = wpos.to_shwi ();
8228 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8230 if (valueinit == -1)
8232 tree zero = build_zero_cst (TREE_TYPE (type));
8233 r = native_encode_initializer (zero, ptr + curpos,
8234 fieldsize, 0,
8235 mask + curpos);
8236 if (TREE_CODE (zero) == CONSTRUCTOR)
8237 ggc_free (zero);
8238 if (!r)
8239 return 0;
8240 valueinit = curpos;
8241 curpos += fieldsize;
8243 while (curpos != pos)
8245 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8246 memcpy (mask + curpos, mask + valueinit, fieldsize);
8247 curpos += fieldsize;
8251 curpos = pos;
8252 if (val)
8255 if (off == -1
8256 || (curpos >= off
8257 && (curpos + fieldsize
8258 <= (HOST_WIDE_INT) off + len)))
8260 if (full)
8262 if (ptr)
8263 memcpy (ptr + (curpos - o), ptr + (pos - o),
8264 fieldsize);
8265 if (mask)
8266 memcpy (mask + curpos, mask + pos, fieldsize);
8268 else if (!native_encode_initializer (val,
8270 ? ptr + curpos - o
8271 : NULL,
8272 fieldsize,
8273 off == -1 ? -1
8274 : 0,
8275 mask
8276 ? mask + curpos
8277 : NULL))
8278 return 0;
8279 else
8281 full = true;
8282 pos = curpos;
8285 else if (curpos + fieldsize > off
8286 && curpos < (HOST_WIDE_INT) off + len)
8288 /* Partial overlap. */
8289 unsigned char *p = NULL;
8290 int no = 0;
8291 int l;
8292 gcc_assert (mask == NULL);
8293 if (curpos >= off)
8295 if (ptr)
8296 p = ptr + curpos - off;
8297 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8298 fieldsize);
8300 else
8302 p = ptr;
8303 no = off - curpos;
8304 l = len;
8306 if (!native_encode_initializer (val, p, l, no, NULL))
8307 return 0;
8309 curpos += fieldsize;
8311 while (count-- != 0);
8313 return MIN (total_bytes - off, len);
8315 else if (TREE_CODE (type) == RECORD_TYPE
8316 || TREE_CODE (type) == UNION_TYPE)
8318 unsigned HOST_WIDE_INT cnt;
8319 constructor_elt *ce;
8320 tree fld_base = TYPE_FIELDS (type);
8321 tree to_free = NULL_TREE;
8323 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8324 if (ptr != NULL)
8325 memset (ptr, '\0', MIN (total_bytes - o, len));
8326 for (cnt = 0; ; cnt++)
8328 tree val = NULL_TREE, field = NULL_TREE;
8329 HOST_WIDE_INT pos = 0, fieldsize;
8330 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8332 if (to_free)
8334 ggc_free (to_free);
8335 to_free = NULL_TREE;
8338 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8340 val = ce->value;
8341 field = ce->index;
8342 if (field == NULL_TREE)
8343 return 0;
8345 pos = int_byte_position (field);
8346 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8347 continue;
8349 else if (mask == NULL
8350 || CONSTRUCTOR_NO_CLEARING (init))
8351 break;
8352 else
8353 pos = total_bytes;
8355 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8357 tree fld;
8358 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8360 if (TREE_CODE (fld) != FIELD_DECL)
8361 continue;
8362 if (fld == field)
8363 break;
8364 if (DECL_PADDING_P (fld))
8365 continue;
8366 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8367 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8368 return 0;
8369 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8370 continue;
8371 break;
8373 if (fld == NULL_TREE)
8375 if (ce == NULL)
8376 break;
8377 return 0;
8379 fld_base = DECL_CHAIN (fld);
8380 if (fld != field)
8382 cnt--;
8383 field = fld;
8384 pos = int_byte_position (field);
8385 val = build_zero_cst (TREE_TYPE (fld));
8386 if (TREE_CODE (val) == CONSTRUCTOR)
8387 to_free = val;
8391 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8392 && TYPE_DOMAIN (TREE_TYPE (field))
8393 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8395 if (mask || off != -1)
8396 return 0;
8397 if (val == NULL_TREE)
8398 continue;
8399 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8400 return 0;
8401 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8402 if (fieldsize < 0
8403 || (int) fieldsize != fieldsize
8404 || (pos + fieldsize) > INT_MAX)
8405 return 0;
8406 if (pos + fieldsize > total_bytes)
8408 if (ptr != NULL && total_bytes < len)
8409 memset (ptr + total_bytes, '\0',
8410 MIN (pos + fieldsize, len) - total_bytes);
8411 total_bytes = pos + fieldsize;
8414 else
8416 if (DECL_SIZE_UNIT (field) == NULL_TREE
8417 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8418 return 0;
8419 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8421 if (fieldsize == 0)
8422 continue;
8424 /* Prepare to deal with integral bit-fields and filter out other
8425 bit-fields that do not start and end on a byte boundary. */
8426 if (DECL_BIT_FIELD (field))
8428 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8429 return 0;
8430 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8431 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8433 bpos %= BITS_PER_UNIT;
8434 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8435 epos = fieldsize % BITS_PER_UNIT;
8436 fieldsize += BITS_PER_UNIT - 1;
8437 fieldsize /= BITS_PER_UNIT;
8439 else if (bpos % BITS_PER_UNIT
8440 || DECL_SIZE (field) == NULL_TREE
8441 || !tree_fits_shwi_p (DECL_SIZE (field))
8442 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8443 return 0;
8446 if (off != -1 && pos + fieldsize <= off)
8447 continue;
8449 if (val == NULL_TREE)
8450 continue;
8452 if (DECL_BIT_FIELD (field)
8453 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8455 /* FIXME: Handle PDP endian. */
8456 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8457 return 0;
8459 if (TREE_CODE (val) != INTEGER_CST)
8460 return 0;
8462 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8463 tree repr_type = NULL_TREE;
8464 HOST_WIDE_INT rpos = 0;
8465 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8467 rpos = int_byte_position (repr);
8468 repr_type = TREE_TYPE (repr);
8470 else
8472 repr_type = find_bitfield_repr_type (fieldsize, len);
8473 if (repr_type == NULL_TREE)
8474 return 0;
8475 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8476 gcc_assert (repr_size > 0 && repr_size <= len);
8477 if (pos + repr_size <= o + len)
8478 rpos = pos;
8479 else
8481 rpos = o + len - repr_size;
8482 gcc_assert (rpos <= pos);
8486 if (rpos > pos)
8487 return 0;
8488 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8489 int diff = (TYPE_PRECISION (repr_type)
8490 - TYPE_PRECISION (TREE_TYPE (field)));
8491 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8492 if (!BYTES_BIG_ENDIAN)
8493 w = wi::lshift (w, bitoff);
8494 else
8495 w = wi::lshift (w, diff - bitoff);
8496 val = wide_int_to_tree (repr_type, w);
8498 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8499 / BITS_PER_UNIT + 1];
8500 int l = native_encode_int (val, buf, sizeof buf, 0);
8501 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8502 return 0;
8504 if (ptr == NULL)
8505 continue;
8507 /* If the bitfield does not start at a byte boundary, handle
8508 the partial byte at the start. */
8509 if (bpos
8510 && (off == -1 || (pos >= off && len >= 1)))
8512 if (!BYTES_BIG_ENDIAN)
8514 int msk = (1 << bpos) - 1;
8515 buf[pos - rpos] &= ~msk;
8516 buf[pos - rpos] |= ptr[pos - o] & msk;
8517 if (mask)
8519 if (fieldsize > 1 || epos == 0)
8520 mask[pos] &= msk;
8521 else
8522 mask[pos] &= (msk | ~((1 << epos) - 1));
8525 else
8527 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8528 buf[pos - rpos] &= msk;
8529 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8530 if (mask)
8532 if (fieldsize > 1 || epos == 0)
8533 mask[pos] &= ~msk;
8534 else
8535 mask[pos] &= (~msk
8536 | ((1 << (BITS_PER_UNIT - epos))
8537 - 1));
8541 /* If the bitfield does not end at a byte boundary, handle
8542 the partial byte at the end. */
8543 if (epos
8544 && (off == -1
8545 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8547 if (!BYTES_BIG_ENDIAN)
8549 int msk = (1 << epos) - 1;
8550 buf[pos - rpos + fieldsize - 1] &= msk;
8551 buf[pos - rpos + fieldsize - 1]
8552 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8553 if (mask && (fieldsize > 1 || bpos == 0))
8554 mask[pos + fieldsize - 1] &= ~msk;
8556 else
8558 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8559 buf[pos - rpos + fieldsize - 1] &= ~msk;
8560 buf[pos - rpos + fieldsize - 1]
8561 |= ptr[pos + fieldsize - 1 - o] & msk;
8562 if (mask && (fieldsize > 1 || bpos == 0))
8563 mask[pos + fieldsize - 1] &= msk;
8566 if (off == -1
8567 || (pos >= off
8568 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8570 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8571 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8572 memset (mask + pos + (bpos != 0), 0,
8573 fieldsize - (bpos != 0) - (epos != 0));
8575 else
8577 /* Partial overlap. */
8578 HOST_WIDE_INT fsz = fieldsize;
8579 gcc_assert (mask == NULL);
8580 if (pos < off)
8582 fsz -= (off - pos);
8583 pos = off;
8585 if (pos + fsz > (HOST_WIDE_INT) off + len)
8586 fsz = (HOST_WIDE_INT) off + len - pos;
8587 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8589 continue;
8592 if (off == -1
8593 || (pos >= off
8594 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8596 int fldsize = fieldsize;
8597 if (off == -1)
8599 tree fld = DECL_CHAIN (field);
8600 while (fld)
8602 if (TREE_CODE (fld) == FIELD_DECL)
8603 break;
8604 fld = DECL_CHAIN (fld);
8606 if (fld == NULL_TREE)
8607 fldsize = len - pos;
8609 r = native_encode_initializer (val, ptr ? ptr + pos - o
8610 : NULL,
8611 fldsize,
8612 off == -1 ? -1 : 0,
8613 mask ? mask + pos : NULL);
8614 if (!r)
8615 return 0;
8616 if (off == -1
8617 && fldsize != fieldsize
8618 && r > fieldsize
8619 && pos + r > total_bytes)
8620 total_bytes = pos + r;
8622 else
8624 /* Partial overlap. */
8625 unsigned char *p = NULL;
8626 int no = 0;
8627 int l;
8628 gcc_assert (mask == NULL);
8629 if (pos >= off)
8631 if (ptr)
8632 p = ptr + pos - off;
8633 l = MIN ((HOST_WIDE_INT) off + len - pos,
8634 fieldsize);
8636 else
8638 p = ptr;
8639 no = off - pos;
8640 l = len;
8642 if (!native_encode_initializer (val, p, l, no, NULL))
8643 return 0;
8646 return MIN (total_bytes - off, len);
8648 return 0;
8653 /* Subroutine of native_interpret_expr. Interpret the contents of
8654 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8655 If the buffer cannot be interpreted, return NULL_TREE. */
8657 static tree
8658 native_interpret_int (tree type, const unsigned char *ptr, int len)
8660 int total_bytes;
8661 if (TREE_CODE (type) == BITINT_TYPE)
8663 struct bitint_info info;
8664 gcc_assert (targetm.c.bitint_type_info (TYPE_PRECISION (type),
8665 &info));
8666 scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8667 if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8669 total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8670 /* More work is needed when adding _BitInt support to PDP endian
8671 if limb is smaller than word, or if _BitInt limb ordering doesn't
8672 match target endianness here. */
8673 gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8674 && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8675 || (GET_MODE_SIZE (limb_mode)
8676 >= UNITS_PER_WORD)));
8678 else
8679 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8681 else
8682 total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8684 if (total_bytes > len
8685 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8686 return NULL_TREE;
8688 wide_int result = wi::from_buffer (ptr, total_bytes);
8690 return wide_int_to_tree (type, result);
8694 /* Subroutine of native_interpret_expr. Interpret the contents of
8695 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8696 If the buffer cannot be interpreted, return NULL_TREE. */
8698 static tree
8699 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8701 scalar_mode mode = SCALAR_TYPE_MODE (type);
8702 int total_bytes = GET_MODE_SIZE (mode);
8703 double_int result;
8704 FIXED_VALUE_TYPE fixed_value;
8706 if (total_bytes > len
8707 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8708 return NULL_TREE;
8710 result = double_int::from_buffer (ptr, total_bytes);
8711 fixed_value = fixed_from_double_int (result, mode);
8713 return build_fixed (type, fixed_value);
8717 /* Subroutine of native_interpret_expr. Interpret the contents of
8718 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8719 If the buffer cannot be interpreted, return NULL_TREE. */
8721 tree
8722 native_interpret_real (tree type, const unsigned char *ptr, int len)
8724 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8725 int total_bytes = GET_MODE_SIZE (mode);
8726 unsigned char value;
8727 /* There are always 32 bits in each long, no matter the size of
8728 the host's long. We handle floating point representations with
8729 up to 192 bits. */
8730 REAL_VALUE_TYPE r;
8731 long tmp[6];
8733 if (total_bytes > len || total_bytes > 24)
8734 return NULL_TREE;
8735 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8737 memset (tmp, 0, sizeof (tmp));
8738 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8739 bitpos += BITS_PER_UNIT)
8741 /* Both OFFSET and BYTE index within a long;
8742 bitpos indexes the whole float. */
8743 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8744 if (UNITS_PER_WORD < 4)
8746 int word = byte / UNITS_PER_WORD;
8747 if (WORDS_BIG_ENDIAN)
8748 word = (words - 1) - word;
8749 offset = word * UNITS_PER_WORD;
8750 if (BYTES_BIG_ENDIAN)
8751 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8752 else
8753 offset += byte % UNITS_PER_WORD;
8755 else
8757 offset = byte;
8758 if (BYTES_BIG_ENDIAN)
8760 /* Reverse bytes within each long, or within the entire float
8761 if it's smaller than a long (for HFmode). */
8762 offset = MIN (3, total_bytes - 1) - offset;
8763 gcc_assert (offset >= 0);
8766 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8768 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8771 real_from_target (&r, tmp, mode);
8772 return build_real (type, r);
8776 /* Subroutine of native_interpret_expr. Interpret the contents of
8777 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8778 If the buffer cannot be interpreted, return NULL_TREE. */
8780 static tree
8781 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8783 tree etype, rpart, ipart;
8784 int size;
8786 etype = TREE_TYPE (type);
8787 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8788 if (size * 2 > len)
8789 return NULL_TREE;
8790 rpart = native_interpret_expr (etype, ptr, size);
8791 if (!rpart)
8792 return NULL_TREE;
8793 ipart = native_interpret_expr (etype, ptr+size, size);
8794 if (!ipart)
8795 return NULL_TREE;
8796 return build_complex (type, rpart, ipart);
8799 /* Read a vector of type TYPE from the target memory image given by BYTES,
8800 which contains LEN bytes. The vector is known to be encodable using
8801 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8803 Return the vector on success, otherwise return null. */
8805 static tree
8806 native_interpret_vector_part (tree type, const unsigned char *bytes,
8807 unsigned int len, unsigned int npatterns,
8808 unsigned int nelts_per_pattern)
8810 tree elt_type = TREE_TYPE (type);
8811 if (VECTOR_BOOLEAN_TYPE_P (type)
8812 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8814 /* This is the only case in which elements can be smaller than a byte.
8815 Element 0 is always in the lsb of the containing byte. */
8816 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8817 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8818 return NULL_TREE;
8820 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8821 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8823 unsigned int bit_index = i * elt_bits;
8824 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8825 unsigned int lsb = bit_index % BITS_PER_UNIT;
8826 builder.quick_push (bytes[byte_index] & (1 << lsb)
8827 ? build_all_ones_cst (elt_type)
8828 : build_zero_cst (elt_type));
8830 return builder.build ();
8833 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8834 if (elt_bytes * npatterns * nelts_per_pattern > len)
8835 return NULL_TREE;
8837 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8838 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8840 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8841 if (!elt)
8842 return NULL_TREE;
8843 builder.quick_push (elt);
8844 bytes += elt_bytes;
8846 return builder.build ();
8849 /* Subroutine of native_interpret_expr. Interpret the contents of
8850 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8851 If the buffer cannot be interpreted, return NULL_TREE. */
8853 static tree
8854 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8856 unsigned HOST_WIDE_INT size;
8858 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8859 || size > len)
8860 return NULL_TREE;
8862 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8863 return native_interpret_vector_part (type, ptr, len, count, 1);
8867 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8868 the buffer PTR of length LEN as a constant of type TYPE. For
8869 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8870 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8871 return NULL_TREE. */
8873 tree
8874 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8876 switch (TREE_CODE (type))
8878 case INTEGER_TYPE:
8879 case ENUMERAL_TYPE:
8880 case BOOLEAN_TYPE:
8881 case POINTER_TYPE:
8882 case REFERENCE_TYPE:
8883 case OFFSET_TYPE:
8884 case BITINT_TYPE:
8885 return native_interpret_int (type, ptr, len);
8887 case REAL_TYPE:
8888 if (tree ret = native_interpret_real (type, ptr, len))
8890 /* For floating point values in composite modes, punt if this
8891 folding doesn't preserve bit representation. As the mode doesn't
8892 have fixed precision while GCC pretends it does, there could be
8893 valid values that GCC can't really represent accurately.
8894 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8895 bit combinations which GCC doesn't preserve. */
8896 unsigned char buf[24 * 2];
8897 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8898 int total_bytes = GET_MODE_SIZE (mode);
8899 memcpy (buf + 24, ptr, total_bytes);
8900 clear_type_padding_in_mask (type, buf + 24);
8901 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8902 || memcmp (buf + 24, buf, total_bytes) != 0)
8903 return NULL_TREE;
8904 return ret;
8906 return NULL_TREE;
8908 case FIXED_POINT_TYPE:
8909 return native_interpret_fixed (type, ptr, len);
8911 case COMPLEX_TYPE:
8912 return native_interpret_complex (type, ptr, len);
8914 case VECTOR_TYPE:
8915 return native_interpret_vector (type, ptr, len);
8917 default:
8918 return NULL_TREE;
8922 /* Returns true if we can interpret the contents of a native encoding
8923 as TYPE. */
8925 bool
8926 can_native_interpret_type_p (tree type)
8928 switch (TREE_CODE (type))
8930 case INTEGER_TYPE:
8931 case ENUMERAL_TYPE:
8932 case BOOLEAN_TYPE:
8933 case POINTER_TYPE:
8934 case REFERENCE_TYPE:
8935 case FIXED_POINT_TYPE:
8936 case REAL_TYPE:
8937 case COMPLEX_TYPE:
8938 case VECTOR_TYPE:
8939 case OFFSET_TYPE:
8940 return true;
8941 default:
8942 return false;
8946 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8947 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8949 tree
8950 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8951 int len)
8953 vec<constructor_elt, va_gc> *elts = NULL;
8954 if (TREE_CODE (type) == ARRAY_TYPE)
8956 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8957 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8958 return NULL_TREE;
8960 HOST_WIDE_INT cnt = 0;
8961 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8963 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8964 return NULL_TREE;
8965 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8967 if (eltsz == 0)
8968 cnt = 0;
8969 HOST_WIDE_INT pos = 0;
8970 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8972 tree v = NULL_TREE;
8973 if (pos >= len || pos + eltsz > len)
8974 return NULL_TREE;
8975 if (can_native_interpret_type_p (TREE_TYPE (type)))
8977 v = native_interpret_expr (TREE_TYPE (type),
8978 ptr + off + pos, eltsz);
8979 if (v == NULL_TREE)
8980 return NULL_TREE;
8982 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8983 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8984 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8985 eltsz);
8986 if (v == NULL_TREE)
8987 return NULL_TREE;
8988 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8990 return build_constructor (type, elts);
8992 if (TREE_CODE (type) != RECORD_TYPE)
8993 return NULL_TREE;
8994 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8996 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
8997 || is_empty_type (TREE_TYPE (field)))
8998 continue;
8999 tree fld = field;
9000 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9001 int diff = 0;
9002 tree v = NULL_TREE;
9003 if (DECL_BIT_FIELD (field))
9005 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9006 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9008 poly_int64 bitoffset;
9009 poly_uint64 field_offset, fld_offset;
9010 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9011 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9012 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9013 else
9014 bitoffset = 0;
9015 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9016 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9017 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9018 - TYPE_PRECISION (TREE_TYPE (field)));
9019 if (!bitoffset.is_constant (&bitoff)
9020 || bitoff < 0
9021 || bitoff > diff)
9022 return NULL_TREE;
9024 else
9026 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9027 return NULL_TREE;
9028 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9029 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9030 bpos %= BITS_PER_UNIT;
9031 fieldsize += bpos;
9032 fieldsize += BITS_PER_UNIT - 1;
9033 fieldsize /= BITS_PER_UNIT;
9034 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9035 if (repr_type == NULL_TREE)
9036 return NULL_TREE;
9037 sz = int_size_in_bytes (repr_type);
9038 if (sz < 0 || sz > len)
9039 return NULL_TREE;
9040 pos = int_byte_position (field);
9041 if (pos < 0 || pos > len || pos + fieldsize > len)
9042 return NULL_TREE;
9043 HOST_WIDE_INT rpos;
9044 if (pos + sz <= len)
9045 rpos = pos;
9046 else
9048 rpos = len - sz;
9049 gcc_assert (rpos <= pos);
9051 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9052 pos = rpos;
9053 diff = (TYPE_PRECISION (repr_type)
9054 - TYPE_PRECISION (TREE_TYPE (field)));
9055 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9056 if (v == NULL_TREE)
9057 return NULL_TREE;
9058 fld = NULL_TREE;
9062 if (fld)
9064 sz = int_size_in_bytes (TREE_TYPE (fld));
9065 if (sz < 0 || sz > len)
9066 return NULL_TREE;
9067 tree byte_pos = byte_position (fld);
9068 if (!tree_fits_shwi_p (byte_pos))
9069 return NULL_TREE;
9070 pos = tree_to_shwi (byte_pos);
9071 if (pos < 0 || pos > len || pos + sz > len)
9072 return NULL_TREE;
9074 if (fld == NULL_TREE)
9075 /* Already handled above. */;
9076 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9078 v = native_interpret_expr (TREE_TYPE (fld),
9079 ptr + off + pos, sz);
9080 if (v == NULL_TREE)
9081 return NULL_TREE;
9083 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9084 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9085 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9086 if (v == NULL_TREE)
9087 return NULL_TREE;
9088 if (fld != field)
9090 if (TREE_CODE (v) != INTEGER_CST)
9091 return NULL_TREE;
9093 /* FIXME: Figure out how to handle PDP endian bitfields. */
9094 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9095 return NULL_TREE;
9096 if (!BYTES_BIG_ENDIAN)
9097 v = wide_int_to_tree (TREE_TYPE (field),
9098 wi::lrshift (wi::to_wide (v), bitoff));
9099 else
9100 v = wide_int_to_tree (TREE_TYPE (field),
9101 wi::lrshift (wi::to_wide (v),
9102 diff - bitoff));
9104 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9106 return build_constructor (type, elts);
9109 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9110 or extracted constant positions and/or sizes aren't byte aligned. */
9112 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9113 bits between adjacent elements. AMNT should be within
9114 [0, BITS_PER_UNIT).
9115 Example, AMNT = 2:
9116 00011111|11100000 << 2 = 01111111|10000000
9117 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9119 void
9120 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9121 unsigned int amnt)
9123 if (amnt == 0)
9124 return;
9126 unsigned char carry_over = 0U;
9127 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9128 unsigned char clear_mask = (~0U) << amnt;
9130 for (unsigned int i = 0; i < sz; i++)
9132 unsigned prev_carry_over = carry_over;
9133 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9135 ptr[i] <<= amnt;
9136 if (i != 0)
9138 ptr[i] &= clear_mask;
9139 ptr[i] |= prev_carry_over;
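/* Editor's note: a standalone re-implementation (not part of fold-const.cc)
   of the carry logic above, checked against the example in the comment:
   00011111|11100000 << 2 = 01111111|10000000, with PTR[1]|PTR[0] order.  */

#include <cstdio>

static void
shl_bytes (unsigned char *ptr, unsigned sz, unsigned amnt)
{
  unsigned char carry = 0;
  for (unsigned i = 0; i < sz; i++)
    {
      unsigned char next_carry = ptr[i] >> (8 - amnt);
      ptr[i] = (ptr[i] << amnt) | carry;	/* truncates to 8 bits */
      carry = next_carry;
    }
}

int
main ()
{
  unsigned char ptr[2] = { 0xe0, 0x1f };  /* PTR[1]|PTR[0] = 00011111|11100000 */
  shl_bytes (ptr, 2, 2);
  std::printf ("%02x %02x\n", ptr[1], ptr[0]);	/* prints: 7f 80 */
  return 0;
}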
9144 /* Like shift_bytes_in_array_left but for big-endian.
9145 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9146 bits between adjacent elements. AMNT should be within
9147 [0, BITS_PER_UNIT).
9148 Example, AMNT = 2:
9149 00011111|11100000 >> 2 = 00000111|11111000
9150 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9152 void
9153 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9154 unsigned int amnt)
9156 if (amnt == 0)
9157 return;
9159 unsigned char carry_over = 0U;
9160 unsigned char carry_mask = ~(~0U << amnt);
9162 for (unsigned int i = 0; i < sz; i++)
9164 unsigned prev_carry_over = carry_over;
9165 carry_over = ptr[i] & carry_mask;
9167 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9168 ptr[i] >>= amnt;
9169 ptr[i] |= prev_carry_over;
9173 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9174 directly on the VECTOR_CST encoding, in a way that works for variable-
9175 length vectors. Return the resulting VECTOR_CST on success or null
9176 on failure. */
9178 static tree
9179 fold_view_convert_vector_encoding (tree type, tree expr)
9181 tree expr_type = TREE_TYPE (expr);
9182 poly_uint64 type_bits, expr_bits;
9183 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9184 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9185 return NULL_TREE;
9187 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9188 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9189 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9190 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9192 /* We can only preserve the semantics of a stepped pattern if the new
9193 vector element is an integer of the same size. */
9194 if (VECTOR_CST_STEPPED_P (expr)
9195 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9196 return NULL_TREE;
9198 /* The number of bits needed to encode one element from every pattern
9199 of the original vector. */
9200 unsigned int expr_sequence_bits
9201 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9203 /* The number of bits needed to encode one element from every pattern
9204 of the result. */
9205 unsigned int type_sequence_bits
9206 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9208 /* Don't try to read more bytes than are available, which can happen
9209 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9210 The general VIEW_CONVERT handling can cope with that case, so there's
9211 no point complicating things here. */
9212 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9213 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9214 BITS_PER_UNIT);
9215 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9216 if (known_gt (buffer_bits, expr_bits))
9217 return NULL_TREE;
9219 /* Get enough bytes of EXPR to form the new encoding. */
9220 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9221 buffer.quick_grow (buffer_bytes);
9222 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9223 buffer_bits / expr_elt_bits)
9224 != (int) buffer_bytes)
9225 return NULL_TREE;
9227 /* Reencode the bytes as TYPE. */
9228 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9229 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9230 type_npatterns, nelts_per_pattern);
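/* Illustrative sketch of the mechanism (assumed shapes, not from the
   original source): view-converting a variable-length duplicate
   { 1, 1, 1, ... } of 32-bit ints to a same-width float vector only
   needs the first four bytes: one element per pattern is encoded,
   reinterpreted, and the duplicate encoding carries over to the
   result. */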
9233 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9234 TYPE at compile-time. If we're unable to perform the conversion
9235 return NULL_TREE. */
9237 static tree
9238 fold_view_convert_expr (tree type, tree expr)
9240 /* We support up to 512-bit values (for V8DFmode). */
9241 unsigned char buffer[64];
9242 int len;
9244 /* Check that the host and target are sane. */
9245 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9246 return NULL_TREE;
9248 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9249 if (tree res = fold_view_convert_vector_encoding (type, expr))
9250 return res;
9252 len = native_encode_expr (expr, buffer, sizeof (buffer));
9253 if (len == 0)
9254 return NULL_TREE;
9256 return native_interpret_expr (type, buffer, len);
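/* Illustrative example (not from the original source): on a
   little-endian target, VIEW_CONVERT_EXPR<int>(1.0f) encodes the float
   as the bytes 00 00 80 3f and reinterprets them, yielding the integer
   constant 0x3f800000. */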
9259 /* Build an expression for the address of T. Folds away INDIRECT_REF
9260 to avoid confusing the gimplify process. */
9262 tree
9263 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9265 /* The size of the object is not relevant when talking about its address. */
9266 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9267 t = TREE_OPERAND (t, 0);
9269 if (INDIRECT_REF_P (t))
9271 t = TREE_OPERAND (t, 0);
9273 if (TREE_TYPE (t) != ptrtype)
9274 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9276 else if (TREE_CODE (t) == MEM_REF
9277 && integer_zerop (TREE_OPERAND (t, 1)))
9279 t = TREE_OPERAND (t, 0);
9281 if (TREE_TYPE (t) != ptrtype)
9282 t = fold_convert_loc (loc, ptrtype, t);
9284 else if (TREE_CODE (t) == MEM_REF
9285 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9286 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9287 TREE_OPERAND (t, 0),
9288 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9289 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9291 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9293 if (TREE_TYPE (t) != ptrtype)
9294 t = fold_convert_loc (loc, ptrtype, t);
9296 else
9297 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9299 return t;
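/* Worked examples (illustrative, not from the original source): &*p
   folds to p (converted to PTRTYPE if needed); &MEM[p + 0] likewise
   folds to p; and &VIEW_CONVERT_EXPR<T>(x) becomes the address of x,
   converted to PTRTYPE. */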
9302 /* Build an expression for the address of T. */
9304 tree
9305 build_fold_addr_expr_loc (location_t loc, tree t)
9307 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9309 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9312 /* Fold a unary expression of code CODE and type TYPE with operand
9313 OP0. Return the folded expression if folding is successful.
9314 Otherwise, return NULL_TREE. */
9316 tree
9317 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9319 tree tem;
9320 tree arg0;
9321 enum tree_code_class kind = TREE_CODE_CLASS (code);
9323 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9324 && TREE_CODE_LENGTH (code) == 1);
9326 arg0 = op0;
9327 if (arg0)
9329 if (CONVERT_EXPR_CODE_P (code)
9330 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9332 /* Don't use STRIP_NOPS, because signedness of argument type
9333 matters. */
9334 STRIP_SIGN_NOPS (arg0);
9336 else
9338 /* Strip any conversions that don't change the mode. This
9339 is safe for every expression, except for a comparison
9340 expression because its signedness is derived from its
9341 operands.
9343 Note that this is done as an internal manipulation within
9344 the constant folder, in order to find the simplest
9345 representation of the arguments so that their form can be
9346 studied. In any case, the appropriate type conversions
9347 should be put back in the tree that will get out of the
9348 constant folder. */
9349 STRIP_NOPS (arg0);
9352 if (CONSTANT_CLASS_P (arg0))
9354 tree tem = const_unop (code, type, arg0);
9355 if (tem)
9357 if (TREE_TYPE (tem) != type)
9358 tem = fold_convert_loc (loc, type, tem);
9359 return tem;
9364 tem = generic_simplify (loc, code, type, op0);
9365 if (tem)
9366 return tem;
9368 if (TREE_CODE_CLASS (code) == tcc_unary)
9370 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9371 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9372 fold_build1_loc (loc, code, type,
9373 fold_convert_loc (loc, TREE_TYPE (op0),
9374 TREE_OPERAND (arg0, 1))));
9375 else if (TREE_CODE (arg0) == COND_EXPR)
9377 tree arg01 = TREE_OPERAND (arg0, 1);
9378 tree arg02 = TREE_OPERAND (arg0, 2);
9379 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9380 arg01 = fold_build1_loc (loc, code, type,
9381 fold_convert_loc (loc,
9382 TREE_TYPE (op0), arg01));
9383 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9384 arg02 = fold_build1_loc (loc, code, type,
9385 fold_convert_loc (loc,
9386 TREE_TYPE (op0), arg02));
9387 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9388 arg01, arg02);
9390 /* If this was a conversion, and all we did was to move into
9391 inside the COND_EXPR, bring it back out. But leave it if
9392 it is a conversion from integer to integer and the
9393 result precision is no wider than a word since such a
9394 conversion is cheap and may be optimized away by combine,
9395 while it couldn't if it were outside the COND_EXPR. Then return
9396 so we don't get into an infinite recursion loop taking the
9397 conversion out and then back in. */
9399 if ((CONVERT_EXPR_CODE_P (code)
9400 || code == NON_LVALUE_EXPR)
9401 && TREE_CODE (tem) == COND_EXPR
9402 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9403 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9404 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9405 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9406 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9407 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9408 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9409 && (INTEGRAL_TYPE_P
9410 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9411 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9412 || flag_syntax_only))
9413 tem = build1_loc (loc, code, type,
9414 build3 (COND_EXPR,
9415 TREE_TYPE (TREE_OPERAND
9416 (TREE_OPERAND (tem, 1), 0)),
9417 TREE_OPERAND (tem, 0),
9418 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9419 TREE_OPERAND (TREE_OPERAND (tem, 2),
9420 0)));
9421 return tem;
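/* Illustrative example of the above (not from the original source):
   (T)(c ? i : j) is first rewritten as c ? (T)i : (T)j; the check above
   then hoists the conversion back out again unless it is an
   integer-to-integer conversion whose result is no wider than a word,
   which is cheap enough to leave duplicated on both arms. */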
9425 switch (code)
9427 case NON_LVALUE_EXPR:
9428 if (!maybe_lvalue_p (op0))
9429 return fold_convert_loc (loc, type, op0);
9430 return NULL_TREE;
9432 CASE_CONVERT:
9433 case FLOAT_EXPR:
9434 case FIX_TRUNC_EXPR:
9435 if (COMPARISON_CLASS_P (op0))
9437 /* If we have (type) (a CMP b) and type is an integral type, return
9438 new expression involving the new type. Canonicalize
9439 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9440 non-integral type.
9441 Do not fold the result as that would not simplify further, and
9442 folding it again would only recurse. */
9443 if (TREE_CODE (type) == BOOLEAN_TYPE)
9444 return build2_loc (loc, TREE_CODE (op0), type,
9445 TREE_OPERAND (op0, 0),
9446 TREE_OPERAND (op0, 1));
9447 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9448 && TREE_CODE (type) != VECTOR_TYPE)
9449 return build3_loc (loc, COND_EXPR, type, op0,
9450 constant_boolean_node (true, type),
9451 constant_boolean_node (false, type));
9454 /* Handle (T *)&A.B.C for A being of type T and B and C
9455 living at offset zero. This occurs frequently in
9456 C++ upcasting and then accessing the base. */
9457 if (TREE_CODE (op0) == ADDR_EXPR
9458 && POINTER_TYPE_P (type)
9459 && handled_component_p (TREE_OPERAND (op0, 0)))
9461 poly_int64 bitsize, bitpos;
9462 tree offset;
9463 machine_mode mode;
9464 int unsignedp, reversep, volatilep;
9465 tree base
9466 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9467 &offset, &mode, &unsignedp, &reversep,
9468 &volatilep);
9469 /* If the reference was to a (constant) zero offset, we can use
9470 the address of the base if it has the same base type
9471 as the result type and the pointer type is unqualified. */
9472 if (!offset
9473 && known_eq (bitpos, 0)
9474 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9475 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9476 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9477 return fold_convert_loc (loc, type,
9478 build_fold_addr_expr_loc (loc, base));
9481 if (TREE_CODE (op0) == MODIFY_EXPR
9482 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9483 /* Detect assigning a bitfield. */
9484 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9485 && DECL_BIT_FIELD
9486 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9488 /* Don't leave an assignment inside a conversion
9489 unless assigning a bitfield. */
9490 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9491 /* First do the assignment, then return converted constant. */
9492 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9493 suppress_warning (tem /* What warning? */);
9494 TREE_USED (tem) = 1;
9495 return tem;
9498 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9499 constant (if x has signed type, the sign bit cannot be set
9500 in c). This folds extension into the BIT_AND_EXPR.
9501 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9502 very likely don't have maximal range for their precision and this
9503 transformation effectively doesn't preserve non-maximal ranges. */
9504 if (TREE_CODE (type) == INTEGER_TYPE
9505 && TREE_CODE (op0) == BIT_AND_EXPR
9506 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9508 tree and_expr = op0;
9509 tree and0 = TREE_OPERAND (and_expr, 0);
9510 tree and1 = TREE_OPERAND (and_expr, 1);
9511 int change = 0;
9513 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9514 || (TYPE_PRECISION (type)
9515 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9516 change = 1;
9517 else if (TYPE_PRECISION (TREE_TYPE (and1))
9518 <= HOST_BITS_PER_WIDE_INT
9519 && tree_fits_uhwi_p (and1))
9521 unsigned HOST_WIDE_INT cst;
9523 cst = tree_to_uhwi (and1);
9524 cst &= HOST_WIDE_INT_M1U
9525 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9526 change = (cst == 0);
9527 if (change
9528 && !flag_syntax_only
9529 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9530 == ZERO_EXTEND))
9532 tree uns = unsigned_type_for (TREE_TYPE (and0));
9533 and0 = fold_convert_loc (loc, uns, and0);
9534 and1 = fold_convert_loc (loc, uns, and1);
9537 if (change)
9539 tem = force_fit_type (type, wi::to_widest (and1), 0,
9540 TREE_OVERFLOW (and1));
9541 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9542 fold_convert_loc (loc, type, and0), tem);
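/* Worked example (illustrative, not from the original source): with
   signed char x, (unsigned int) (x & 0x7f) becomes
   (unsigned int) x & 0x7f: the constant leaves the sign bit of the
   narrow type clear, so the widening can fold into the
   BIT_AND_EXPR. */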
9546 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9547 cast (T1)X will fold away. We assume that this happens when X itself
9548 is a cast. */
9549 if (POINTER_TYPE_P (type)
9550 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9551 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9553 tree arg00 = TREE_OPERAND (arg0, 0);
9554 tree arg01 = TREE_OPERAND (arg0, 1);
9556 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9557 when the pointed type needs higher alignment than
9558 the p+ first operand's pointed type. */
9559 if (!in_gimple_form
9560 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9561 && (min_align_of_type (TREE_TYPE (type))
9562 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9563 return NULL_TREE;
9565 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9566 when type is a reference type and arg00's type is not,
9567 because arg00 could be validly nullptr and if arg01 doesn't return,
9568 we don't want false positive binding of reference to nullptr. */
9569 if (TREE_CODE (type) == REFERENCE_TYPE
9570 && !in_gimple_form
9571 && sanitize_flags_p (SANITIZE_NULL)
9572 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9573 return NULL_TREE;
9575 arg00 = fold_convert_loc (loc, type, arg00);
9576 return fold_build_pointer_plus_loc (loc, arg00, arg01);
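/* Illustrative example (not from the original source): with int *q,
   (int *) ((char *) q p+ 4) becomes ((int *) (char *) q) p+ 4, and the
   back-to-back casts then fold away, leaving q p+ 4. */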
9579 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9580 of the same precision, and X has an integer type not narrower than
9581 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9582 if (INTEGRAL_TYPE_P (type)
9583 && TREE_CODE (op0) == BIT_NOT_EXPR
9584 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9585 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9586 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9588 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9589 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9590 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9591 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9592 fold_convert_loc (loc, type, tem));
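/* Worked example (illustrative, not from the original source): with
   int x, (int) ~(unsigned int) x folds to ~x: int and unsigned int
   have the same precision and the inner cast is not a widening, so
   complementing before or after the cast is equivalent. */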
9595 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9596 type of X and Y (integer types only). */
9597 if (INTEGRAL_TYPE_P (type)
9598 && TREE_CODE (op0) == MULT_EXPR
9599 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9600 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9601 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9602 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9604 /* Be careful not to introduce new overflows. */
9605 tree mult_type;
9606 if (TYPE_OVERFLOW_WRAPS (type))
9607 mult_type = type;
9608 else
9609 mult_type = unsigned_type_for (type);
9611 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9613 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9614 fold_convert_loc (loc, mult_type,
9615 TREE_OPERAND (op0, 0)),
9616 fold_convert_loc (loc, mult_type,
9617 TREE_OPERAND (op0, 1)));
9618 return fold_convert_loc (loc, type, tem);
9622 return NULL_TREE;
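/* A note on the narrowing-multiply transform above (illustrative, not
   from the original source): with long x, y on an LP64 target,
   (unsigned int) (x * y) can become (unsigned int) x * (unsigned int) y,
   since the low bits of a product depend only on the low bits of the
   operands; for a signed narrow type the multiply is first done in the
   corresponding unsigned type to avoid introducing a new signed
   overflow. */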
9624 case VIEW_CONVERT_EXPR:
9625 if (TREE_CODE (op0) == MEM_REF)
9627 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9628 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9629 tem = fold_build2_loc (loc, MEM_REF, type,
9630 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9631 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9632 return tem;
9635 return NULL_TREE;
9637 case NEGATE_EXPR:
9638 tem = fold_negate_expr (loc, arg0);
9639 if (tem)
9640 return fold_convert_loc (loc, type, tem);
9641 return NULL_TREE;
9643 case ABS_EXPR:
9644 /* Convert fabs((double)float) into (double)fabsf(float). */
9645 if (TREE_CODE (arg0) == NOP_EXPR
9646 && TREE_CODE (type) == REAL_TYPE)
9648 tree targ0 = strip_float_extensions (arg0);
9649 if (targ0 != arg0)
9650 return fold_convert_loc (loc, type,
9651 fold_build1_loc (loc, ABS_EXPR,
9652 TREE_TYPE (targ0),
9653 targ0));
9655 return NULL_TREE;
9657 case BIT_NOT_EXPR:
9658 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9659 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9660 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9661 fold_convert_loc (loc, type,
9662 TREE_OPERAND (arg0, 0)))))
9663 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9664 fold_convert_loc (loc, type,
9665 TREE_OPERAND (arg0, 1)));
9666 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9667 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9668 fold_convert_loc (loc, type,
9669 TREE_OPERAND (arg0, 1)))))
9670 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9671 fold_convert_loc (loc, type,
9672 TREE_OPERAND (arg0, 0)), tem);
9674 return NULL_TREE;
9676 case TRUTH_NOT_EXPR:
9677 /* Note that the operand of this must be an int
9678 and its values must be 0 or 1.
9679 ("true" is a fixed value perhaps depending on the language,
9680 but we don't handle values other than 1 correctly yet.) */
9681 tem = fold_truth_not_expr (loc, arg0);
9682 if (!tem)
9683 return NULL_TREE;
9684 return fold_convert_loc (loc, type, tem);
9686 case INDIRECT_REF:
9687 /* Fold *&X to X if X is an lvalue. */
9688 if (TREE_CODE (op0) == ADDR_EXPR)
9690 tree op00 = TREE_OPERAND (op0, 0);
9691 if ((VAR_P (op00)
9692 || TREE_CODE (op00) == PARM_DECL
9693 || TREE_CODE (op00) == RESULT_DECL)
9694 && !TREE_READONLY (op00))
9695 return op00;
9697 return NULL_TREE;
9699 default:
9700 return NULL_TREE;
9701 } /* switch (code) */
9705 /* If the operation was a conversion do _not_ mark a resulting constant
9706 with TREE_OVERFLOW if the original constant was not. These conversions
9707 have implementation defined behavior and retaining the TREE_OVERFLOW
9708 flag here would confuse later passes such as VRP. */
9709 tree
9710 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9711 tree type, tree op0)
9713 tree res = fold_unary_loc (loc, code, type, op0);
9714 if (res
9715 && TREE_CODE (res) == INTEGER_CST
9716 && TREE_CODE (op0) == INTEGER_CST
9717 && CONVERT_EXPR_CODE_P (code))
9718 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9720 return res;
9723 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9724 operands OP0 and OP1. LOC is the location of the resulting expression.
9725 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9726 Return the folded expression if folding is successful. Otherwise,
9727 return NULL_TREE. */
9728 static tree
9729 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9730 tree arg0, tree arg1, tree op0, tree op1)
9732 tree tem;
9734 /* We only do these simplifications if we are optimizing. */
9735 if (!optimize)
9736 return NULL_TREE;
9738 /* Check for things like (A || B) && (A || C). We can convert this
9739 to A || (B && C). Note that either operator can be any of the four
9740 truth and/or operations and the transformation will still be
9741 valid. Also note that we only care about order for the
9742 ANDIF and ORIF operators. If B contains side effects, this
9743 might change the truth-value of A. */
9744 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9745 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9746 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9747 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9748 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9749 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9751 tree a00 = TREE_OPERAND (arg0, 0);
9752 tree a01 = TREE_OPERAND (arg0, 1);
9753 tree a10 = TREE_OPERAND (arg1, 0);
9754 tree a11 = TREE_OPERAND (arg1, 1);
9755 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9756 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9757 && (code == TRUTH_AND_EXPR
9758 || code == TRUTH_OR_EXPR));
9760 if (operand_equal_p (a00, a10, 0))
9761 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9762 fold_build2_loc (loc, code, type, a01, a11));
9763 else if (commutative && operand_equal_p (a00, a11, 0))
9764 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9765 fold_build2_loc (loc, code, type, a01, a10));
9766 else if (commutative && operand_equal_p (a01, a10, 0))
9767 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9768 fold_build2_loc (loc, code, type, a00, a11));
9770 /* This case is tricky because we must either have commutative
9771 operators or else A10 must not have side-effects. */
9773 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9774 && operand_equal_p (a01, a11, 0))
9775 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9776 fold_build2_loc (loc, code, type, a00, a10),
9777 a01);
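/* Worked example (illustrative, not from the original source):
   (a || b) && (a || c) becomes a || (b && c) via the a00 == a10 arm
   above; only the non-short-circuit TRUTH_AND/TRUTH_OR forms are
   treated as commutative, so the evaluation order of a is
   preserved. */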
9780 /* See if we can build a range comparison. */
9781 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9782 return tem;
9784 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9785 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9787 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9788 if (tem)
9789 return fold_build2_loc (loc, code, type, tem, arg1);
9792 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9793 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9795 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9796 if (tem)
9797 return fold_build2_loc (loc, code, type, arg0, tem);
9800 /* Check for the possibility of merging component references. If our
9801 lhs is another similar operation, try to merge its rhs with our
9802 rhs. Then try to merge our lhs and rhs. */
9803 if (TREE_CODE (arg0) == code
9804 && (tem = fold_truth_andor_1 (loc, code, type,
9805 TREE_OPERAND (arg0, 1), arg1)) != 0)
9806 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9808 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9809 return tem;
9811 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9812 if (param_logical_op_non_short_circuit != -1)
9813 logical_op_non_short_circuit
9814 = param_logical_op_non_short_circuit;
9815 if (logical_op_non_short_circuit
9816 && !sanitize_coverage_p ()
9817 && (code == TRUTH_AND_EXPR
9818 || code == TRUTH_ANDIF_EXPR
9819 || code == TRUTH_OR_EXPR
9820 || code == TRUTH_ORIF_EXPR))
9822 enum tree_code ncode, icode;
9824 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9825 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9826 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9828 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9829 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9830 We don't want to pack more than two leaves into a non-IF AND/OR
9831 expression.
9832 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9833 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9834 If the inner right-hand side of the left-hand operand has
9835 side-effects, or isn't simple, then we can't add to it,
9836 as otherwise we might destroy the if-sequence.
9837 if (TREE_CODE (arg0) == icode
9838 && simple_condition_p (arg1)
9839 /* Needed for sequence points to handle trapping and
9840 side-effects. */
9841 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9843 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9844 arg1);
9845 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9846 tem);
9848 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9849 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9850 else if (TREE_CODE (arg1) == icode
9851 && simple_condition_p (arg0)
9852 /* Needed for sequence points to handle trapping and
9853 side-effects. */
9854 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9856 tem = fold_build2_loc (loc, ncode, type,
9857 arg0, TREE_OPERAND (arg1, 0));
9858 return fold_build2_loc (loc, icode, type, tem,
9859 TREE_OPERAND (arg1, 1));
9861 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9862 into (A OR B).
9863 For sequence point consistency, we need to check for trapping
9864 and side-effects. */
9865 else if (code == icode && simple_condition_p (arg0)
9866 && simple_condition_p (arg1))
9867 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9870 return NULL_TREE;
9873 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9874 by changing CODE to reduce the magnitude of constants involved in
9875 ARG0 of the comparison.
9876 Returns a canonicalized comparison tree if a simplification was
9877 possible, otherwise returns NULL_TREE.
9878 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9879 valid if signed overflow is undefined. */
9881 static tree
9882 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9883 tree arg0, tree arg1,
9884 bool *strict_overflow_p)
9886 enum tree_code code0 = TREE_CODE (arg0);
9887 tree t, cst0 = NULL_TREE;
9888 int sgn0;
9890 /* Match A +- CST code arg1. We can change this only if overflow
9891 is undefined. */
9892 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9893 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9894 /* In principle pointers also have undefined overflow behavior,
9895 but that causes problems elsewhere. */
9896 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9897 && (code0 == MINUS_EXPR
9898 || code0 == PLUS_EXPR)
9899 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9900 return NULL_TREE;
9902 /* Identify the constant in arg0 and its sign. */
9903 cst0 = TREE_OPERAND (arg0, 1);
9904 sgn0 = tree_int_cst_sgn (cst0);
9906 /* Overflowed constants and zero will cause problems. */
9907 if (integer_zerop (cst0)
9908 || TREE_OVERFLOW (cst0))
9909 return NULL_TREE;
9911 /* See if we can reduce the magnitude of the constant in
9912 arg0 by changing the comparison code. */
9913 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9914 if (code == LT_EXPR
9915 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9916 code = LE_EXPR;
9917 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9918 else if (code == GT_EXPR
9919 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9920 code = GE_EXPR;
9921 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9922 else if (code == LE_EXPR
9923 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9924 code = LT_EXPR;
9925 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9926 else if (code == GE_EXPR
9927 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9928 code = GT_EXPR;
9929 else
9930 return NULL_TREE;
9931 *strict_overflow_p = true;
9933 /* Now build the constant reduced in magnitude. But not if that
9934 would produce one outside of its type's range. */
9935 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9936 && ((sgn0 == 1
9937 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9938 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9939 || (sgn0 == -1
9940 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9941 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9942 return NULL_TREE;
9944 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9945 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9946 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9947 t = fold_convert (TREE_TYPE (arg1), t);
9949 return fold_build2_loc (loc, code, type, t, arg1);
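/* Worked example (illustrative, not from the original source): for
   signed x where overflow is undefined, x - 2 < y is rewritten as
   x - 1 <= y: LT becomes LE and the constant's magnitude drops by
   one, moving the comparison toward the canonical form. */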
9952 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9953 overflow further. Try to decrease the magnitude of constants involved
9954 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9955 and put sole constants at the second argument position.
9956 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9958 static tree
9959 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9960 tree arg0, tree arg1)
9962 tree t;
9963 bool strict_overflow_p;
9964 const char * const warnmsg = G_("assuming signed overflow does not occur "
9965 "when reducing constant in comparison");
9967 /* Try canonicalization by simplifying arg0. */
9968 strict_overflow_p = false;
9969 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9970 &strict_overflow_p);
9971 if (t)
9973 if (strict_overflow_p)
9974 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9975 return t;
9978 /* Try canonicalization by simplifying arg1 using the swapped
9979 comparison. */
9980 code = swap_tree_comparison (code);
9981 strict_overflow_p = false;
9982 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9983 &strict_overflow_p);
9984 if (t && strict_overflow_p)
9985 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9986 return t;
9989 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9990 space. This is used to avoid issuing overflow warnings for
9991 expressions like &p->x which cannot wrap. */
9993 static bool
9994 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9996 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9997 return true;
9999 if (maybe_lt (bitpos, 0))
10000 return true;
10002 poly_wide_int wi_offset;
10003 int precision = TYPE_PRECISION (TREE_TYPE (base));
10004 if (offset == NULL_TREE)
10005 wi_offset = wi::zero (precision);
10006 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10007 return true;
10008 else
10009 wi_offset = wi::to_poly_wide (offset);
10011 wi::overflow_type overflow;
10012 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10013 precision);
10014 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10015 if (overflow)
10016 return true;
10018 poly_uint64 total_hwi, size;
10019 if (!total.to_uhwi (&total_hwi)
10020 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10021 &size)
10022 || known_eq (size, 0U))
10023 return true;
10025 if (known_le (total_hwi, size))
10026 return false;
10028 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10029 array. */
10030 if (TREE_CODE (base) == ADDR_EXPR
10031 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10032 &size)
10033 && maybe_ne (size, 0U)
10034 && known_le (total_hwi, size))
10035 return false;
10037 return true;
10040 /* Return a positive integer when the symbol DECL is known to have
10041 a nonzero address, zero when it's known not to (e.g., it's a weak
10042 symbol), and a negative integer when the symbol is not yet in the
10043 symbol table and so whether or not its address is zero is unknown.
10044 For function-local objects, always return a positive integer. */
10045 static int
10046 maybe_nonzero_address (tree decl)
10048 /* Normally, don't do anything for variables and functions before symtab is
10049 built; it is quite possible that DECL will be declared weak later.
10050 But if folding_initializer, we need a constant answer now, so create
10051 the symtab entry and prevent later weak declaration. */
10052 if (DECL_P (decl) && decl_in_symtab_p (decl))
10053 if (struct symtab_node *symbol
10054 = (folding_initializer
10055 ? symtab_node::get_create (decl)
10056 : symtab_node::get (decl)))
10057 return symbol->nonzero_address ();
10059 /* Function local objects are never NULL. */
10060 if (DECL_P (decl)
10061 && (DECL_CONTEXT (decl)
10062 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10063 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10064 return 1;
10066 return -1;
10069 /* Subroutine of fold_binary. This routine performs all of the
10070 transformations that are common to the equality/inequality
10071 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10072 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10073 fold_binary should go through fold_binary instead. Fold a comparison with
10074 tree code CODE and type TYPE with operands OP0 and OP1. Return
10075 the folded comparison or NULL_TREE. */
10077 static tree
10078 fold_comparison (location_t loc, enum tree_code code, tree type,
10079 tree op0, tree op1)
10081 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10082 tree arg0, arg1, tem;
10084 arg0 = op0;
10085 arg1 = op1;
10087 STRIP_SIGN_NOPS (arg0);
10088 STRIP_SIGN_NOPS (arg1);
10090 /* For comparisons of pointers we can decompose it to a compile time
10091 comparison of the base objects and the offsets into the object.
10092 This requires at least one operand being an ADDR_EXPR or a
10093 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10094 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10095 && (TREE_CODE (arg0) == ADDR_EXPR
10096 || TREE_CODE (arg1) == ADDR_EXPR
10097 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10098 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10100 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10101 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10102 machine_mode mode;
10103 int volatilep, reversep, unsignedp;
10104 bool indirect_base0 = false, indirect_base1 = false;
10106 /* Get base and offset for the access. Strip ADDR_EXPR for
10107 get_inner_reference, but put it back by stripping INDIRECT_REF
10108 off the base object if possible. indirect_baseN will be true
10109 if baseN is not an address but refers to the object itself. */
10110 base0 = arg0;
10111 if (TREE_CODE (arg0) == ADDR_EXPR)
10113 base0
10114 = get_inner_reference (TREE_OPERAND (arg0, 0),
10115 &bitsize, &bitpos0, &offset0, &mode,
10116 &unsignedp, &reversep, &volatilep);
10117 if (INDIRECT_REF_P (base0))
10118 base0 = TREE_OPERAND (base0, 0);
10119 else
10120 indirect_base0 = true;
10122 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10124 base0 = TREE_OPERAND (arg0, 0);
10125 STRIP_SIGN_NOPS (base0);
10126 if (TREE_CODE (base0) == ADDR_EXPR)
10128 base0
10129 = get_inner_reference (TREE_OPERAND (base0, 0),
10130 &bitsize, &bitpos0, &offset0, &mode,
10131 &unsignedp, &reversep, &volatilep);
10132 if (INDIRECT_REF_P (base0))
10133 base0 = TREE_OPERAND (base0, 0);
10134 else
10135 indirect_base0 = true;
10137 if (offset0 == NULL_TREE || integer_zerop (offset0))
10138 offset0 = TREE_OPERAND (arg0, 1);
10139 else
10140 offset0 = size_binop (PLUS_EXPR, offset0,
10141 TREE_OPERAND (arg0, 1));
10142 if (poly_int_tree_p (offset0))
10144 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10145 TYPE_PRECISION (sizetype));
10146 tem <<= LOG2_BITS_PER_UNIT;
10147 tem += bitpos0;
10148 if (tem.to_shwi (&bitpos0))
10149 offset0 = NULL_TREE;
10153 base1 = arg1;
10154 if (TREE_CODE (arg1) == ADDR_EXPR)
10156 base1
10157 = get_inner_reference (TREE_OPERAND (arg1, 0),
10158 &bitsize, &bitpos1, &offset1, &mode,
10159 &unsignedp, &reversep, &volatilep);
10160 if (INDIRECT_REF_P (base1))
10161 base1 = TREE_OPERAND (base1, 0);
10162 else
10163 indirect_base1 = true;
10165 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10167 base1 = TREE_OPERAND (arg1, 0);
10168 STRIP_SIGN_NOPS (base1);
10169 if (TREE_CODE (base1) == ADDR_EXPR)
10171 base1
10172 = get_inner_reference (TREE_OPERAND (base1, 0),
10173 &bitsize, &bitpos1, &offset1, &mode,
10174 &unsignedp, &reversep, &volatilep);
10175 if (INDIRECT_REF_P (base1))
10176 base1 = TREE_OPERAND (base1, 0);
10177 else
10178 indirect_base1 = true;
10180 if (offset1 == NULL_TREE || integer_zerop (offset1))
10181 offset1 = TREE_OPERAND (arg1, 1);
10182 else
10183 offset1 = size_binop (PLUS_EXPR, offset1,
10184 TREE_OPERAND (arg1, 1));
10185 if (poly_int_tree_p (offset1))
10187 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10188 TYPE_PRECISION (sizetype));
10189 tem <<= LOG2_BITS_PER_UNIT;
10190 tem += bitpos1;
10191 if (tem.to_shwi (&bitpos1))
10192 offset1 = NULL_TREE;
10196 /* If we have equivalent bases we might be able to simplify. */
10197 if (indirect_base0 == indirect_base1
10198 && operand_equal_p (base0, base1,
10199 indirect_base0 ? OEP_ADDRESS_OF : 0))
10201 /* We can fold this expression to a constant if the non-constant
10202 offset parts are equal. */
10203 if ((offset0 == offset1
10204 || (offset0 && offset1
10205 && operand_equal_p (offset0, offset1, 0)))
10206 && (equality_code
10207 || (indirect_base0
10208 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10209 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10211 if (!equality_code
10212 && maybe_ne (bitpos0, bitpos1)
10213 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10214 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10215 fold_overflow_warning (("assuming pointer wraparound does not "
10216 "occur when comparing P +- C1 with "
10217 "P +- C2"),
10218 WARN_STRICT_OVERFLOW_CONDITIONAL);
10220 switch (code)
10222 case EQ_EXPR:
10223 if (known_eq (bitpos0, bitpos1))
10224 return constant_boolean_node (true, type);
10225 if (known_ne (bitpos0, bitpos1))
10226 return constant_boolean_node (false, type);
10227 break;
10228 case NE_EXPR:
10229 if (known_ne (bitpos0, bitpos1))
10230 return constant_boolean_node (true, type);
10231 if (known_eq (bitpos0, bitpos1))
10232 return constant_boolean_node (false, type);
10233 break;
10234 case LT_EXPR:
10235 if (known_lt (bitpos0, bitpos1))
10236 return constant_boolean_node (true, type);
10237 if (known_ge (bitpos0, bitpos1))
10238 return constant_boolean_node (false, type);
10239 break;
10240 case LE_EXPR:
10241 if (known_le (bitpos0, bitpos1))
10242 return constant_boolean_node (true, type);
10243 if (known_gt (bitpos0, bitpos1))
10244 return constant_boolean_node (false, type);
10245 break;
10246 case GE_EXPR:
10247 if (known_ge (bitpos0, bitpos1))
10248 return constant_boolean_node (true, type);
10249 if (known_lt (bitpos0, bitpos1))
10250 return constant_boolean_node (false, type);
10251 break;
10252 case GT_EXPR:
10253 if (known_gt (bitpos0, bitpos1))
10254 return constant_boolean_node (true, type);
10255 if (known_le (bitpos0, bitpos1))
10256 return constant_boolean_node (false, type);
10257 break;
10258 default:;
10261 /* We can simplify the comparison to a comparison of the variable
10262 offset parts if the constant offset parts are equal.
10263 Be careful to use signed sizetype here because otherwise we
10264 mess with array offsets in the wrong way. This is possible
10265 because pointer arithmetic is restricted to remain within an
10266 object and overflow on pointer differences is undefined as of
10267 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10268 else if (known_eq (bitpos0, bitpos1)
10269 && (equality_code
10270 || (indirect_base0
10271 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10272 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10274 /* By converting to signed sizetype we cover middle-end pointer
10275 arithmetic which operates on unsigned pointer types of size
10276 type size and ARRAY_REF offsets which are properly sign or
10277 zero extended from their type in case it is narrower than
10278 sizetype. */
10279 if (offset0 == NULL_TREE)
10280 offset0 = build_int_cst (ssizetype, 0);
10281 else
10282 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10283 if (offset1 == NULL_TREE)
10284 offset1 = build_int_cst (ssizetype, 0);
10285 else
10286 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10288 if (!equality_code
10289 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10290 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10291 fold_overflow_warning (("assuming pointer wraparound does not "
10292 "occur when comparing P +- C1 with "
10293 "P +- C2"),
10294 WARN_STRICT_OVERFLOW_COMPARISON);
10296 return fold_build2_loc (loc, code, type, offset0, offset1);
10299 /* For equal offsets we can simplify to a comparison of the
10300 base addresses. */
10301 else if (known_eq (bitpos0, bitpos1)
10302 && (indirect_base0
10303 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10304 && (indirect_base1
10305 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10306 && ((offset0 == offset1)
10307 || (offset0 && offset1
10308 && operand_equal_p (offset0, offset1, 0))))
10310 if (indirect_base0)
10311 base0 = build_fold_addr_expr_loc (loc, base0);
10312 if (indirect_base1)
10313 base1 = build_fold_addr_expr_loc (loc, base1);
10314 return fold_build2_loc (loc, code, type, base0, base1);
10316 /* Comparison between an ordinary (non-weak) symbol and a null
10317 pointer can be eliminated since such symbols must have a non
10318 null address. In C, relational expressions between pointers
10319 to objects and null pointers are undefined. The results
10320 below follow the C++ rules with the additional property that
10321 every object pointer compares greater than a null pointer. */
10323 else if (((DECL_P (base0)
10324 && maybe_nonzero_address (base0) > 0
10325 /* Avoid folding references to struct members at offset 0 to
10326 prevent tests like '&ptr->firstmember == 0' from getting
10327 eliminated. When ptr is null, although the -> expression
10328 is strictly speaking invalid, GCC retains it as a matter
10329 of QoI. See PR c/44555. */
10330 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10331 || CONSTANT_CLASS_P (base0))
10332 && indirect_base0
10333 /* The caller guarantees that when one of the arguments is
10334 constant (i.e., null in this case) it is second. */
10335 && integer_zerop (arg1))
10337 switch (code)
10339 case EQ_EXPR:
10340 case LE_EXPR:
10341 case LT_EXPR:
10342 return constant_boolean_node (false, type);
10343 case GE_EXPR:
10344 case GT_EXPR:
10345 case NE_EXPR:
10346 return constant_boolean_node (true, type);
10347 default:
10348 gcc_unreachable ();
10353 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10354 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10355 the resulting offset is smaller in absolute value than the
10356 original one and has the same sign. */
10357 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10358 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10359 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10360 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10361 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10362 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10363 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10364 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10366 tree const1 = TREE_OPERAND (arg0, 1);
10367 tree const2 = TREE_OPERAND (arg1, 1);
10368 tree variable1 = TREE_OPERAND (arg0, 0);
10369 tree variable2 = TREE_OPERAND (arg1, 0);
10370 tree cst;
10371 const char * const warnmsg = G_("assuming signed overflow does not "
10372 "occur when combining constants around "
10373 "a comparison");
10375 /* Put the constant on the side where it doesn't overflow and is
10376 of lower absolute value and of the same sign as before. */
10377 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10378 ? MINUS_EXPR : PLUS_EXPR,
10379 const2, const1);
10380 if (!TREE_OVERFLOW (cst)
10381 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10382 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10384 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10385 return fold_build2_loc (loc, code, type,
10386 variable1,
10387 fold_build2_loc (loc, TREE_CODE (arg1),
10388 TREE_TYPE (arg1),
10389 variable2, cst));
10392 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10393 ? MINUS_EXPR : PLUS_EXPR,
10394 const1, const2);
10395 if (!TREE_OVERFLOW (cst)
10396 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10397 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10399 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10400 return fold_build2_loc (loc, code, type,
10401 fold_build2_loc (loc, TREE_CODE (arg0),
10402 TREE_TYPE (arg0),
10403 variable1, cst),
10404 variable2);
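/* Worked example (illustrative, not from the original source): for
   signed x and y, x + 2 < y + 3 is rewritten as x < y + 1: the
   combined constant 3 - 2 == 1 has smaller absolute value than 3 and
   the same sign, so the rewrite cannot introduce an overflow the
   original did not already have. */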
10408 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10409 if (tem)
10410 return tem;
10412 /* If we are comparing an expression that just has comparisons
10413 of two integer values, arithmetic expressions of those comparisons,
10414 and constants, we can simplify it. There are only three cases
10415 to check: the two values can either be equal, the first can be
10416 greater, or the second can be greater. Fold the expression for
10417 those three values. Since each value must be 0 or 1, we have
10418 eight possibilities, each of which corresponds to the constant 0
10419 or 1 or one of the six possible comparisons.
10421 This handles common cases like (a > b) == 0 but also handles
10422 expressions like ((x > y) - (y > x)) > 0, which supposedly
10423 occur in macroized code. */
10425 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10427 tree cval1 = 0, cval2 = 0;
10429 if (twoval_comparison_p (arg0, &cval1, &cval2)
10430 /* Don't handle degenerate cases here; they should already
10431 have been handled anyway. */
10432 && cval1 != 0 && cval2 != 0
10433 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10434 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10435 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10436 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10437 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10438 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10439 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10441 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10442 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10444 /* We can't just pass T to eval_subst in case cval1 or cval2
10445 was the same as ARG1. */
10447 tree high_result
10448 = fold_build2_loc (loc, code, type,
10449 eval_subst (loc, arg0, cval1, maxval,
10450 cval2, minval),
10451 arg1);
10452 tree equal_result
10453 = fold_build2_loc (loc, code, type,
10454 eval_subst (loc, arg0, cval1, maxval,
10455 cval2, maxval),
10456 arg1);
10457 tree low_result
10458 = fold_build2_loc (loc, code, type,
10459 eval_subst (loc, arg0, cval1, minval,
10460 cval2, maxval),
10461 arg1);
10463 /* All three of these results should be 0 or 1. Confirm they are.
10464 Then use those values to select the proper code to use. */
10466 if (TREE_CODE (high_result) == INTEGER_CST
10467 && TREE_CODE (equal_result) == INTEGER_CST
10468 && TREE_CODE (low_result) == INTEGER_CST)
10470 /* Make a 3-bit mask with the high-order bit being the
10471 value for '>', the next for '=', and the low for '<'. */
10472 switch ((integer_onep (high_result) * 4)
10473 + (integer_onep (equal_result) * 2)
10474 + integer_onep (low_result))
10476 case 0:
10477 /* Always false. */
10478 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10479 case 1:
10480 code = LT_EXPR;
10481 break;
10482 case 2:
10483 code = EQ_EXPR;
10484 break;
10485 case 3:
10486 code = LE_EXPR;
10487 break;
10488 case 4:
10489 code = GT_EXPR;
10490 break;
10491 case 5:
10492 code = NE_EXPR;
10493 break;
10494 case 6:
10495 code = GE_EXPR;
10496 break;
10497 case 7:
10498 /* Always true. */
10499 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10502 return fold_build2_loc (loc, code, type, cval1, cval2);
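/* Worked example (illustrative, not from the original source): for
   ((x > y) - (y > x)) > 0, substituting the three orderings of x and y
   gives high_result = 1, equal_result = 0 and low_result = 0, i.e.
   mask 4, so the whole expression folds to x > y. */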
10507 return NULL_TREE;
10511 /* Subroutine of fold_binary. Optimize complex multiplications of the
10512 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10513 argument EXPR represents the expression "z" of type TYPE. */
10515 static tree
10516 fold_mult_zconjz (location_t loc, tree type, tree expr)
10518 tree itype = TREE_TYPE (type);
10519 tree rpart, ipart, tem;
10521 if (TREE_CODE (expr) == COMPLEX_EXPR)
10523 rpart = TREE_OPERAND (expr, 0);
10524 ipart = TREE_OPERAND (expr, 1);
10526 else if (TREE_CODE (expr) == COMPLEX_CST)
10528 rpart = TREE_REALPART (expr);
10529 ipart = TREE_IMAGPART (expr);
10531 else
10533 expr = save_expr (expr);
10534 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10535 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10538 rpart = save_expr (rpart);
10539 ipart = save_expr (ipart);
10540 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10541 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10542 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10543 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10544 build_zero_cst (itype));
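/* The underlying identity (illustrative, not from the original
   source): for z = a + b*i, z * conj(z) = (a + b*i)(a - b*i)
   = a*a + b*b + 0*i, which is exactly the PLUS of the two squared
   parts built above, paired with a zero imaginary part. */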
10548 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10549 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10550 true if successful. */
10552 static bool
10553 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10555 unsigned HOST_WIDE_INT i, nunits;
10557 if (TREE_CODE (arg) == VECTOR_CST
10558 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10560 for (i = 0; i < nunits; ++i)
10561 elts[i] = VECTOR_CST_ELT (arg, i);
10563 else if (TREE_CODE (arg) == CONSTRUCTOR)
10565 constructor_elt *elt;
10567 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10568 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10569 return false;
10570 else
10571 elts[i] = elt->value;
10573 else
10574 return false;
10575 for (; i < nelts; i++)
10576 elts[i]
10577 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10578 return true;
10581 /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10582 mask for VLA vec_perm folding.
10583 REASON if specified, will contain the reason why SEL is not suitable.
10584 Used only for debugging and unit-testing. */
10586 static bool
10587 valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10588 const vec_perm_indices &sel,
10589 const char **reason = NULL)
10591 unsigned sel_npatterns = sel.encoding ().npatterns ();
10592 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10594 if (!(pow2p_hwi (sel_npatterns)
10595 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10596 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10598 if (reason)
10599 *reason = "npatterns is not power of 2";
10600 return false;
10603 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10604 E.g. sel.length = 2 + 2x, and sel npatterns = 4. */
10605 poly_uint64 esel;
10606 if (!multiple_p (sel.length (), sel_npatterns, &esel))
10608 if (reason)
10609 *reason = "sel.length is not multiple of sel_npatterns";
10610 return false;
10613 if (sel_nelts_per_pattern < 3)
10614 return true;
10616 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10618 poly_uint64 a1 = sel[pattern + sel_npatterns];
10619 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10620 HOST_WIDE_INT step;
10621 if (!poly_int64 (a2 - a1).is_constant (&step))
10623 if (reason)
10624 *reason = "step is not constant";
10625 return false;
10627 // FIXME: Punt on step < 0 for now, revisit later.
10628 if (step < 0)
10629 return false;
10630 if (step == 0)
10631 continue;
10633 if (!pow2p_hwi (step))
10635 if (reason)
10636 *reason = "step is not power of 2";
10637 return false;
10640 /* Ensure that the stepped sequence of the pattern selects elements
10641 only from the same input vector. */
10642 uint64_t q1, qe;
10643 poly_uint64 r1, re;
10644 poly_uint64 ae = a1 + (esel - 2) * step;
10645 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10647 if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10648 && can_div_trunc_p (ae, arg_len, &qe, &re)
10649 && q1 == qe))
10651 if (reason)
10652 *reason = "crossed input vectors";
10653 return false;
10656 /* Ensure that the stepped sequence always selects from the same
10657 input pattern. */
10658 unsigned arg_npatterns
10659 = ((q1 & 1) == 0) ? VECTOR_CST_NPATTERNS (arg0)
10660 : VECTOR_CST_NPATTERNS (arg1);
10662 if (!multiple_p (step, arg_npatterns))
10664 if (reason)
10665 *reason = "step is not multiple of npatterns";
10666 return false;
10670 return true;
10673 /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10674 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10675 REASON has the same purpose as described in
10676 valid_mask_for_fold_vec_perm_cst_p. */
10678 static tree
10679 fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10680 const char **reason = NULL)
10682 unsigned res_npatterns, res_nelts_per_pattern;
10683 unsigned HOST_WIDE_INT res_nelts;
10685 /* (1) If SEL is a suitable mask as determined by
10686 valid_mask_for_fold_vec_perm_cst_p, then:
10687 res_npatterns = max of npatterns between ARG0, ARG1, and SEL
10688 res_nelts_per_pattern = max of nelts_per_pattern between
10689 ARG0, ARG1 and SEL.
10690 (2) If SEL is not a suitable mask, and TYPE is VLS then:
10691 res_npatterns = nelts in result vector.
10692 res_nelts_per_pattern = 1.
10693 This exception is made so that VLS ARG0, ARG1 and SEL work as before. */
10694 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10696 res_npatterns
10697 = std::max (VECTOR_CST_NPATTERNS (arg0),
10698 std::max (VECTOR_CST_NPATTERNS (arg1),
10699 sel.encoding ().npatterns ()));
10701 res_nelts_per_pattern
10702 = std::max (VECTOR_CST_NELTS_PER_PATTERN (arg0),
10703 std::max (VECTOR_CST_NELTS_PER_PATTERN (arg1),
10704 sel.encoding ().nelts_per_pattern ()));
10706 res_nelts = res_npatterns * res_nelts_per_pattern;
10708 else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10710 res_npatterns = res_nelts;
10711 res_nelts_per_pattern = 1;
10713 else
10714 return NULL_TREE;
10716 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10717 for (unsigned i = 0; i < res_nelts; i++)
10719 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10720 uint64_t q;
10721 poly_uint64 r;
10722 unsigned HOST_WIDE_INT index;
10724 /* Punt if sel[i] /trunc_div len cannot be determined,
10725 because the input vector to be chosen will depend on
10726 runtime vector length.
10727 For example, if len == 4 + 4x and sel[i] == 4:
10728 if len at runtime equals 4, we choose arg1[0];
10729 for any other runtime value of len > 4, we choose arg0[4],
10730 which makes the element choice dependent on runtime vector length. */
10731 if (!can_div_trunc_p (sel[i], len, &q, &r))
10733 if (reason)
10734 *reason = "cannot divide selector element by arg len";
10735 return NULL_TREE;
10738 /* sel[i] % len will give the index of element in the chosen input
10739 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10740 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10741 if (!r.is_constant (&index))
10743 if (reason)
10744 *reason = "remainder is not constant";
10745 return NULL_TREE;
10748 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10749 tree elem = vector_cst_elt (arg, index);
10750 out_elts.quick_push (elem);
10753 return out_elts.build ();
10756 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10757 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10758 NULL_TREE otherwise. */
10760 tree
10761 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10763 unsigned int i;
10764 unsigned HOST_WIDE_INT nelts;
10766 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10767 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10768 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10770 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10771 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10772 return NULL_TREE;
10774 if (TREE_CODE (arg0) == VECTOR_CST
10775 && TREE_CODE (arg1) == VECTOR_CST)
10776 return fold_vec_perm_cst (type, arg0, arg1, sel);
10778 /* For the fallback case, we want to ensure we have VLS vectors
10779 with equal length. */
10780 if (!sel.length ().is_constant (&nelts))
10781 return NULL_TREE;
10783 gcc_assert (known_eq (sel.length (),
10784 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10785 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10786 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10787 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10788 return NULL_TREE;
10790 vec<constructor_elt, va_gc> *v;
10791 vec_alloc (v, nelts);
10792 for (i = 0; i < nelts; i++)
10794 HOST_WIDE_INT index;
10795 if (!sel[i].is_constant (&index))
10796 return NULL_TREE;
10797 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10799 return build_constructor (type, v);
10802 /* Try to fold a pointer difference of type TYPE between two address expressions of
10803 array references AREF0 and AREF1 using location LOC. Return a
10804 simplified expression for the difference or NULL_TREE. */
10806 static tree
10807 fold_addr_of_array_ref_difference (location_t loc, tree type,
10808 tree aref0, tree aref1,
10809 bool use_pointer_diff)
10811 tree base0 = TREE_OPERAND (aref0, 0);
10812 tree base1 = TREE_OPERAND (aref1, 0);
10813 tree base_offset = build_int_cst (type, 0);
10815 /* If the bases are array references as well, recurse. If the bases
10816 are pointer indirections compute the difference of the pointers.
10817 If the bases are equal, we are set. */
10818 if ((TREE_CODE (base0) == ARRAY_REF
10819 && TREE_CODE (base1) == ARRAY_REF
10820 && (base_offset
10821 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10822 use_pointer_diff)))
10823 || (INDIRECT_REF_P (base0)
10824 && INDIRECT_REF_P (base1)
10825 && (base_offset
10826 = use_pointer_diff
10827 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10828 TREE_OPERAND (base0, 0),
10829 TREE_OPERAND (base1, 0))
10830 : fold_binary_loc (loc, MINUS_EXPR, type,
10831 fold_convert (type,
10832 TREE_OPERAND (base0, 0)),
10833 fold_convert (type,
10834 TREE_OPERAND (base1, 0)))))
10835 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10837 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10838 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10839 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10840 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10841 return fold_build2_loc (loc, PLUS_EXPR, type,
10842 base_offset,
10843 fold_build2_loc (loc, MULT_EXPR, type,
10844 diff, esz));
10846 return NULL_TREE;
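/* Worked example (illustrative, not from the original source): for
   int a[], the tree-level difference of &a[i] and &a[j] has equal
   bases, so it folds to 0 + (i - j) * 4 on a target with 4-byte int,
   i.e. the byte offset between the two addresses. */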
10849 /* If the real or vector real constant CST of type TYPE has an exact
10850 inverse, return it, else return NULL. */
10852 tree
10853 exact_inverse (tree type, tree cst)
10855 REAL_VALUE_TYPE r;
10856 tree unit_type;
10857 machine_mode mode;
10859 switch (TREE_CODE (cst))
10861 case REAL_CST:
10862 r = TREE_REAL_CST (cst);
10864 if (exact_real_inverse (TYPE_MODE (type), &r))
10865 return build_real (type, r);
10867 return NULL_TREE;
10869 case VECTOR_CST:
10871 unit_type = TREE_TYPE (type);
10872 mode = TYPE_MODE (unit_type);
10874 tree_vector_builder elts;
10875 if (!elts.new_unary_operation (type, cst, false))
10876 return NULL_TREE;
10877 unsigned int count = elts.encoded_nelts ();
10878 for (unsigned int i = 0; i < count; ++i)
10880 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10881 if (!exact_real_inverse (mode, &r))
10882 return NULL_TREE;
10883 elts.quick_push (build_real (unit_type, r));
10886 return elts.build ();
10889 default:
10890 return NULL_TREE;
10894 /* Mask out the tz least significant bits of X of type TYPE where
10895 tz is the number of trailing zeroes in Y. */
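/* For example, for Y = 24 (binary 11000, three trailing zeros) the
   result is X & ~7, i.e. X with its three low bits cleared.  */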
10896 static wide_int
10897 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10899 int tz = wi::ctz (y);
10900 if (tz > 0)
10901 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10902 return x;
10905 /* Return true when T is an address and is known to be nonzero.
10906 For floating point we further ensure that T is not denormal.
10907 Similar logic is present in nonzero_address in rtlanal.h.
10909 If the return value is based on the assumption that signed overflow
10910 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10911 change *STRICT_OVERFLOW_P. */
10913 static bool
10914 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10916 tree type = TREE_TYPE (t);
10917 enum tree_code code;
10919 /* Doing something useful for floating point would need more work. */
10920 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10921 return false;
10923 code = TREE_CODE (t);
10924 switch (TREE_CODE_CLASS (code))
10926 case tcc_unary:
10927 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10928 strict_overflow_p);
10929 case tcc_binary:
10930 case tcc_comparison:
10931 return tree_binary_nonzero_warnv_p (code, type,
10932 TREE_OPERAND (t, 0),
10933 TREE_OPERAND (t, 1),
10934 strict_overflow_p);
10935 case tcc_constant:
10936 case tcc_declaration:
10937 case tcc_reference:
10938 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10940 default:
10941 break;
10944 switch (code)
10946 case TRUTH_NOT_EXPR:
10947 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10948 strict_overflow_p);
10950 case TRUTH_AND_EXPR:
10951 case TRUTH_OR_EXPR:
10952 case TRUTH_XOR_EXPR:
10953 return tree_binary_nonzero_warnv_p (code, type,
10954 TREE_OPERAND (t, 0),
10955 TREE_OPERAND (t, 1),
10956 strict_overflow_p);
10958 case COND_EXPR:
10959 case CONSTRUCTOR:
10960 case OBJ_TYPE_REF:
10961 case ADDR_EXPR:
10962 case WITH_SIZE_EXPR:
10963 case SSA_NAME:
10964 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10966 case COMPOUND_EXPR:
10967 case MODIFY_EXPR:
10968 case BIND_EXPR:
10969 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10970 strict_overflow_p);
10972 case SAVE_EXPR:
10973 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10974 strict_overflow_p);
10976 case CALL_EXPR:
10978 tree fndecl = get_callee_fndecl (t);
10979 if (!fndecl) return false;
10980 if (flag_delete_null_pointer_checks && !flag_check_new
10981 && DECL_IS_OPERATOR_NEW_P (fndecl)
10982 && !TREE_NOTHROW (fndecl))
10983 return true;
10984 if (flag_delete_null_pointer_checks
10985 && lookup_attribute ("returns_nonnull",
10986 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10987 return true;
10988 return alloca_call_p (t);
10991 default:
10992 break;
10994 return false;
10997 /* Return true when T is an address and is known to be nonzero.
10998 Handle warnings about undefined signed overflow. */
11000 bool
11001 tree_expr_nonzero_p (tree t)
11003 bool ret, strict_overflow_p;
11005 strict_overflow_p = false;
11006 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
11007 if (strict_overflow_p)
11008 fold_overflow_warning (("assuming signed overflow does not occur when "
11009 "determining that expression is always "
11010 "non-zero"),
11011 WARN_STRICT_OVERFLOW_MISC);
11012 return ret;
11015 /* Return true if T is known not to be equal to an integer W. */
11017 bool
11018 expr_not_equal_to (tree t, const wide_int &w)
11020 int_range_max vr;
11021 switch (TREE_CODE (t))
11023 case INTEGER_CST:
11024 return wi::to_wide (t) != w;
11026 case SSA_NAME:
11027 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11028 return false;
11030 if (cfun)
11031 get_range_query (cfun)->range_of_expr (vr, t);
11032 else
11033 get_global_range_query ()->range_of_expr (vr, t);
11035 if (!vr.undefined_p () && !vr.contains_p (w))
11036 return true;
11037 /* If T has some known zero bits and W has any of those bits set,
11038 then T is known not to be equal to W. */
11039 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
11040 TYPE_PRECISION (TREE_TYPE (t))), 0))
11041 return true;
11042 return false;
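/* For example, if the recorded range of T is [1, 10], T is known
   not to equal 0; and if bit 0 of T is a known zero bit (T is
   even), T is known not to equal any odd W.  */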
11044 default:
11045 return false;
11049 /* Fold a binary expression of code CODE and type TYPE with operands
11050 OP0 and OP1. LOC is the location of the resulting expression.
11051 Return the folded expression if folding is successful. Otherwise,
11052 return NULL_TREE. */
11054 tree
11055 fold_binary_loc (location_t loc, enum tree_code code, tree type,
11056 tree op0, tree op1)
11058 enum tree_code_class kind = TREE_CODE_CLASS (code);
11059 tree arg0, arg1, tem;
11060 tree t1 = NULL_TREE;
11061 bool strict_overflow_p;
11062 unsigned int prec;
11064 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11065 && TREE_CODE_LENGTH (code) == 2
11066 && op0 != NULL_TREE
11067 && op1 != NULL_TREE);
11069 arg0 = op0;
11070 arg1 = op1;
11072 /* Strip any conversions that don't change the mode. This is
11073 safe for every expression, except for a comparison expression
11074 because its signedness is derived from its operands. So, in
11075 the latter case, only strip conversions that don't change the
11076 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11077 preserved.
11079 Note that this is done as an internal manipulation within the
11080 constant folder, in order to find the simplest representation
11081 of the arguments so that their form can be studied.  In any
11082 case, the appropriate type conversions should be put back in
11083 the tree that will get out of the constant folder. */
11085 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11087 STRIP_SIGN_NOPS (arg0);
11088 STRIP_SIGN_NOPS (arg1);
11090 else
11092 STRIP_NOPS (arg0);
11093 STRIP_NOPS (arg1);
11096 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11097 constant but we can't do arithmetic on them. */
11098 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11100 tem = const_binop (code, type, arg0, arg1);
11101 if (tem != NULL_TREE)
11103 if (TREE_TYPE (tem) != type)
11104 tem = fold_convert_loc (loc, type, tem);
11105 return tem;
11109 /* If this is a commutative operation, and ARG0 is a constant, move it
11110 to ARG1 to reduce the number of tests below. */
11111 if (commutative_tree_code (code)
11112 && tree_swap_operands_p (arg0, arg1))
11113 return fold_build2_loc (loc, code, type, op1, op0);
11115 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11116 to ARG1 to reduce the number of tests below. */
11117 if (kind == tcc_comparison
11118 && tree_swap_operands_p (arg0, arg1))
11119 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11121 tem = generic_simplify (loc, code, type, op0, op1);
11122 if (tem)
11123 return tem;
11125 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11127 First check for cases where an arithmetic operation is applied to a
11128 compound, conditional, or comparison operation. Push the arithmetic
11129 operation inside the compound or conditional to see if any folding
11130 can then be done. Convert comparison to conditional for this purpose.
11131 This also optimizes non-constant cases that used to be done in
11132 expand_expr.
11134 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11135 one of the operands is a comparison and the other is a comparison, a
11136 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11137 code below would make the expression more complex. Change it to a
11138 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11139 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11141 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11142 || code == EQ_EXPR || code == NE_EXPR)
11143 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11144 && ((truth_value_p (TREE_CODE (arg0))
11145 && (truth_value_p (TREE_CODE (arg1))
11146 || (TREE_CODE (arg1) == BIT_AND_EXPR
11147 && integer_onep (TREE_OPERAND (arg1, 1)))))
11148 || (truth_value_p (TREE_CODE (arg1))
11149 && (truth_value_p (TREE_CODE (arg0))
11150 || (TREE_CODE (arg0) == BIT_AND_EXPR
11151 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11153 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11154 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11155 : TRUTH_XOR_EXPR,
11156 boolean_type_node,
11157 fold_convert_loc (loc, boolean_type_node, arg0),
11158 fold_convert_loc (loc, boolean_type_node, arg1));
11160 if (code == EQ_EXPR)
11161 tem = invert_truthvalue_loc (loc, tem);
11163 return fold_convert_loc (loc, type, tem);
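/* For example, (A < B) & (C < D) becomes the TRUTH_AND_EXPR
   (A < B) && (C < D), and (A < B) == (C < D) becomes
   !((A < B) ^ (C < D)).  */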
11166 if (TREE_CODE_CLASS (code) == tcc_binary
11167 || TREE_CODE_CLASS (code) == tcc_comparison)
11169 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11171 tem = fold_build2_loc (loc, code, type,
11172 fold_convert_loc (loc, TREE_TYPE (op0),
11173 TREE_OPERAND (arg0, 1)), op1);
11174 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11175 tem);
11177 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11179 tem = fold_build2_loc (loc, code, type, op0,
11180 fold_convert_loc (loc, TREE_TYPE (op1),
11181 TREE_OPERAND (arg1, 1)));
11182 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11183 tem);
11186 if (TREE_CODE (arg0) == COND_EXPR
11187 || TREE_CODE (arg0) == VEC_COND_EXPR
11188 || COMPARISON_CLASS_P (arg0))
11190 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11191 arg0, arg1,
11192 /*cond_first_p=*/1);
11193 if (tem != NULL_TREE)
11194 return tem;
11197 if (TREE_CODE (arg1) == COND_EXPR
11198 || TREE_CODE (arg1) == VEC_COND_EXPR
11199 || COMPARISON_CLASS_P (arg1))
11201 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11202 arg1, arg0,
11203 /*cond_first_p=*/0);
11204 if (tem != NULL_TREE)
11205 return tem;
11209 switch (code)
11211 case MEM_REF:
11212 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11213 if (TREE_CODE (arg0) == ADDR_EXPR
11214 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11216 tree iref = TREE_OPERAND (arg0, 0);
11217 return fold_build2 (MEM_REF, type,
11218 TREE_OPERAND (iref, 0),
11219 int_const_binop (PLUS_EXPR, arg1,
11220 TREE_OPERAND (iref, 1)));
11223 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11224 if (TREE_CODE (arg0) == ADDR_EXPR
11225 && handled_component_p (TREE_OPERAND (arg0, 0)))
11227 tree base;
11228 poly_int64 coffset;
11229 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11230 &coffset);
11231 if (!base)
11232 return NULL_TREE;
11233 return fold_build2 (MEM_REF, type,
11234 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11235 int_const_binop (PLUS_EXPR, arg1,
11236 size_int (coffset)));
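/* For example, MEM[&MEM[p, 4], 8] becomes MEM[p, 12], and for
   struct S { int a; int b; } s, MEM[&s.b, 8] becomes
   MEM[&s, offsetof (S, b) + 8].  */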
11239 return NULL_TREE;
11241 case POINTER_PLUS_EXPR:
11242 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11243 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11244 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11245 return fold_convert_loc (loc, type,
11246 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11247 fold_convert_loc (loc, sizetype,
11248 arg1),
11249 fold_convert_loc (loc, sizetype,
11250 arg0)));
11252 return NULL_TREE;
11254 case PLUS_EXPR:
11255 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11257 /* X + (X / CST) * -CST is X % CST. */
11258 if (TREE_CODE (arg1) == MULT_EXPR
11259 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11260 && operand_equal_p (arg0,
11261 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11263 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11264 tree cst1 = TREE_OPERAND (arg1, 1);
11265 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11266 cst1, cst0);
11267 if (sum && integer_zerop (sum))
11268 return fold_convert_loc (loc, type,
11269 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11270 TREE_TYPE (arg0), arg0,
11271 cst0));
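/* For example, X + (X / 4) * -4 folds to X % 4, since truncating
   division satisfies X % C == X - (X / C) * C.  */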
11275 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
11276 same or one of them 1.  Make sure the type is not saturating and has the signedness of
11277 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11278 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11279 if ((TREE_CODE (arg0) == MULT_EXPR
11280 || TREE_CODE (arg1) == MULT_EXPR)
11281 && !TYPE_SATURATING (type)
11282 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11283 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11284 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11286 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11287 if (tem)
11288 return tem;
11291 if (! FLOAT_TYPE_P (type))
11293 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11294 (plus (plus (mult) (mult)) (foo)) so that we can
11295 take advantage of the factoring cases below. */
11296 if (ANY_INTEGRAL_TYPE_P (type)
11297 && TYPE_OVERFLOW_WRAPS (type)
11298 && (((TREE_CODE (arg0) == PLUS_EXPR
11299 || TREE_CODE (arg0) == MINUS_EXPR)
11300 && TREE_CODE (arg1) == MULT_EXPR)
11301 || ((TREE_CODE (arg1) == PLUS_EXPR
11302 || TREE_CODE (arg1) == MINUS_EXPR)
11303 && TREE_CODE (arg0) == MULT_EXPR)))
11305 tree parg0, parg1, parg, marg;
11306 enum tree_code pcode;
11308 if (TREE_CODE (arg1) == MULT_EXPR)
11309 parg = arg0, marg = arg1;
11310 else
11311 parg = arg1, marg = arg0;
11312 pcode = TREE_CODE (parg);
11313 parg0 = TREE_OPERAND (parg, 0);
11314 parg1 = TREE_OPERAND (parg, 1);
11315 STRIP_NOPS (parg0);
11316 STRIP_NOPS (parg1);
11318 if (TREE_CODE (parg0) == MULT_EXPR
11319 && TREE_CODE (parg1) != MULT_EXPR)
11320 return fold_build2_loc (loc, pcode, type,
11321 fold_build2_loc (loc, PLUS_EXPR, type,
11322 fold_convert_loc (loc, type,
11323 parg0),
11324 fold_convert_loc (loc, type,
11325 marg)),
11326 fold_convert_loc (loc, type, parg1));
11327 if (TREE_CODE (parg0) != MULT_EXPR
11328 && TREE_CODE (parg1) == MULT_EXPR)
11329 return
11330 fold_build2_loc (loc, PLUS_EXPR, type,
11331 fold_convert_loc (loc, type, parg0),
11332 fold_build2_loc (loc, pcode, type,
11333 fold_convert_loc (loc, type, marg),
11334 fold_convert_loc (loc, type,
11335 parg1)));
11338 else
11340 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11341 to __complex__ ( x, y ). This is not the same for SNaNs or
11342 if signed zeros are involved. */
11343 if (!HONOR_SNANS (arg0)
11344 && !HONOR_SIGNED_ZEROS (arg0)
11345 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11347 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11348 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11349 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11350 bool arg0rz = false, arg0iz = false;
11351 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11352 || (arg0i && (arg0iz = real_zerop (arg0i))))
11354 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11355 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11356 if (arg0rz && arg1i && real_zerop (arg1i))
11358 tree rp = arg1r ? arg1r
11359 : build1 (REALPART_EXPR, rtype, arg1);
11360 tree ip = arg0i ? arg0i
11361 : build1 (IMAGPART_EXPR, rtype, arg0);
11362 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11364 else if (arg0iz && arg1r && real_zerop (arg1r))
11366 tree rp = arg0r ? arg0r
11367 : build1 (REALPART_EXPR, rtype, arg0);
11368 tree ip = arg1i ? arg1i
11369 : build1 (IMAGPART_EXPR, rtype, arg1);
11370 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11375 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11376 We associate floats only if the user has specified
11377 -fassociative-math. */
11378 if (flag_associative_math
11379 && TREE_CODE (arg1) == PLUS_EXPR
11380 && TREE_CODE (arg0) != MULT_EXPR)
11382 tree tree10 = TREE_OPERAND (arg1, 0);
11383 tree tree11 = TREE_OPERAND (arg1, 1);
11384 if (TREE_CODE (tree11) == MULT_EXPR
11385 && TREE_CODE (tree10) == MULT_EXPR)
11387 tree tree0;
11388 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11389 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11392 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11393 We associate floats only if the user has specified
11394 -fassociative-math. */
11395 if (flag_associative_math
11396 && TREE_CODE (arg0) == PLUS_EXPR
11397 && TREE_CODE (arg1) != MULT_EXPR)
11399 tree tree00 = TREE_OPERAND (arg0, 0);
11400 tree tree01 = TREE_OPERAND (arg0, 1);
11401 if (TREE_CODE (tree01) == MULT_EXPR
11402 && TREE_CODE (tree00) == MULT_EXPR)
11404 tree tree0;
11405 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11406 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11411 bit_rotate:
11412 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11413 is a rotate of A by C1 bits. */
11414 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11415 is a rotate of A by B bits.
11416 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11417 though in this case CODE must be | and not + or ^, otherwise
11418 it doesn't return A when B is 0. */
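/* For example, for a 32-bit unsigned A, (A << 3) + (A >> 29) and
   (A << B) | (A >> (32 - B)) both become left rotates of A, and
   (A << B) | (A >> (-B & 31)) is a left rotate that is safe even
   for B == 0 because | (unlike + or ^) returns A there.  */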
11420 enum tree_code code0, code1;
11421 tree rtype;
11422 code0 = TREE_CODE (arg0);
11423 code1 = TREE_CODE (arg1);
11424 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11425 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11426 && operand_equal_p (TREE_OPERAND (arg0, 0),
11427 TREE_OPERAND (arg1, 0), 0)
11428 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11429 TYPE_UNSIGNED (rtype))
11430 /* Only create rotates in complete modes. Other cases are not
11431 expanded properly. */
11432 && (element_precision (rtype)
11433 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11435 tree tree01, tree11;
11436 tree orig_tree01, orig_tree11;
11437 enum tree_code code01, code11;
11439 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11440 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11441 STRIP_NOPS (tree01);
11442 STRIP_NOPS (tree11);
11443 code01 = TREE_CODE (tree01);
11444 code11 = TREE_CODE (tree11);
11445 if (code11 != MINUS_EXPR
11446 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11448 std::swap (code0, code1);
11449 std::swap (code01, code11);
11450 std::swap (tree01, tree11);
11451 std::swap (orig_tree01, orig_tree11);
11453 if (code01 == INTEGER_CST
11454 && code11 == INTEGER_CST
11455 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11456 == element_precision (rtype)))
11458 tem = build2_loc (loc, LROTATE_EXPR,
11459 rtype, TREE_OPERAND (arg0, 0),
11460 code0 == LSHIFT_EXPR
11461 ? orig_tree01 : orig_tree11);
11462 return fold_convert_loc (loc, type, tem);
11464 else if (code11 == MINUS_EXPR)
11466 tree tree110, tree111;
11467 tree110 = TREE_OPERAND (tree11, 0);
11468 tree111 = TREE_OPERAND (tree11, 1);
11469 STRIP_NOPS (tree110);
11470 STRIP_NOPS (tree111);
11471 if (TREE_CODE (tree110) == INTEGER_CST
11472 && compare_tree_int (tree110,
11473 element_precision (rtype)) == 0
11474 && operand_equal_p (tree01, tree111, 0))
11476 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11477 ? LROTATE_EXPR : RROTATE_EXPR),
11478 rtype, TREE_OPERAND (arg0, 0),
11479 orig_tree01);
11480 return fold_convert_loc (loc, type, tem);
11483 else if (code == BIT_IOR_EXPR
11484 && code11 == BIT_AND_EXPR
11485 && pow2p_hwi (element_precision (rtype)))
11487 tree tree110, tree111;
11488 tree110 = TREE_OPERAND (tree11, 0);
11489 tree111 = TREE_OPERAND (tree11, 1);
11490 STRIP_NOPS (tree110);
11491 STRIP_NOPS (tree111);
11492 if (TREE_CODE (tree110) == NEGATE_EXPR
11493 && TREE_CODE (tree111) == INTEGER_CST
11494 && compare_tree_int (tree111,
11495 element_precision (rtype) - 1) == 0
11496 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11498 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11499 ? LROTATE_EXPR : RROTATE_EXPR),
11500 rtype, TREE_OPERAND (arg0, 0),
11501 orig_tree01);
11502 return fold_convert_loc (loc, type, tem);
11508 associate:
11509 /* In most languages, we can't associate operations on floats through
11510 parentheses. Rather than remember where the parentheses were, we
11511 don't associate floats at all, unless the user has specified
11512 -fassociative-math.
11513 And, we need to make sure type is not saturating. */
11515 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11516 && !TYPE_SATURATING (type)
11517 && !TYPE_OVERFLOW_SANITIZED (type))
11519 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11520 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11521 tree atype = type;
11522 bool ok = true;
11524 /* Split both trees into variables, constants, and literals. Then
11525 associate each group together, the constants with literals,
11526 then the result with variables. This increases the chances of
11527 literals being recombined later and of generating relocatable
11528 expressions for the sum of a constant and literal. */
11529 var0 = split_tree (arg0, type, code,
11530 &minus_var0, &con0, &minus_con0,
11531 &lit0, &minus_lit0, 0);
11532 var1 = split_tree (arg1, type, code,
11533 &minus_var1, &con1, &minus_con1,
11534 &lit1, &minus_lit1, code == MINUS_EXPR);
11536 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11537 if (code == MINUS_EXPR)
11538 code = PLUS_EXPR;
11540 /* With undefined overflow prefer doing association in a type
11541 which wraps on overflow, if that is one of the operand types. */
11542 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11543 && !TYPE_OVERFLOW_WRAPS (type))
11545 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11546 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11547 atype = TREE_TYPE (arg0);
11548 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11549 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11550 atype = TREE_TYPE (arg1);
11551 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11554 /* With undefined overflow we can only associate constants with one
11555 variable, and constants whose association doesn't overflow. */
11556 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11557 && !TYPE_OVERFLOW_WRAPS (atype))
11559 if ((var0 && var1) || (minus_var0 && minus_var1))
11561 /* ??? If split_tree would handle NEGATE_EXPR we could
11562 simply reject these cases and the allowed cases would
11563 be the var0/minus_var1 ones. */
11564 tree tmp0 = var0 ? var0 : minus_var0;
11565 tree tmp1 = var1 ? var1 : minus_var1;
11566 bool one_neg = false;
11568 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11570 tmp0 = TREE_OPERAND (tmp0, 0);
11571 one_neg = !one_neg;
11573 if (CONVERT_EXPR_P (tmp0)
11574 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11575 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11576 <= TYPE_PRECISION (atype)))
11577 tmp0 = TREE_OPERAND (tmp0, 0);
11578 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11580 tmp1 = TREE_OPERAND (tmp1, 0);
11581 one_neg = !one_neg;
11583 if (CONVERT_EXPR_P (tmp1)
11584 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11586 <= TYPE_PRECISION (atype)))
11587 tmp1 = TREE_OPERAND (tmp1, 0);
11588 /* The only case we can still associate with two variables
11589 is if they cancel out. */
11590 if (!one_neg
11591 || !operand_equal_p (tmp0, tmp1, 0))
11592 ok = false;
11594 else if ((var0 && minus_var1
11595 && ! operand_equal_p (var0, minus_var1, 0))
11596 || (minus_var0 && var1
11597 && ! operand_equal_p (minus_var0, var1, 0)))
11598 ok = false;
11601 /* Only do something if we found more than two objects. Otherwise,
11602 nothing has changed and we risk infinite recursion. */
11603 if (ok
11604 && ((var0 != 0) + (var1 != 0)
11605 + (minus_var0 != 0) + (minus_var1 != 0)
11606 + (con0 != 0) + (con1 != 0)
11607 + (minus_con0 != 0) + (minus_con1 != 0)
11608 + (lit0 != 0) + (lit1 != 0)
11609 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11611 var0 = associate_trees (loc, var0, var1, code, atype);
11612 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11613 code, atype);
11614 con0 = associate_trees (loc, con0, con1, code, atype);
11615 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11616 code, atype);
11617 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11618 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11619 code, atype);
11621 if (minus_var0 && var0)
11623 var0 = associate_trees (loc, var0, minus_var0,
11624 MINUS_EXPR, atype);
11625 minus_var0 = 0;
11627 if (minus_con0 && con0)
11629 con0 = associate_trees (loc, con0, minus_con0,
11630 MINUS_EXPR, atype);
11631 minus_con0 = 0;
11634 /* Preserve the MINUS_EXPR if the negative part of the literal is
11635 greater than the positive part. Otherwise, the multiplicative
11636 folding code (i.e. extract_muldiv) may be fooled in case
11637 unsigned constants are subtracted, like in the following
11638 example: ((X*2 + 4) - 8U)/2. */
11639 if (minus_lit0 && lit0)
11641 if (TREE_CODE (lit0) == INTEGER_CST
11642 && TREE_CODE (minus_lit0) == INTEGER_CST
11643 && tree_int_cst_lt (lit0, minus_lit0)
11644 /* But avoid ending up with only negated parts. */
11645 && (var0 || con0))
11647 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11648 MINUS_EXPR, atype);
11649 lit0 = 0;
11651 else
11653 lit0 = associate_trees (loc, lit0, minus_lit0,
11654 MINUS_EXPR, atype);
11655 minus_lit0 = 0;
11659 /* Don't introduce overflows through reassociation. */
11660 if ((lit0 && TREE_OVERFLOW_P (lit0))
11661 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11662 return NULL_TREE;
11664 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11665 con0 = associate_trees (loc, con0, lit0, code, atype);
11666 lit0 = 0;
11667 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11668 code, atype);
11669 minus_lit0 = 0;
11671 /* Eliminate minus_con0. */
11672 if (minus_con0)
11674 if (con0)
11675 con0 = associate_trees (loc, con0, minus_con0,
11676 MINUS_EXPR, atype);
11677 else if (var0)
11678 var0 = associate_trees (loc, var0, minus_con0,
11679 MINUS_EXPR, atype);
11680 else
11681 gcc_unreachable ();
11682 minus_con0 = 0;
11685 /* Eliminate minus_var0. */
11686 if (minus_var0)
11688 if (con0)
11689 con0 = associate_trees (loc, con0, minus_var0,
11690 MINUS_EXPR, atype);
11691 else
11692 gcc_unreachable ();
11693 minus_var0 = 0;
11696 return
11697 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11698 code, atype));
11702 return NULL_TREE;
11704 case POINTER_DIFF_EXPR:
11705 case MINUS_EXPR:
11706 /* Fold &a[i] - &a[j] to i-j. */
11707 if (TREE_CODE (arg0) == ADDR_EXPR
11708 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11709 && TREE_CODE (arg1) == ADDR_EXPR
11710 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11712 tree tem = fold_addr_of_array_ref_difference (loc, type,
11713 TREE_OPERAND (arg0, 0),
11714 TREE_OPERAND (arg1, 0),
11715 code
11716 == POINTER_DIFF_EXPR);
11717 if (tem)
11718 return tem;
11721 /* Further transformations are not for pointers. */
11722 if (code == POINTER_DIFF_EXPR)
11723 return NULL_TREE;
11725 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11726 if (TREE_CODE (arg0) == NEGATE_EXPR
11727 && negate_expr_p (op1)
11728 /* If arg0 is e.g. unsigned int and type is int, then this could
11729 introduce UB, because if A is INT_MIN at runtime, the original
11730 expression can be well defined while the latter is not.
11731 See PR83269. */
11732 && !(ANY_INTEGRAL_TYPE_P (type)
11733 && TYPE_OVERFLOW_UNDEFINED (type)
11734 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11735 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11736 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11737 fold_convert_loc (loc, type,
11738 TREE_OPERAND (arg0, 0)));
11740 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11741 __complex__ ( x, -y ). This is not the same for SNaNs or if
11742 signed zeros are involved. */
11743 if (!HONOR_SNANS (arg0)
11744 && !HONOR_SIGNED_ZEROS (arg0)
11745 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11747 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11748 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11749 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11750 bool arg0rz = false, arg0iz = false;
11751 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11752 || (arg0i && (arg0iz = real_zerop (arg0i))))
11754 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11755 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11756 if (arg0rz && arg1i && real_zerop (arg1i))
11758 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11759 arg1r ? arg1r
11760 : build1 (REALPART_EXPR, rtype, arg1));
11761 tree ip = arg0i ? arg0i
11762 : build1 (IMAGPART_EXPR, rtype, arg0);
11763 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11765 else if (arg0iz && arg1r && real_zerop (arg1r))
11767 tree rp = arg0r ? arg0r
11768 : build1 (REALPART_EXPR, rtype, arg0);
11769 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11770 arg1i ? arg1i
11771 : build1 (IMAGPART_EXPR, rtype, arg1));
11772 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11777 /* A - B -> A + (-B) if B is easily negatable. */
11778 if (negate_expr_p (op1)
11779 && ! TYPE_OVERFLOW_SANITIZED (type)
11780 && ((FLOAT_TYPE_P (type)
11781 /* Avoid this transformation if B is a positive REAL_CST. */
11782 && (TREE_CODE (op1) != REAL_CST
11783 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11784 || INTEGRAL_TYPE_P (type)))
11785 return fold_build2_loc (loc, PLUS_EXPR, type,
11786 fold_convert_loc (loc, type, arg0),
11787 negate_expr (op1));
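/* For example, for double X, X - (-2.5) folds to X + 2.5, while
   the REAL_CST check above keeps X - 2.5 from becoming X + -2.5.  */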
11789 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
11790 same or one of them 1.  Make sure the type is not saturating and has the signedness of
11791 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11792 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11793 if ((TREE_CODE (arg0) == MULT_EXPR
11794 || TREE_CODE (arg1) == MULT_EXPR)
11795 && !TYPE_SATURATING (type)
11796 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11797 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11798 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11800 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11801 if (tem)
11802 return tem;
11805 goto associate;
11807 case MULT_EXPR:
11808 if (! FLOAT_TYPE_P (type))
11810 /* Transform x * -C into -x * C if x is easily negatable. */
11811 if (TREE_CODE (op1) == INTEGER_CST
11812 && tree_int_cst_sgn (op1) == -1
11813 && negate_expr_p (op0)
11814 && negate_expr_p (op1)
11815 && (tem = negate_expr (op1)) != op1
11816 && ! TREE_OVERFLOW (tem))
11817 return fold_build2_loc (loc, MULT_EXPR, type,
11818 fold_convert_loc (loc, type,
11819 negate_expr (op0)), tem);
11821 strict_overflow_p = false;
11822 if (TREE_CODE (arg1) == INTEGER_CST
11823 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11824 &strict_overflow_p)) != 0)
11826 if (strict_overflow_p)
11827 fold_overflow_warning (("assuming signed overflow does not "
11828 "occur when simplifying "
11829 "multiplication"),
11830 WARN_STRICT_OVERFLOW_MISC);
11831 return fold_convert_loc (loc, type, tem);
11834 /* Optimize z * conj(z) for integer complex numbers. */
11835 if (TREE_CODE (arg0) == CONJ_EXPR
11836 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11837 return fold_mult_zconjz (loc, type, arg1);
11838 if (TREE_CODE (arg1) == CONJ_EXPR
11839 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11840 return fold_mult_zconjz (loc, type, arg0);
11842 else
11844 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11845 This is not the same for NaNs or if signed zeros are
11846 involved. */
11847 if (!HONOR_NANS (arg0)
11848 && !HONOR_SIGNED_ZEROS (arg0)
11849 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11850 && TREE_CODE (arg1) == COMPLEX_CST
11851 && real_zerop (TREE_REALPART (arg1)))
11853 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11854 if (real_onep (TREE_IMAGPART (arg1)))
11855 return
11856 fold_build2_loc (loc, COMPLEX_EXPR, type,
11857 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11858 rtype, arg0)),
11859 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11860 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11861 return
11862 fold_build2_loc (loc, COMPLEX_EXPR, type,
11863 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11864 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11865 rtype, arg0)));
11868 /* Optimize z * conj(z) for floating point complex numbers.
11869 Guarded by flag_unsafe_math_optimizations as non-finite
11870 imaginary components don't produce scalar results. */
11871 if (flag_unsafe_math_optimizations
11872 && TREE_CODE (arg0) == CONJ_EXPR
11873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11874 return fold_mult_zconjz (loc, type, arg1);
11875 if (flag_unsafe_math_optimizations
11876 && TREE_CODE (arg1) == CONJ_EXPR
11877 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11878 return fold_mult_zconjz (loc, type, arg0);
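/* For example, Z * conj(Z) folds via fold_mult_zconjz to
   __complex__ (__real Z * __real Z + __imag Z * __imag Z, 0).  */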
11880 goto associate;
11882 case BIT_IOR_EXPR:
11883 /* Canonicalize (X & C1) | C2. */
11884 if (TREE_CODE (arg0) == BIT_AND_EXPR
11885 && TREE_CODE (arg1) == INTEGER_CST
11886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11888 int width = TYPE_PRECISION (type), w;
11889 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11890 wide_int c2 = wi::to_wide (arg1);
11892 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11893 if ((c1 & c2) == c1)
11894 return omit_one_operand_loc (loc, type, arg1,
11895 TREE_OPERAND (arg0, 0));
11897 wide_int msk = wi::mask (width, false,
11898 TYPE_PRECISION (TREE_TYPE (arg1)));
11900 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11901 if (wi::bit_and_not (msk, c1 | c2) == 0)
11903 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11904 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11907 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11908 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11909 mode which allows further optimizations. */
11910 c1 &= msk;
11911 c2 &= msk;
11912 wide_int c3 = wi::bit_and_not (c1, c2);
11913 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11915 wide_int mask = wi::mask (w, false,
11916 TYPE_PRECISION (type));
11917 if (((c1 | c2) & mask) == mask
11918 && wi::bit_and_not (c1, mask) == 0)
11920 c3 = mask;
11921 break;
11925 if (c3 != c1)
11927 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11928 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11929 wide_int_to_tree (type, c3));
11930 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
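/* For example, for an 8-bit unsigned X, (X & 0x0f) | 0xff folds to
   0xff, (X & 0xf0) | 0x0f becomes X | 0x0f, and (X & 0x3c) | 0x24
   is canonicalized to (X & 0x18) | 0x24 by dropping from C1 the
   bits already set in C2.  */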
11934 /* See if this can be simplified into a rotate first. If that
11935 is unsuccessful, continue in the association code.  */
11936 goto bit_rotate;
11938 case BIT_XOR_EXPR:
11939 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11940 if (TREE_CODE (arg0) == BIT_AND_EXPR
11941 && INTEGRAL_TYPE_P (type)
11942 && integer_onep (TREE_OPERAND (arg0, 1))
11943 && integer_onep (arg1))
11944 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11945 build_zero_cst (TREE_TYPE (arg0)));
11947 /* See if this can be simplified into a rotate first. If that
11948 is unsuccessful, continue in the association code.  */
11949 goto bit_rotate;
11951 case BIT_AND_EXPR:
11952 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11954 && INTEGRAL_TYPE_P (type)
11955 && integer_onep (TREE_OPERAND (arg0, 1))
11956 && integer_onep (arg1))
11958 tree tem2;
11959 tem = TREE_OPERAND (arg0, 0);
11960 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11961 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11962 tem, tem2);
11963 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11964 build_zero_cst (TREE_TYPE (tem)));
11966 /* Fold ~X & 1 as (X & 1) == 0. */
11967 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11968 && INTEGRAL_TYPE_P (type)
11969 && integer_onep (arg1))
11971 tree tem2;
11972 tem = TREE_OPERAND (arg0, 0);
11973 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11974 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11975 tem, tem2);
11976 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11977 build_zero_cst (TREE_TYPE (tem)));
11979 /* Fold !X & 1 as X == 0. */
11980 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11981 && integer_onep (arg1))
11983 tem = TREE_OPERAND (arg0, 0);
11984 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11985 build_zero_cst (TREE_TYPE (tem)));
11988 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11989 multiple of 1 << CST. */
11990 if (TREE_CODE (arg1) == INTEGER_CST)
11992 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11993 wide_int ncst1 = -cst1;
11994 if ((cst1 & ncst1) == ncst1
11995 && multiple_of_p (type, arg0,
11996 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11997 return fold_convert_loc (loc, type, arg0);
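/* For example, (X * 8) & -4 folds to X * 8: -4 only clears the two
   low bits, and X * 8 is always a multiple of 4.  */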
12000 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12001 bits from CST2. */
12002 if (TREE_CODE (arg1) == INTEGER_CST
12003 && TREE_CODE (arg0) == MULT_EXPR
12004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12006 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
12007 wide_int masked
12008 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
12010 if (masked == 0)
12011 return omit_two_operands_loc (loc, type, build_zero_cst (type),
12012 arg0, arg1);
12013 else if (masked != warg1)
12015 /* Avoid the transform if arg1 is a mask of some
12016 mode which allows further optimizations. */
12017 int pop = wi::popcount (warg1);
12018 if (!(pop >= BITS_PER_UNIT
12019 && pow2p_hwi (pop)
12020 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
12021 return fold_build2_loc (loc, code, type, op0,
12022 wide_int_to_tree (type, masked));
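/* For example, (X * 4) & 3 folds to 0 and (X * 4) & 7 to
   (X * 4) & 4, since the two low bits of X * 4 are known zero;
   whole-byte masks like 0xff are kept because a mask covering some
   mode may enable further optimizations.  */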
12026 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12027 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12028 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12030 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12032 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
12033 if (mask == -1)
12034 return
12035 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12038 goto associate;
12040 case RDIV_EXPR:
12041 /* Don't touch a floating-point divide by zero unless the mode
12042 of the constant can represent infinity. */
12043 if (TREE_CODE (arg1) == REAL_CST
12044 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12045 && real_zerop (arg1))
12046 return NULL_TREE;
12048 /* (-A) / (-B) -> A / B */
12049 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12050 return fold_build2_loc (loc, RDIV_EXPR, type,
12051 TREE_OPERAND (arg0, 0),
12052 negate_expr (arg1));
12053 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12054 return fold_build2_loc (loc, RDIV_EXPR, type,
12055 negate_expr (arg0),
12056 TREE_OPERAND (arg1, 0));
12057 return NULL_TREE;
12059 case TRUNC_DIV_EXPR:
12060 /* Fall through */
12062 case FLOOR_DIV_EXPR:
12063 /* Simplify A / (B << N) where A and B are positive and B is
12064 a power of 2, to A >> (N + log2(B)). */
12065 strict_overflow_p = false;
12066 if (TREE_CODE (arg1) == LSHIFT_EXPR
12067 && (TYPE_UNSIGNED (type)
12068 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12070 tree sval = TREE_OPERAND (arg1, 0);
12071 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12073 tree sh_cnt = TREE_OPERAND (arg1, 1);
12074 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12075 wi::exact_log2 (wi::to_wide (sval)));
12077 if (strict_overflow_p)
12078 fold_overflow_warning (("assuming signed overflow does not "
12079 "occur when simplifying A / (B << N)"),
12080 WARN_STRICT_OVERFLOW_MISC);
12082 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12083 sh_cnt, pow2);
12084 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12085 fold_convert_loc (loc, type, arg0), sh_cnt);
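/* For example, for unsigned A, A / (4 << N) folds to A >> (N + 2).  */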
12089 /* Fall through */
12091 case ROUND_DIV_EXPR:
12092 case CEIL_DIV_EXPR:
12093 case EXACT_DIV_EXPR:
12094 if (integer_zerop (arg1))
12095 return NULL_TREE;
12097 /* Convert -A / -B to A / B when the type is signed and overflow is
12098 undefined. */
12099 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12100 && TREE_CODE (op0) == NEGATE_EXPR
12101 && negate_expr_p (op1))
12103 if (ANY_INTEGRAL_TYPE_P (type))
12104 fold_overflow_warning (("assuming signed overflow does not occur "
12105 "when distributing negation across "
12106 "division"),
12107 WARN_STRICT_OVERFLOW_MISC);
12108 return fold_build2_loc (loc, code, type,
12109 fold_convert_loc (loc, type,
12110 TREE_OPERAND (arg0, 0)),
12111 negate_expr (op1));
12113 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12114 && TREE_CODE (arg1) == NEGATE_EXPR
12115 && negate_expr_p (op0))
12117 if (ANY_INTEGRAL_TYPE_P (type))
12118 fold_overflow_warning (("assuming signed overflow does not occur "
12119 "when distributing negation across "
12120 "division"),
12121 WARN_STRICT_OVERFLOW_MISC);
12122 return fold_build2_loc (loc, code, type,
12123 negate_expr (op0),
12124 fold_convert_loc (loc, type,
12125 TREE_OPERAND (arg1, 0)));
12128 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12129 operation, EXACT_DIV_EXPR.
12131 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12132 At one time others generated faster code; it's not clear if they do
12133 after the last round of changes to the DIV code in expmed.cc.  */
12134 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12135 && multiple_of_p (type, arg0, arg1))
12136 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12137 fold_convert (type, arg0),
12138 fold_convert (type, arg1));
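/* For example, CEIL_DIV_EXPR (X * 4, 4) is known to be exact and
   becomes EXACT_DIV_EXPR (X * 4, 4).  */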
12140 strict_overflow_p = false;
12141 if (TREE_CODE (arg1) == INTEGER_CST
12142 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12143 &strict_overflow_p)) != 0)
12145 if (strict_overflow_p)
12146 fold_overflow_warning (("assuming signed overflow does not occur "
12147 "when simplifying division"),
12148 WARN_STRICT_OVERFLOW_MISC);
12149 return fold_convert_loc (loc, type, tem);
12152 return NULL_TREE;
12154 case CEIL_MOD_EXPR:
12155 case FLOOR_MOD_EXPR:
12156 case ROUND_MOD_EXPR:
12157 case TRUNC_MOD_EXPR:
12158 strict_overflow_p = false;
12159 if (TREE_CODE (arg1) == INTEGER_CST
12160 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12161 &strict_overflow_p)) != 0)
12163 if (strict_overflow_p)
12164 fold_overflow_warning (("assuming signed overflow does not occur "
12165 "when simplifying modulus"),
12166 WARN_STRICT_OVERFLOW_MISC);
12167 return fold_convert_loc (loc, type, tem);
12170 return NULL_TREE;
12172 case LROTATE_EXPR:
12173 case RROTATE_EXPR:
12174 case RSHIFT_EXPR:
12175 case LSHIFT_EXPR:
12176 /* Since negative shift count is not well-defined,
12177 don't try to compute it in the compiler. */
12178 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12179 return NULL_TREE;
12181 prec = element_precision (type);
12183 /* If we have a rotate of a bit operation with the rotate count and
12184 the second operand of the bit operation both constant,
12185 permute the two operations. */
12186 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12187 && (TREE_CODE (arg0) == BIT_AND_EXPR
12188 || TREE_CODE (arg0) == BIT_IOR_EXPR
12189 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12192 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12193 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12194 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12195 fold_build2_loc (loc, code, type,
12196 arg00, arg1),
12197 fold_build2_loc (loc, code, type,
12198 arg01, arg1));
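/* For example, for a 32-bit X, (X & 0xff00) r>> 8 is rewritten as
   (X r>> 8) & 0xff, with the constant rotated at compile time.  */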
12201 /* Two consecutive rotates adding up to some integer
12202 multiple of the precision of the type can be ignored. */
12203 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12204 && TREE_CODE (arg0) == RROTATE_EXPR
12205 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12206 && wi::umod_trunc (wi::to_wide (arg1)
12207 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12208 prec) == 0)
12209 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
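/* For example, for a 32-bit X, (X r>> 10) r>> 22 folds back to X,
   since 10 + 22 is a multiple of the precision.  */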
12211 return NULL_TREE;
12213 case MIN_EXPR:
12214 case MAX_EXPR:
12215 goto associate;
12217 case TRUTH_ANDIF_EXPR:
12218 /* Note that the operands of this must be ints
12219 and their values must be 0 or 1.
12220 ("true" is a fixed value perhaps depending on the language.) */
12221 /* If first arg is constant zero, return it. */
12222 if (integer_zerop (arg0))
12223 return fold_convert_loc (loc, type, arg0);
12224 /* FALLTHRU */
12225 case TRUTH_AND_EXPR:
12226 /* If either arg is constant true, drop it. */
12227 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12228 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12229 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12230 /* Preserve sequence points. */
12231 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12232 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12233 /* If second arg is constant zero, result is zero, but first arg
12234 must be evaluated. */
12235 if (integer_zerop (arg1))
12236 return omit_one_operand_loc (loc, type, arg1, arg0);
12237 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12238 case will be handled here. */
12239 if (integer_zerop (arg0))
12240 return omit_one_operand_loc (loc, type, arg0, arg1);
12242 /* !X && X is always false. */
12243 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12244 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12245 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12246 /* X && !X is always false. */
12247 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12248 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12249 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12251 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12252 means A >= Y && A != MAX, but in this case we know that
12253 A < X <= MAX. */
12255 if (!TREE_SIDE_EFFECTS (arg0)
12256 && !TREE_SIDE_EFFECTS (arg1))
12258 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12259 if (tem && !operand_equal_p (tem, arg0, 0))
12260 return fold_convert (type,
12261 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12262 tem, arg1));
12264 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12265 if (tem && !operand_equal_p (tem, arg1, 0))
12266 return fold_convert (type,
12267 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12268 arg0, tem));
12271 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12272 != NULL_TREE)
12273 return tem;
12275 return NULL_TREE;
12277 case TRUTH_ORIF_EXPR:
12278 /* Note that the operands of this must be ints
12279 and their values must be 0 or true.
12280 ("true" is a fixed value perhaps depending on the language.) */
12281 /* If first arg is constant true, return it. */
12282 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12283 return fold_convert_loc (loc, type, arg0);
12284 /* FALLTHRU */
12285 case TRUTH_OR_EXPR:
12286 /* If either arg is constant zero, drop it. */
12287 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12288 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12289 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12290 /* Preserve sequence points. */
12291 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12292 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12293 /* If second arg is constant true, result is true, but we must
12294 evaluate first arg. */
12295 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12296 return omit_one_operand_loc (loc, type, arg1, arg0);
12297 /* Likewise for first arg, but note this only occurs here for
12298 TRUTH_OR_EXPR. */
12299 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12300 return omit_one_operand_loc (loc, type, arg0, arg1);
12302 /* !X || X is always true. */
12303 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12304 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12305 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12306 /* X || !X is always true. */
12307 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12308 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12309 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12311 /* (X && !Y) || (!X && Y) is X ^ Y */
12312 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12313 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12315 tree a0, a1, l0, l1, n0, n1;
12317 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12318 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12320 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12321 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12323 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12324 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12326 if ((operand_equal_p (n0, a0, 0)
12327 && operand_equal_p (n1, a1, 0))
12328 || (operand_equal_p (n0, a1, 0)
12329 && operand_equal_p (n1, a0, 0)))
12330 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12333 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12334 != NULL_TREE)
12335 return tem;
12337 return NULL_TREE;
12339 case TRUTH_XOR_EXPR:
12340 /* If the second arg is constant zero, drop it. */
12341 if (integer_zerop (arg1))
12342 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12343 /* If the second arg is constant true, this is a logical inversion. */
12344 if (integer_onep (arg1))
12346 tem = invert_truthvalue_loc (loc, arg0);
12347 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12349 /* Identical arguments cancel to zero. */
12350 if (operand_equal_p (arg0, arg1, 0))
12351 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12353 /* !X ^ X is always true. */
12354 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12356 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12358 /* X ^ !X is always true. */
12359 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12360 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12361 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12363 return NULL_TREE;
12365 case EQ_EXPR:
12366 case NE_EXPR:
12367 STRIP_NOPS (arg0);
12368 STRIP_NOPS (arg1);
12370 tem = fold_comparison (loc, code, type, op0, op1);
12371 if (tem != NULL_TREE)
12372 return tem;
12374 /* bool_var != 1 becomes !bool_var. */
12375 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12376 && code == NE_EXPR)
12377 return fold_convert_loc (loc, type,
12378 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12379 TREE_TYPE (arg0), arg0));
12381 /* bool_var == 0 becomes !bool_var. */
12382 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12383 && code == EQ_EXPR)
12384 return fold_convert_loc (loc, type,
12385 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12386 TREE_TYPE (arg0), arg0));
12388 /* !exp != 0 becomes !exp */
12389 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12390 && code == NE_EXPR)
12391 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12393 /* If this is an EQ or NE comparison with zero and ARG0 is
12394 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12395 two operations, but the latter can be done in one less insn
12396 on machines that have only two-operand insns or on which a
12397 constant cannot be the first operand. */
12398 if (TREE_CODE (arg0) == BIT_AND_EXPR
12399 && integer_zerop (arg1))
12401 tree arg00 = TREE_OPERAND (arg0, 0);
12402 tree arg01 = TREE_OPERAND (arg0, 1);
12403 if (TREE_CODE (arg00) == LSHIFT_EXPR
12404 && integer_onep (TREE_OPERAND (arg00, 0)))
12406 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12407 arg01, TREE_OPERAND (arg00, 1));
12408 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12409 build_one_cst (TREE_TYPE (arg0)));
12410 return fold_build2_loc (loc, code, type,
12411 fold_convert_loc (loc, TREE_TYPE (arg1),
12412 tem), arg1);
12414 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12415 && integer_onep (TREE_OPERAND (arg01, 0)))
12417 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12418 arg00, TREE_OPERAND (arg01, 1));
12419 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12420 build_one_cst (TREE_TYPE (arg0)));
12421 return fold_build2_loc (loc, code, type,
12422 fold_convert_loc (loc, TREE_TYPE (arg1),
12423 tem), arg1);
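/* For example, ((1 << N) & FLAGS) != 0 becomes
   ((FLAGS >> N) & 1) != 0.  */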
12427 /* If this is a comparison of a field, we may be able to simplify it. */
12428 if ((TREE_CODE (arg0) == COMPONENT_REF
12429 || TREE_CODE (arg0) == BIT_FIELD_REF)
12430 /* Handle the constant case even without -O
12431 to make sure the warnings are given. */
12432 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12434 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12435 if (t1)
12436 return t1;
12439 /* Optimize comparisons of strlen vs zero to a compare of the
12440 first character of the string vs zero. To wit,
12441 strlen(ptr) == 0 => *ptr == 0
12442 strlen(ptr) != 0 => *ptr != 0
12443 Other cases should reduce to one of these two (or a constant)
12444 due to the return value of strlen being unsigned. */
12445 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12447 tree fndecl = get_callee_fndecl (arg0);
12449 if (fndecl
12450 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12451 && call_expr_nargs (arg0) == 1
12452 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12453 == POINTER_TYPE))
12455 tree ptrtype
12456 = build_pointer_type (build_qualified_type (char_type_node,
12457 TYPE_QUAL_CONST));
12458 tree ptr = fold_convert_loc (loc, ptrtype,
12459 CALL_EXPR_ARG (arg0, 0));
12460 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12461 return fold_build2_loc (loc, code, type, iref,
12462 build_int_cst (TREE_TYPE (iref), 0));
12466 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12467 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12468 if (TREE_CODE (arg0) == RSHIFT_EXPR
12469 && integer_zerop (arg1)
12470 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12472 tree arg00 = TREE_OPERAND (arg0, 0);
12473 tree arg01 = TREE_OPERAND (arg0, 1);
12474 tree itype = TREE_TYPE (arg00);
12475 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12477 if (TYPE_UNSIGNED (itype))
12479 itype = signed_type_for (itype);
12480 arg00 = fold_convert_loc (loc, itype, arg00);
12482 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12483 type, arg00, build_zero_cst (itype));
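/* For example, for a 32-bit signed X, (X >> 31) != 0 becomes X < 0
   and (X >> 31) == 0 becomes X >= 0; an unsigned X is first cast to
   the corresponding signed type.  */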
12487 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12488 (X & C) == 0 when C is a single bit. */
12489 if (TREE_CODE (arg0) == BIT_AND_EXPR
12490 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12491 && integer_zerop (arg1)
12492 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12494 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12495 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12496 TREE_OPERAND (arg0, 1));
12497 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12498 type, tem,
12499 fold_convert_loc (loc, TREE_TYPE (arg0),
12500 arg1));
12503 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12504 constant C is a power of two, i.e. a single bit. */
12505 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12506 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12507 && integer_zerop (arg1)
12508 && integer_pow2p (TREE_OPERAND (arg0, 1))
12509 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12510 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12512 tree arg00 = TREE_OPERAND (arg0, 0);
12513 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12514 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12517 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12518 when C is a power of two, i.e. a single bit.  */
12519 if (TREE_CODE (arg0) == BIT_AND_EXPR
12520 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12521 && integer_zerop (arg1)
12522 && integer_pow2p (TREE_OPERAND (arg0, 1))
12523 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12524 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12526 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12527 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12528 arg000, TREE_OPERAND (arg0, 1));
12529 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12530 tem, build_int_cst (TREE_TYPE (tem), 0));
12533 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12534 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12536 tree arg00 = TREE_OPERAND (arg0, 0);
12537 tree arg01 = TREE_OPERAND (arg0, 1);
12538 tree arg10 = TREE_OPERAND (arg1, 0);
12539 tree arg11 = TREE_OPERAND (arg1, 1);
12540 tree itype = TREE_TYPE (arg0);
12542 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12543 operand_equal_p guarantees no side-effects so we don't need
12544 to use omit_one_operand on Z. */
12545 if (operand_equal_p (arg01, arg11, 0))
12546 return fold_build2_loc (loc, code, type, arg00,
12547 fold_convert_loc (loc, TREE_TYPE (arg00),
12548 arg10));
12549 if (operand_equal_p (arg01, arg10, 0))
12550 return fold_build2_loc (loc, code, type, arg00,
12551 fold_convert_loc (loc, TREE_TYPE (arg00),
12552 arg11));
12553 if (operand_equal_p (arg00, arg11, 0))
12554 return fold_build2_loc (loc, code, type, arg01,
12555 fold_convert_loc (loc, TREE_TYPE (arg01),
12556 arg10));
12557 if (operand_equal_p (arg00, arg10, 0))
12558 return fold_build2_loc (loc, code, type, arg01,
12559 fold_convert_loc (loc, TREE_TYPE (arg01),
12560 arg11));
12562 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
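/* E.g. (x ^ 1) == (y ^ 3) folds to (x ^ 2) == y, because XOR by a
   constant is invertible and 1 ^ 3 == 2.  */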
12563 if (TREE_CODE (arg01) == INTEGER_CST
12564 && TREE_CODE (arg11) == INTEGER_CST)
12566 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12567 fold_convert_loc (loc, itype, arg11));
12568 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12569 return fold_build2_loc (loc, code, type, tem,
12570 fold_convert_loc (loc, itype, arg10));
12574 /* Attempt to simplify equality/inequality comparisons of complex
12575 values. Only lower the comparison if the result is known or
12576 can be simplified to a single scalar comparison. */
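/* E.g. if the real parts fold to a comparison that is known to be
   false, COMPLEX_EXPR <a, i0> == COMPLEX_EXPR <b, i1> reduces to
   false, and the imaginary operands are kept only for their side
   effects, if any.  */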
12577 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12578 || TREE_CODE (arg0) == COMPLEX_CST)
12579 && (TREE_CODE (arg1) == COMPLEX_EXPR
12580 || TREE_CODE (arg1) == COMPLEX_CST))
12582 tree real0, imag0, real1, imag1;
12583 tree rcond, icond;
12585 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12587 real0 = TREE_OPERAND (arg0, 0);
12588 imag0 = TREE_OPERAND (arg0, 1);
12590 else
12592 real0 = TREE_REALPART (arg0);
12593 imag0 = TREE_IMAGPART (arg0);
12596 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12598 real1 = TREE_OPERAND (arg1, 0);
12599 imag1 = TREE_OPERAND (arg1, 1);
12601 else
12603 real1 = TREE_REALPART (arg1);
12604 imag1 = TREE_IMAGPART (arg1);
12607 rcond = fold_binary_loc (loc, code, type, real0, real1);
12608 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12610 if (integer_zerop (rcond))
12612 if (code == EQ_EXPR)
12613 return omit_two_operands_loc (loc, type, boolean_false_node,
12614 imag0, imag1);
12615 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12617 else
12619 if (code == NE_EXPR)
12620 return omit_two_operands_loc (loc, type, boolean_true_node,
12621 imag0, imag1);
12622 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12626 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12627 if (icond && TREE_CODE (icond) == INTEGER_CST)
12629 if (integer_zerop (icond))
12631 if (code == EQ_EXPR)
12632 return omit_two_operands_loc (loc, type, boolean_false_node,
12633 real0, real1);
12634 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12636 else
12638 if (code == NE_EXPR)
12639 return omit_two_operands_loc (loc, type, boolean_true_node,
12640 real0, real1);
12641 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12646 return NULL_TREE;
12648 case LT_EXPR:
12649 case GT_EXPR:
12650 case LE_EXPR:
12651 case GE_EXPR:
12652 tem = fold_comparison (loc, code, type, op0, op1);
12653 if (tem != NULL_TREE)
12654 return tem;
12656 /* Transform comparisons of the form X +- C CMP X. */
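/* E.g. x - 1.0 > x is always false: it is false for ordinary values,
   and a NaN operand makes GT false as well, which is why the GT/LT
   cases below need no HONOR_NANS check, unlike the LE/GE ones.  */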
12657 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12658 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12659 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12660 && !HONOR_SNANS (arg0))
12662 tree arg01 = TREE_OPERAND (arg0, 1);
12663 enum tree_code code0 = TREE_CODE (arg0);
12664 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12666 /* (X - c) > X becomes false. */
12667 if (code == GT_EXPR
12668 && ((code0 == MINUS_EXPR && is_positive >= 0)
12669 || (code0 == PLUS_EXPR && is_positive <= 0)))
12670 return constant_boolean_node (0, type);
12672 /* Likewise (X + c) < X becomes false. */
12673 if (code == LT_EXPR
12674 && ((code0 == PLUS_EXPR && is_positive >= 0)
12675 || (code0 == MINUS_EXPR && is_positive <= 0)))
12676 return constant_boolean_node (0, type);
12678 /* Convert (X - c) <= X to true. */
12679 if (!HONOR_NANS (arg1)
12680 && code == LE_EXPR
12681 && ((code0 == MINUS_EXPR && is_positive >= 0)
12682 || (code0 == PLUS_EXPR && is_positive <= 0)))
12683 return constant_boolean_node (1, type);
12685 /* Convert (X + c) >= X to true. */
12686 if (!HONOR_NANS (arg1)
12687 && code == GE_EXPR
12688 && ((code0 == PLUS_EXPR && is_positive >= 0)
12689 || (code0 == MINUS_EXPR && is_positive <= 0)))
12690 return constant_boolean_node (1, type);
12693 /* If we are comparing an ABS_EXPR with a constant, we can
12694 convert all the cases into explicit comparisons, but they may
12695 well not be faster than doing the ABS and one comparison.
12696 But ABS (X) <= C is a range comparison, which becomes a subtraction
12697 and a comparison, and is probably faster. */
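/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, built below as a
   TRUTH_ANDIF_EXPR of the two comparisons.  */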
12698 if (code == LE_EXPR
12699 && TREE_CODE (arg1) == INTEGER_CST
12700 && TREE_CODE (arg0) == ABS_EXPR
12701 && ! TREE_SIDE_EFFECTS (arg0)
12702 && (tem = negate_expr (arg1)) != 0
12703 && TREE_CODE (tem) == INTEGER_CST
12704 && !TREE_OVERFLOW (tem))
12705 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12706 build2 (GE_EXPR, type,
12707 TREE_OPERAND (arg0, 0), tem),
12708 build2 (LE_EXPR, type,
12709 TREE_OPERAND (arg0, 0), arg1));
12711 /* Convert ABS_EXPR<x> >= 0 to true. */
12712 strict_overflow_p = false;
12713 if (code == GE_EXPR
12714 && (integer_zerop (arg1)
12715 || (! HONOR_NANS (arg0)
12716 && real_zerop (arg1)))
12717 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12719 if (strict_overflow_p)
12720 fold_overflow_warning (("assuming signed overflow does not occur "
12721 "when simplifying comparison of "
12722 "absolute value and zero"),
12723 WARN_STRICT_OVERFLOW_CONDITIONAL);
12724 return omit_one_operand_loc (loc, type,
12725 constant_boolean_node (true, type),
12726 arg0);
12729 /* Convert ABS_EXPR<x> < 0 to false. */
12730 strict_overflow_p = false;
12731 if (code == LT_EXPR
12732 && (integer_zerop (arg1) || real_zerop (arg1))
12733 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12735 if (strict_overflow_p)
12736 fold_overflow_warning (("assuming signed overflow does not occur "
12737 "when simplifying comparison of "
12738 "absolute value and zero"),
12739 WARN_STRICT_OVERFLOW_CONDITIONAL);
12740 return omit_one_operand_loc (loc, type,
12741 constant_boolean_node (false, type),
12742 arg0);
12745 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12746 and similarly for >= into !=. */
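/* E.g. for unsigned x, x < (1 << y) holds exactly when every bit of
   x at position y or above is zero, i.e. when (x >> y) == 0.  */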
12747 if ((code == LT_EXPR || code == GE_EXPR)
12748 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12749 && TREE_CODE (arg1) == LSHIFT_EXPR
12750 && integer_onep (TREE_OPERAND (arg1, 0)))
12751 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12752 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12753 TREE_OPERAND (arg1, 1)),
12754 build_zero_cst (TREE_TYPE (arg0)));
12756 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
12757 otherwise Y might be >= # of bits in X's type and thus e.g.
12758 (unsigned char) (1 << Y) for Y == 15 might be 0.
12759 If the cast is widening, then 1 << Y should have unsigned type,
12760 otherwise if Y is the number of bits in the signed shift type minus 1,
12761 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
12762 Y == 31 might be 0xffffffff80000000. */
12763 if ((code == LT_EXPR || code == GE_EXPR)
12764 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12765 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12766 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12767 && CONVERT_EXPR_P (arg1)
12768 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12769 && (element_precision (TREE_TYPE (arg1))
12770 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12771 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12772 || (element_precision (TREE_TYPE (arg1))
12773 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12774 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12776 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12777 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12778 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12779 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12780 build_zero_cst (TREE_TYPE (arg0)));
12783 return NULL_TREE;
12785 case UNORDERED_EXPR:
12786 case ORDERED_EXPR:
12787 case UNLT_EXPR:
12788 case UNLE_EXPR:
12789 case UNGT_EXPR:
12790 case UNGE_EXPR:
12791 case UNEQ_EXPR:
12792 case LTGT_EXPR:
12793 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12795 tree targ0 = strip_float_extensions (arg0);
12796 tree targ1 = strip_float_extensions (arg1);
12797 tree newtype = TREE_TYPE (targ0);
12799 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12800 newtype = TREE_TYPE (targ1);
12802 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0)))
12803 return fold_build2_loc (loc, code, type,
12804 fold_convert_loc (loc, newtype, targ0),
12805 fold_convert_loc (loc, newtype, targ1));
12808 return NULL_TREE;
12810 case COMPOUND_EXPR:
12811 /* When pedantic, a compound expression can be neither an lvalue
12812 nor an integer constant expression. */
12813 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12814 return NULL_TREE;
12815 /* Don't let (0, 0) be a null pointer constant. */
12816 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12817 : fold_convert_loc (loc, type, arg1);
12818 return tem;
12820 default:
12821 return NULL_TREE;
12822 } /* switch (code) */
12825 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12826 ((A & N) + B) & M -> (A + B) & M
12827 Similarly if (N & M) == 0,
12828 ((A | N) + B) & M -> (A + B) & M
12829 and for - instead of + (or unary - instead of +)
12830 and/or ^ instead of |.
12831 If B is constant and (B & M) == 0, fold into A & M.
12833 This function is a helper for match.pd patterns. Return the non-NULL
12834 type in which the simplified operation should be performed, but only
12835 if some optimization is possible; otherwise return NULL_TREE.
12837 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12838 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12839 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12840 +/-. */
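/* For example, with M == 7: ((a & 7) + b) & 7 simplifies to
   (a + b) & 7, and so does ((a | 8) + b) & 7, because carries only
   propagate upwards and bit 3 cannot influence bits 0-2 of the
   sum.  */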
12841 tree
12842 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12843 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12844 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12845 tree *pmop)
12847 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12848 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12849 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12850 if (~cst1 == 0
12851 || (cst1 & (cst1 + 1)) != 0
12852 || !INTEGRAL_TYPE_P (type)
12853 || (!TYPE_OVERFLOW_WRAPS (type)
12854 && TREE_CODE (type) != INTEGER_TYPE)
12855 || (wi::max_value (type) & cst1) != cst1)
12856 return NULL_TREE;
12858 enum tree_code codes[2] = { code00, code01 };
12859 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12860 int which = 0;
12861 wide_int cst0;
12863 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12864 arg1 (M) equals (1LL << cst) - 1.
12865 Store C into PMOP[0] and D into PMOP[1]. */
12866 pmop[0] = arg00;
12867 pmop[1] = arg01;
12868 which = code != NEGATE_EXPR;
12870 for (; which >= 0; which--)
12871 switch (codes[which])
12873 case BIT_AND_EXPR:
12874 case BIT_IOR_EXPR:
12875 case BIT_XOR_EXPR:
12876 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12877 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12878 if (codes[which] == BIT_AND_EXPR)
12880 if (cst0 != cst1)
12881 break;
12883 else if (cst0 != 0)
12884 break;
12885 /* If C or D is of the form (A & N) where
12886 (N & M) == M, or of the form (A | N) or
12887 (A ^ N) where (N & M) == 0, replace it with A. */
12888 pmop[which] = arg0xx[2 * which];
12889 break;
12890 case ERROR_MARK:
12891 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12892 break;
12893 /* If C or D is a N where (N & M) == 0, it can be
12894 omitted (replaced with 0). */
12895 if ((code == PLUS_EXPR
12896 || (code == MINUS_EXPR && which == 0))
12897 && (cst1 & wi::to_wide (pmop[which])) == 0)
12898 pmop[which] = build_int_cst (type, 0);
12899 /* Similarly, with C - N where (-N & M) == 0. */
12900 if (code == MINUS_EXPR
12901 && which == 1
12902 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12903 pmop[which] = build_int_cst (type, 0);
12904 break;
12905 default:
12906 gcc_unreachable ();
12909 /* Only build anything new if we optimized one or both arguments above. */
12910 if (pmop[0] == arg00 && pmop[1] == arg01)
12911 return NULL_TREE;
12913 if (TYPE_OVERFLOW_WRAPS (type))
12914 return type;
12915 else
12916 return unsigned_type_for (type);
12919 /* Used by contains_label_p and contains_label_1. */
12921 struct contains_label_data
12923 hash_set<tree> *pset;
12924 bool inside_switch_p;
12927 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12928 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12929 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12931 static tree
12932 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12934 contains_label_data *d = (contains_label_data *) data;
12935 switch (TREE_CODE (*tp))
12937 case LABEL_EXPR:
12938 return *tp;
12940 case CASE_LABEL_EXPR:
12941 if (!d->inside_switch_p)
12942 return *tp;
12943 return NULL_TREE;
12945 case SWITCH_EXPR:
12946 if (!d->inside_switch_p)
12948 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12949 return *tp;
12950 d->inside_switch_p = true;
12951 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12952 return *tp;
12953 d->inside_switch_p = false;
12954 *walk_subtrees = 0;
12956 return NULL_TREE;
12958 case GOTO_EXPR:
12959 *walk_subtrees = 0;
12960 return NULL_TREE;
12962 default:
12963 return NULL_TREE;
12967 /* Return whether the sub-tree ST contains a label which is accessible from
12968 outside the sub-tree. */
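/* This matters when deleting a dead COND_EXPR arm: if the arm
   contains a label, a goto elsewhere in the function may still
   target it, so the arm cannot simply be discarded.  */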
12970 static bool
12971 contains_label_p (tree st)
12973 hash_set<tree> pset;
12974 contains_label_data data = { &pset, false };
12975 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12978 /* Fold a ternary expression of code CODE and type TYPE with operands
12979 OP0, OP1, and OP2. Return the folded expression if folding is
12980 successful. Otherwise, return NULL_TREE. */
12982 tree
12983 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12984 tree op0, tree op1, tree op2)
12986 tree tem;
12987 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12988 enum tree_code_class kind = TREE_CODE_CLASS (code);
12990 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12991 && TREE_CODE_LENGTH (code) == 3);
12993 /* If this is a commutative operation, and OP0 is a constant, move it
12994 to OP1 to reduce the number of tests below. */
12995 if (commutative_ternary_tree_code (code)
12996 && tree_swap_operands_p (op0, op1))
12997 return fold_build3_loc (loc, code, type, op1, op0, op2);
12999 tem = generic_simplify (loc, code, type, op0, op1, op2);
13000 if (tem)
13001 return tem;
13003 /* Strip any conversions that don't change the mode. This is safe
13004 for every expression, except for a comparison expression because
13005 its signedness is derived from its operands. So, in the latter
13006 case, only strip conversions that don't change the signedness.
13008 Note that this is done as an internal manipulation within the
13009 constant folder, in order to find the simplest representation of
13010 the arguments so that their form can be studied. In any cases,
13011 the appropriate type conversions should be put back in the tree
13012 that will get out of the constant folder. */
13013 if (op0)
13015 arg0 = op0;
13016 STRIP_NOPS (arg0);
13019 if (op1)
13021 arg1 = op1;
13022 STRIP_NOPS (arg1);
13025 if (op2)
13027 arg2 = op2;
13028 STRIP_NOPS (arg2);
13031 switch (code)
13033 case COMPONENT_REF:
13034 if (TREE_CODE (arg0) == CONSTRUCTOR
13035 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13037 unsigned HOST_WIDE_INT idx;
13038 tree field, value;
13039 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13040 if (field == arg1)
13041 return value;
13043 return NULL_TREE;
13045 case COND_EXPR:
13046 case VEC_COND_EXPR:
13047 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13048 so all simple results must be passed through pedantic_non_lvalue. */
13049 if (TREE_CODE (arg0) == INTEGER_CST)
13051 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13052 tem = integer_zerop (arg0) ? op2 : op1;
13053 /* Only optimize constant conditions when the selected branch
13054 has the same type as the COND_EXPR. This avoids optimizing
13055 away "c ? x : throw", where the throw has a void type.
13056 Avoid throwing away an operand that contains a label. */
13057 if ((!TREE_SIDE_EFFECTS (unused_op)
13058 || !contains_label_p (unused_op))
13059 && (! VOID_TYPE_P (TREE_TYPE (tem))
13060 || VOID_TYPE_P (type)))
13061 return protected_set_expr_location_unshare (tem, loc);
13062 return NULL_TREE;
13064 else if (TREE_CODE (arg0) == VECTOR_CST)
13066 unsigned HOST_WIDE_INT nelts;
13067 if ((TREE_CODE (arg1) == VECTOR_CST
13068 || TREE_CODE (arg1) == CONSTRUCTOR)
13069 && (TREE_CODE (arg2) == VECTOR_CST
13070 || TREE_CODE (arg2) == CONSTRUCTOR)
13071 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13073 vec_perm_builder sel (nelts, nelts, 1);
13074 for (unsigned int i = 0; i < nelts; i++)
13076 tree val = VECTOR_CST_ELT (arg0, i);
13077 if (integer_all_onesp (val))
13078 sel.quick_push (i);
13079 else if (integer_zerop (val))
13080 sel.quick_push (nelts + i);
13081 else /* Currently unreachable. */
13082 return NULL_TREE;
13084 vec_perm_indices indices (sel, 2, nelts);
13085 tree t = fold_vec_perm (type, arg1, arg2, indices);
13086 if (t != NULL_TREE)
13087 return t;
13091 /* If we have A op B ? A : C, we may be able to convert this to a
13092 simpler expression, depending on the operation and the values
13093 of B and C. Signed zeros prevent all of these transformations,
13094 for reasons given above each one.
13096 Also try swapping the arguments and inverting the conditional. */
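/* E.g. "a < b ? a : b" can become MIN_EXPR <a, b>; signed zeros
   block this because the two forms can disagree about the sign of a
   zero result, e.g. for a == +0.0 and b == -0.0.  */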
13097 if (COMPARISON_CLASS_P (arg0)
13098 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13099 && !HONOR_SIGNED_ZEROS (op1))
13101 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13102 TREE_OPERAND (arg0, 0),
13103 TREE_OPERAND (arg0, 1),
13104 op1, op2);
13105 if (tem)
13106 return tem;
13109 if (COMPARISON_CLASS_P (arg0)
13110 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13111 && !HONOR_SIGNED_ZEROS (op2))
13113 enum tree_code comp_code = TREE_CODE (arg0);
13114 tree arg00 = TREE_OPERAND (arg0, 0);
13115 tree arg01 = TREE_OPERAND (arg0, 1);
13116 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13117 if (comp_code != ERROR_MARK)
13118 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13119 arg00,
13120 arg01,
13121 op2, op1);
13122 if (tem)
13123 return tem;
13126 /* If the second operand is simpler than the third, swap them
13127 since that produces better jump optimization results. */
13128 if (truth_value_p (TREE_CODE (arg0))
13129 && tree_swap_operands_p (op1, op2))
13131 location_t loc0 = expr_location_or (arg0, loc);
13132 /* See if this can be inverted. If it can't, possibly because
13133 it was a floating-point inequality comparison, don't do
13134 anything. */
13135 tem = fold_invert_truthvalue (loc0, arg0);
13136 if (tem)
13137 return fold_build3_loc (loc, code, type, tem, op2, op1);
13140 /* Convert A ? 1 : 0 to simply A. */
13141 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13142 : (integer_onep (op1)
13143 && !VECTOR_TYPE_P (type)))
13144 && integer_zerop (op2)
13145 /* If we try to convert OP0 to our type, the
13146 call to fold will try to move the conversion inside
13147 a COND, which will recurse. In that case, the COND_EXPR
13148 is probably the best choice, so leave it alone. */
13149 && type == TREE_TYPE (arg0))
13150 return protected_set_expr_location_unshare (arg0, loc);
13152 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13153 over COND_EXPR in cases such as floating point comparisons. */
13154 if (integer_zerop (op1)
13155 && code == COND_EXPR
13156 && integer_onep (op2)
13157 && !VECTOR_TYPE_P (type)
13158 && truth_value_p (TREE_CODE (arg0)))
13159 return fold_convert_loc (loc, type,
13160 invert_truthvalue_loc (loc, arg0));
13162 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
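/* E.g. for a 32-bit int, "x < 0 ? INT_MIN : 0" is just
   "x & INT_MIN": both select exactly the sign bit of x.  */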
13163 if (TREE_CODE (arg0) == LT_EXPR
13164 && integer_zerop (TREE_OPERAND (arg0, 1))
13165 && integer_zerop (op2)
13166 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13168 /* sign_bit_p looks through both zero and sign extensions,
13169 but for this optimization only sign extensions are
13170 usable. */
13171 tree tem2 = TREE_OPERAND (arg0, 0);
13172 while (tem != tem2)
13174 if (TREE_CODE (tem2) != NOP_EXPR
13175 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13177 tem = NULL_TREE;
13178 break;
13180 tem2 = TREE_OPERAND (tem2, 0);
13182 /* sign_bit_p only checks ARG1 bits within A's precision.
13183 If <sign bit of A> has wider type than A, bits outside
13184 of A's precision in <sign bit of A> need to be checked.
13185 If they are all 0, this optimization needs to be done
13186 in unsigned A's type, if they are all 1 in signed A's type,
13187 otherwise this can't be done. */
13188 if (tem
13189 && TYPE_PRECISION (TREE_TYPE (tem))
13190 < TYPE_PRECISION (TREE_TYPE (arg1))
13191 && TYPE_PRECISION (TREE_TYPE (tem))
13192 < TYPE_PRECISION (type))
13194 int inner_width, outer_width;
13195 tree tem_type;
13197 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13198 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13199 if (outer_width > TYPE_PRECISION (type))
13200 outer_width = TYPE_PRECISION (type);
13202 wide_int mask = wi::shifted_mask
13203 (inner_width, outer_width - inner_width, false,
13204 TYPE_PRECISION (TREE_TYPE (arg1)));
13206 wide_int common = mask & wi::to_wide (arg1);
13207 if (common == mask)
13209 tem_type = signed_type_for (TREE_TYPE (tem));
13210 tem = fold_convert_loc (loc, tem_type, tem);
13212 else if (common == 0)
13214 tem_type = unsigned_type_for (TREE_TYPE (tem));
13215 tem = fold_convert_loc (loc, tem_type, tem);
13217 else
13218 tem = NULL;
13221 if (tem)
13222 return
13223 fold_convert_loc (loc, type,
13224 fold_build2_loc (loc, BIT_AND_EXPR,
13225 TREE_TYPE (tem), tem,
13226 fold_convert_loc (loc,
13227 TREE_TYPE (tem),
13228 arg1)));
13231 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13232 already handled above. */
13233 if (TREE_CODE (arg0) == BIT_AND_EXPR
13234 && integer_onep (TREE_OPERAND (arg0, 1))
13235 && integer_zerop (op2)
13236 && integer_pow2p (arg1))
13238 tree tem = TREE_OPERAND (arg0, 0);
13239 STRIP_NOPS (tem);
13240 if (TREE_CODE (tem) == RSHIFT_EXPR
13241 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13242 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13243 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13244 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13245 fold_convert_loc (loc, type,
13246 TREE_OPERAND (tem, 0)),
13247 op1);
13250 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13251 is probably obsolete because the first operand should be a
13252 truth value (that's why we have the two cases above), but let's
13253 leave it in until we can confirm this for all front-ends. */
13254 if (integer_zerop (op2)
13255 && TREE_CODE (arg0) == NE_EXPR
13256 && integer_zerop (TREE_OPERAND (arg0, 1))
13257 && integer_pow2p (arg1)
13258 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13259 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13260 arg1, OEP_ONLY_CONST)
13261 /* operand_equal_p compares just value, not precision, so e.g.
13262 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13263 second operand 32-bit -128, which is not a power of two (or vice
13264 versa). */
13265 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13266 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13268 /* Disable the transformations below for vectors, since
13269 fold_binary_op_with_conditional_arg may undo them immediately,
13270 yielding an infinite loop. */
13271 if (code == VEC_COND_EXPR)
13272 return NULL_TREE;
13274 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13275 if (integer_zerop (op2)
13276 && truth_value_p (TREE_CODE (arg0))
13277 && truth_value_p (TREE_CODE (arg1))
13278 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13279 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13280 : TRUTH_ANDIF_EXPR,
13281 type, fold_convert_loc (loc, type, arg0), op1);
13283 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13284 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13285 && truth_value_p (TREE_CODE (arg0))
13286 && truth_value_p (TREE_CODE (arg1))
13287 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13289 location_t loc0 = expr_location_or (arg0, loc);
13290 /* Only perform transformation if ARG0 is easily inverted. */
13291 tem = fold_invert_truthvalue (loc0, arg0);
13292 if (tem)
13293 return fold_build2_loc (loc, code == VEC_COND_EXPR
13294 ? BIT_IOR_EXPR
13295 : TRUTH_ORIF_EXPR,
13296 type, fold_convert_loc (loc, type, tem),
13297 op1);
13300 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13301 if (integer_zerop (arg1)
13302 && truth_value_p (TREE_CODE (arg0))
13303 && truth_value_p (TREE_CODE (op2))
13304 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13306 location_t loc0 = expr_location_or (arg0, loc);
13307 /* Only perform transformation if ARG0 is easily inverted. */
13308 tem = fold_invert_truthvalue (loc0, arg0);
13309 if (tem)
13310 return fold_build2_loc (loc, code == VEC_COND_EXPR
13311 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13312 type, fold_convert_loc (loc, type, tem),
13313 op2);
13316 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13317 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13318 && truth_value_p (TREE_CODE (arg0))
13319 && truth_value_p (TREE_CODE (op2))
13320 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13321 return fold_build2_loc (loc, code == VEC_COND_EXPR
13322 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13323 type, fold_convert_loc (loc, type, arg0), op2);
13325 return NULL_TREE;
13327 case CALL_EXPR:
13328 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13329 of fold_ternary on them. */
13330 gcc_unreachable ();
13332 case BIT_FIELD_REF:
13333 if (TREE_CODE (arg0) == VECTOR_CST
13334 && (type == TREE_TYPE (TREE_TYPE (arg0))
13335 || (VECTOR_TYPE_P (type)
13336 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13337 && tree_fits_uhwi_p (op1)
13338 && tree_fits_uhwi_p (op2))
13340 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13341 unsigned HOST_WIDE_INT width
13342 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13343 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13344 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13345 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13347 if (n != 0
13348 && (idx % width) == 0
13349 && (n % width) == 0
13350 && known_le ((idx + n) / width,
13351 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13353 idx = idx / width;
13354 n = n / width;
13356 if (TREE_CODE (arg0) == VECTOR_CST)
13358 if (n == 1)
13360 tem = VECTOR_CST_ELT (arg0, idx);
13361 if (VECTOR_TYPE_P (type))
13362 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13363 return tem;
13366 tree_vector_builder vals (type, n, 1);
13367 for (unsigned i = 0; i < n; ++i)
13368 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13369 return vals.build ();
13374 /* On constants we can use native encode/interpret to constant
13375 fold (nearly) all BIT_FIELD_REFs. */
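/* E.g. extracting bits 32..63 of a 64-bit constant encodes the
   constant into a byte buffer with native_encode_expr and re-reads
   four bytes at offset 4 with native_interpret_expr, assuming 8-bit
   units and a byte-aligned position and size.  */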
13376 if (CONSTANT_CLASS_P (arg0)
13377 && can_native_interpret_type_p (type)
13378 && BITS_PER_UNIT == 8
13379 && tree_fits_uhwi_p (op1)
13380 && tree_fits_uhwi_p (op2))
13382 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13383 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13384 /* Limit us to a reasonable amount of work. To relax the
13385 other limitations we need bit-shifting of the buffer
13386 and rounding up the size. */
13387 if (bitpos % BITS_PER_UNIT == 0
13388 && bitsize % BITS_PER_UNIT == 0
13389 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13391 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13392 unsigned HOST_WIDE_INT len
13393 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13394 bitpos / BITS_PER_UNIT);
13395 if (len > 0
13396 && len * BITS_PER_UNIT >= bitsize)
13398 tree v = native_interpret_expr (type, b,
13399 bitsize / BITS_PER_UNIT);
13400 if (v)
13401 return v;
13406 return NULL_TREE;
13408 case VEC_PERM_EXPR:
13409 /* Perform constant folding of VEC_PERM_EXPR. */
13410 if (TREE_CODE (arg2) == VECTOR_CST
13411 && TREE_CODE (op0) == VECTOR_CST
13412 && TREE_CODE (op1) == VECTOR_CST)
13414 /* Build a vector of integers from the tree mask. */
13415 vec_perm_builder builder;
13416 if (!tree_to_vec_perm_builder (&builder, arg2))
13417 return NULL_TREE;
13419 /* Create a vec_perm_indices for the integer vector. */
13420 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13421 bool single_arg = (op0 == op1);
13422 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13423 return fold_vec_perm (type, op0, op1, sel);
13425 return NULL_TREE;
13427 case BIT_INSERT_EXPR:
13428 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13429 if (TREE_CODE (arg0) == INTEGER_CST
13430 && TREE_CODE (arg1) == INTEGER_CST)
13432 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13433 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13434 wide_int tem = (wi::to_wide (arg0)
13435 & wi::shifted_mask (bitpos, bitsize, true,
13436 TYPE_PRECISION (type)));
13437 wide_int tem2
13438 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13439 bitsize), bitpos);
13440 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13442 else if (TREE_CODE (arg0) == VECTOR_CST
13443 && CONSTANT_CLASS_P (arg1)
13444 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13445 TREE_TYPE (arg1)))
13447 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13448 unsigned HOST_WIDE_INT elsize
13449 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13450 if (bitpos % elsize == 0)
13452 unsigned k = bitpos / elsize;
13453 unsigned HOST_WIDE_INT nelts;
13454 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13455 return arg0;
13456 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13458 tree_vector_builder elts (type, nelts, 1);
13459 elts.quick_grow (nelts);
13460 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13461 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13462 return elts.build ();
13466 return NULL_TREE;
13468 default:
13469 return NULL_TREE;
13470 } /* switch (code) */
13473 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13474 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13475 constructor element index of the value returned. If the element is
13476 not found NULL_TREE is returned and *CTOR_IDX is updated to
13477 the index of the element after the ACCESS_INDEX position (which
13478 may be outside of the CTOR array). */
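/* E.g. for the initializer of "int a[4] = { [1] = 5 }", ACCESS_INDEX 1
   returns the INTEGER_CST 5, while ACCESS_INDEX 0 or 2 returns
   NULL_TREE.  */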
13480 tree
13481 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13482 unsigned *ctor_idx)
13484 tree index_type = NULL_TREE;
13485 signop index_sgn = UNSIGNED;
13486 offset_int low_bound = 0;
13488 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13490 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13491 if (domain_type && TYPE_MIN_VALUE (domain_type))
13493 /* Static constructors for variably sized objects make no sense. */
13494 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13495 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13496 /* ??? When it is obvious that the range is signed, treat it so. */
13497 if (TYPE_UNSIGNED (index_type)
13498 && TYPE_MAX_VALUE (domain_type)
13499 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13500 TYPE_MIN_VALUE (domain_type)))
13502 index_sgn = SIGNED;
13503 low_bound
13504 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13505 SIGNED);
13507 else
13509 index_sgn = TYPE_SIGN (index_type);
13510 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13515 if (index_type)
13516 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13517 index_sgn);
13519 offset_int index = low_bound;
13520 if (index_type)
13521 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13523 offset_int max_index = index;
13524 unsigned cnt;
13525 tree cfield, cval;
13526 bool first_p = true;
13528 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13530 /* Array constructor might explicitly set index, or specify a range,
13531 or leave index NULL, meaning that it is the next index after the
13532 previous one. */
13533 if (cfield)
13535 if (TREE_CODE (cfield) == INTEGER_CST)
13536 max_index = index
13537 = offset_int::from (wi::to_wide (cfield), index_sgn);
13538 else
13540 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13541 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13542 index_sgn);
13543 max_index
13544 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13545 index_sgn);
13546 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13549 else if (!first_p)
13551 index = max_index + 1;
13552 if (index_type)
13553 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13554 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13555 max_index = index;
13557 else
13558 first_p = false;
13560 /* Do we have a match? */
13561 if (wi::cmp (access_index, index, index_sgn) >= 0)
13563 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13565 if (ctor_idx)
13566 *ctor_idx = cnt;
13567 return cval;
13570 else if (in_gimple_form)
13571 /* We're past the element we search for. Note during parsing
13572 the elements might not be sorted.
13573 ??? We should use a binary search and a flag on the
13574 CONSTRUCTOR as to whether elements are sorted in declaration
13575 order. */
13576 break;
13578 if (ctor_idx)
13579 *ctor_idx = cnt;
13580 return NULL_TREE;
13583 /* Perform constant folding and related simplification of EXPR.
13584 The related simplifications include x*1 => x, x*0 => 0, etc.,
13585 and application of the associative law.
13586 NOP_EXPR conversions may be removed freely (as long as we
13587 are careful not to change the type of the overall expression).
13588 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13589 but we can constant-fold them if they have constant operands. */
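/* For example, fold applied to a PLUS_EXPR of two INTEGER_CSTs yields
   a single INTEGER_CST, and fold applied to x * 1 yields x;
   expressions that cannot be simplified are returned unchanged.  */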
13591 #ifdef ENABLE_FOLD_CHECKING
13592 # define fold(x) fold_1 (x)
13593 static tree fold_1 (tree);
13594 static
13595 #endif
13596 tree
13597 fold (tree expr)
13599 const tree t = expr;
13600 enum tree_code code = TREE_CODE (t);
13601 enum tree_code_class kind = TREE_CODE_CLASS (code);
13602 tree tem;
13603 location_t loc = EXPR_LOCATION (expr);
13605 /* Return right away if a constant. */
13606 if (kind == tcc_constant)
13607 return t;
13609 /* CALL_EXPR-like objects with variable numbers of operands are
13610 treated specially. */
13611 if (kind == tcc_vl_exp)
13613 if (code == CALL_EXPR)
13615 tem = fold_call_expr (loc, expr, false);
13616 return tem ? tem : expr;
13618 return expr;
13621 if (IS_EXPR_CODE_CLASS (kind))
13623 tree type = TREE_TYPE (t);
13624 tree op0, op1, op2;
13626 switch (TREE_CODE_LENGTH (code))
13628 case 1:
13629 op0 = TREE_OPERAND (t, 0);
13630 tem = fold_unary_loc (loc, code, type, op0);
13631 return tem ? tem : expr;
13632 case 2:
13633 op0 = TREE_OPERAND (t, 0);
13634 op1 = TREE_OPERAND (t, 1);
13635 tem = fold_binary_loc (loc, code, type, op0, op1);
13636 return tem ? tem : expr;
13637 case 3:
13638 op0 = TREE_OPERAND (t, 0);
13639 op1 = TREE_OPERAND (t, 1);
13640 op2 = TREE_OPERAND (t, 2);
13641 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13642 return tem ? tem : expr;
13643 default:
13644 break;
13648 switch (code)
13650 case ARRAY_REF:
13652 tree op0 = TREE_OPERAND (t, 0);
13653 tree op1 = TREE_OPERAND (t, 1);
13655 if (TREE_CODE (op1) == INTEGER_CST
13656 && TREE_CODE (op0) == CONSTRUCTOR
13657 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13659 tree val = get_array_ctor_element_at_index (op0,
13660 wi::to_offset (op1));
13661 if (val)
13662 return val;
13665 return t;
13668 /* Return a VECTOR_CST if possible. */
13669 case CONSTRUCTOR:
13671 tree type = TREE_TYPE (t);
13672 if (TREE_CODE (type) != VECTOR_TYPE)
13673 return t;
13675 unsigned i;
13676 tree val;
13677 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13678 if (! CONSTANT_CLASS_P (val))
13679 return t;
13681 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13684 case CONST_DECL:
13685 return fold (DECL_INITIAL (t));
13687 default:
13688 return t;
13689 } /* switch (code) */
13692 #ifdef ENABLE_FOLD_CHECKING
13693 #undef fold
13695 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13696 hash_table<nofree_ptr_hash<const tree_node> > *);
13697 static void fold_check_failed (const_tree, const_tree);
13698 void print_fold_checksum (const_tree);
13700 /* When --enable-checking=fold, compute a digest of expr before
13701 and after the actual fold call to verify that fold did not
13702 accidentally change the original expr. */
13704 tree
13705 fold (tree expr)
13707 tree ret;
13708 struct md5_ctx ctx;
13709 unsigned char checksum_before[16], checksum_after[16];
13710 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13712 md5_init_ctx (&ctx);
13713 fold_checksum_tree (expr, &ctx, &ht);
13714 md5_finish_ctx (&ctx, checksum_before);
13715 ht.empty ();
13717 ret = fold_1 (expr);
13719 md5_init_ctx (&ctx);
13720 fold_checksum_tree (expr, &ctx, &ht);
13721 md5_finish_ctx (&ctx, checksum_after);
13723 if (memcmp (checksum_before, checksum_after, 16))
13724 fold_check_failed (expr, ret);
13726 return ret;
13729 void
13730 print_fold_checksum (const_tree expr)
13732 struct md5_ctx ctx;
13733 unsigned char checksum[16], cnt;
13734 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13736 md5_init_ctx (&ctx);
13737 fold_checksum_tree (expr, &ctx, &ht);
13738 md5_finish_ctx (&ctx, checksum);
13739 for (cnt = 0; cnt < 16; ++cnt)
13740 fprintf (stderr, "%02x", checksum[cnt]);
13741 putc ('\n', stderr);
13744 static void
13745 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13747 internal_error ("fold check: original tree changed by fold");
13750 static void
13751 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13752 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13754 const tree_node **slot;
13755 enum tree_code code;
13756 union tree_node *buf;
13757 int i, len;
13759 recursive_label:
13760 if (expr == NULL)
13761 return;
13762 slot = ht->find_slot (expr, INSERT);
13763 if (*slot != NULL)
13764 return;
13765 *slot = expr;
13766 code = TREE_CODE (expr);
13767 if (TREE_CODE_CLASS (code) == tcc_declaration
13768 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13770 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13771 size_t sz = tree_size (expr);
13772 buf = XALLOCAVAR (union tree_node, sz);
13773 memcpy ((char *) buf, expr, sz);
13774 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13775 buf->decl_with_vis.symtab_node = NULL;
13776 buf->base.nowarning_flag = 0;
13777 expr = (tree) buf;
13779 else if (TREE_CODE_CLASS (code) == tcc_type
13780 && (TYPE_POINTER_TO (expr)
13781 || TYPE_REFERENCE_TO (expr)
13782 || TYPE_CACHED_VALUES_P (expr)
13783 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13784 || TYPE_NEXT_VARIANT (expr)
13785 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13787 /* Allow these fields to be modified. */
13788 tree tmp;
13789 size_t sz = tree_size (expr);
13790 buf = XALLOCAVAR (union tree_node, sz);
13791 memcpy ((char *) buf, expr, sz);
13792 expr = tmp = (tree) buf;
13793 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13794 TYPE_POINTER_TO (tmp) = NULL;
13795 TYPE_REFERENCE_TO (tmp) = NULL;
13796 TYPE_NEXT_VARIANT (tmp) = NULL;
13797 TYPE_ALIAS_SET (tmp) = -1;
13798 if (TYPE_CACHED_VALUES_P (tmp))
13800 TYPE_CACHED_VALUES_P (tmp) = 0;
13801 TYPE_CACHED_VALUES (tmp) = NULL;
13804 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13806 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13807 that and change builtins.cc etc. instead - see PR89543. */
13808 size_t sz = tree_size (expr);
13809 buf = XALLOCAVAR (union tree_node, sz);
13810 memcpy ((char *) buf, expr, sz);
13811 buf->base.nowarning_flag = 0;
13812 expr = (tree) buf;
13814 md5_process_bytes (expr, tree_size (expr), ctx);
13815 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13816 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13817 if (TREE_CODE_CLASS (code) != tcc_type
13818 && TREE_CODE_CLASS (code) != tcc_declaration
13819 && code != TREE_LIST
13820 && code != SSA_NAME
13821 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13822 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13823 switch (TREE_CODE_CLASS (code))
13825 case tcc_constant:
13826 switch (code)
13828 case STRING_CST:
13829 md5_process_bytes (TREE_STRING_POINTER (expr),
13830 TREE_STRING_LENGTH (expr), ctx);
13831 break;
13832 case COMPLEX_CST:
13833 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13834 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13835 break;
13836 case VECTOR_CST:
13837 len = vector_cst_encoded_nelts (expr);
13838 for (i = 0; i < len; ++i)
13839 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13840 break;
13841 default:
13842 break;
13844 break;
13845 case tcc_exceptional:
13846 switch (code)
13848 case TREE_LIST:
13849 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13850 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13851 expr = TREE_CHAIN (expr);
13852 goto recursive_label;
13853 break;
13854 case TREE_VEC:
13855 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13856 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13857 break;
13858 default:
13859 break;
13861 break;
13862 case tcc_expression:
13863 case tcc_reference:
13864 case tcc_comparison:
13865 case tcc_unary:
13866 case tcc_binary:
13867 case tcc_statement:
13868 case tcc_vl_exp:
13869 len = TREE_OPERAND_LENGTH (expr);
13870 for (i = 0; i < len; ++i)
13871 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13872 break;
13873 case tcc_declaration:
13874 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13875 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13876 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13878 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13879 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13880 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13881 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13882 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13885 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13887 if (TREE_CODE (expr) == FUNCTION_DECL)
13889 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13890 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13892 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13894 break;
13895 case tcc_type:
13896 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13897 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13898 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13899 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13900 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13901 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13902 if (INTEGRAL_TYPE_P (expr)
13903 || SCALAR_FLOAT_TYPE_P (expr))
13905 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13906 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13908 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13909 if (RECORD_OR_UNION_TYPE_P (expr))
13910 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13911 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13912 break;
13913 default:
13914 break;
13918 /* Helper function for outputting the checksum of a tree T. When
13919 debugging with gdb, you can "define mynext" to be "next" followed
13920 by "call debug_fold_checksum (op0)", then just trace down till the
13921 outputs differ. */
13923 DEBUG_FUNCTION void
13924 debug_fold_checksum (const_tree t)
13926 int i;
13927 unsigned char checksum[16];
13928 struct md5_ctx ctx;
13929 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13931 md5_init_ctx (&ctx);
13932 fold_checksum_tree (t, &ctx, &ht);
13933 md5_finish_ctx (&ctx, checksum);
13934 ht.empty ();
13936 for (i = 0; i < 16; i++)
13937 fprintf (stderr, "%d ", checksum[i]);
13939 fprintf (stderr, "\n");
13942 #endif
13944 /* Fold a unary tree expression with code CODE of type TYPE with an
13945 operand OP0. LOC is the location of the resulting expression.
13946 Return a folded expression if successful. Otherwise, return a tree
13947 expression with code CODE of type TYPE with an operand OP0. */
13949 tree
13950 fold_build1_loc (location_t loc,
13951 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13953 tree tem;
13954 #ifdef ENABLE_FOLD_CHECKING
13955 unsigned char checksum_before[16], checksum_after[16];
13956 struct md5_ctx ctx;
13957 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13959 md5_init_ctx (&ctx);
13960 fold_checksum_tree (op0, &ctx, &ht);
13961 md5_finish_ctx (&ctx, checksum_before);
13962 ht.empty ();
13963 #endif
13965 tem = fold_unary_loc (loc, code, type, op0);
13966 if (!tem)
13967 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13969 #ifdef ENABLE_FOLD_CHECKING
13970 md5_init_ctx (&ctx);
13971 fold_checksum_tree (op0, &ctx, &ht);
13972 md5_finish_ctx (&ctx, checksum_after);
13974 if (memcmp (checksum_before, checksum_after, 16))
13975 fold_check_failed (op0, tem);
13976 #endif
13977 return tem;
13980 /* Fold a binary tree expression with code CODE of type TYPE with
13981 operands OP0 and OP1. LOC is the location of the resulting
13982 expression. Return a folded expression if successful. Otherwise,
13983 return a tree expression with code CODE of type TYPE with operands
13984 OP0 and OP1. */
13986 tree
13987 fold_build2_loc (location_t loc,
13988 enum tree_code code, tree type, tree op0, tree op1
13989 MEM_STAT_DECL)
13991 tree tem;
13992 #ifdef ENABLE_FOLD_CHECKING
13993 unsigned char checksum_before_op0[16],
13994 checksum_before_op1[16],
13995 checksum_after_op0[16],
13996 checksum_after_op1[16];
13997 struct md5_ctx ctx;
13998 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14000 md5_init_ctx (&ctx);
14001 fold_checksum_tree (op0, &ctx, &ht);
14002 md5_finish_ctx (&ctx, checksum_before_op0);
14003 ht.empty ();
14005 md5_init_ctx (&ctx);
14006 fold_checksum_tree (op1, &ctx, &ht);
14007 md5_finish_ctx (&ctx, checksum_before_op1);
14008 ht.empty ();
14009 #endif
14011 tem = fold_binary_loc (loc, code, type, op0, op1);
14012 if (!tem)
14013 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14015 #ifdef ENABLE_FOLD_CHECKING
14016 md5_init_ctx (&ctx);
14017 fold_checksum_tree (op0, &ctx, &ht);
14018 md5_finish_ctx (&ctx, checksum_after_op0);
14019 ht.empty ();
14021 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14022 fold_check_failed (op0, tem);
14024 md5_init_ctx (&ctx);
14025 fold_checksum_tree (op1, &ctx, &ht);
14026 md5_finish_ctx (&ctx, checksum_after_op1);
14028 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14029 fold_check_failed (op1, tem);
14030 #endif
14031 return tem;
14034 /* Fold a ternary tree expression with code CODE of type TYPE with
14035 operands OP0, OP1, and OP2. Return a folded expression if
14036 successful. Otherwise, return a tree expression with code CODE of
14037 type TYPE with operands OP0, OP1, and OP2. */
14039 tree
14040 fold_build3_loc (location_t loc, enum tree_code code, tree type,
14041 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14043 tree tem;
14044 #ifdef ENABLE_FOLD_CHECKING
14045 unsigned char checksum_before_op0[16],
14046 checksum_before_op1[16],
14047 checksum_before_op2[16],
14048 checksum_after_op0[16],
14049 checksum_after_op1[16],
14050 checksum_after_op2[16];
14051 struct md5_ctx ctx;
14052 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14054 md5_init_ctx (&ctx);
14055 fold_checksum_tree (op0, &ctx, &ht);
14056 md5_finish_ctx (&ctx, checksum_before_op0);
14057 ht.empty ();
14059 md5_init_ctx (&ctx);
14060 fold_checksum_tree (op1, &ctx, &ht);
14061 md5_finish_ctx (&ctx, checksum_before_op1);
14062 ht.empty ();
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (op2, &ctx, &ht);
14066 md5_finish_ctx (&ctx, checksum_before_op2);
14067 ht.empty ();
14068 #endif
14070 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14071 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14072 if (!tem)
14073 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14075 #ifdef ENABLE_FOLD_CHECKING
14076 md5_init_ctx (&ctx);
14077 fold_checksum_tree (op0, &ctx, &ht);
14078 md5_finish_ctx (&ctx, checksum_after_op0);
14079 ht.empty ();
14081 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14082 fold_check_failed (op0, tem);
14084 md5_init_ctx (&ctx);
14085 fold_checksum_tree (op1, &ctx, &ht);
14086 md5_finish_ctx (&ctx, checksum_after_op1);
14087 ht.empty ();
14089 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14090 fold_check_failed (op1, tem);
14092 md5_init_ctx (&ctx);
14093 fold_checksum_tree (op2, &ctx, &ht);
14094 md5_finish_ctx (&ctx, checksum_after_op2);
14096 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14097 fold_check_failed (op2, tem);
14098 #endif
14099 return tem;
14102 /* Fold a CALL_EXPR expression of type TYPE with callee FN and NARGS
14103 arguments in ARGARRAY, and a null static chain.
14104 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14105 of type TYPE from the given operands as constructed by build_call_array. */
14107 tree
14108 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14109 int nargs, tree *argarray)
14111 tree tem;
14112 #ifdef ENABLE_FOLD_CHECKING
14113 unsigned char checksum_before_fn[16],
14114 checksum_before_arglist[16],
14115 checksum_after_fn[16],
14116 checksum_after_arglist[16];
14117 struct md5_ctx ctx;
14118 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14119 int i;
14121 md5_init_ctx (&ctx);
14122 fold_checksum_tree (fn, &ctx, &ht);
14123 md5_finish_ctx (&ctx, checksum_before_fn);
14124 ht.empty ();
14126 md5_init_ctx (&ctx);
14127 for (i = 0; i < nargs; i++)
14128 fold_checksum_tree (argarray[i], &ctx, &ht);
14129 md5_finish_ctx (&ctx, checksum_before_arglist);
14130 ht.empty ();
14131 #endif
14133 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14134 if (!tem)
14135 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14137 #ifdef ENABLE_FOLD_CHECKING
14138 md5_init_ctx (&ctx);
14139 fold_checksum_tree (fn, &ctx, &ht);
14140 md5_finish_ctx (&ctx, checksum_after_fn);
14141 ht.empty ();
14143 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14144 fold_check_failed (fn, tem);
14146 md5_init_ctx (&ctx);
14147 for (i = 0; i < nargs; i++)
14148 fold_checksum_tree (argarray[i], &ctx, &ht);
14149 md5_finish_ctx (&ctx, checksum_after_arglist);
14151 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14152 fold_check_failed (NULL_TREE, tem);
14153 #endif
14154 return tem;
14157 /* Perform constant folding and related simplification of initializer
14158 expression EXPR. These behave identically to "fold_buildN" but ignore
14159 potential run-time traps and exceptions that fold must preserve. */
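/* E.g. a floating-point operation in a static initializer may be
   folded here even with -ftrapping-math, since an initializer
   evaluated at compile time can produce no run-time trap.  */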
14161 #define START_FOLD_INIT \
14162 int saved_signaling_nans = flag_signaling_nans;\
14163 int saved_trapping_math = flag_trapping_math;\
14164 int saved_rounding_math = flag_rounding_math;\
14165 int saved_trapv = flag_trapv;\
14166 int saved_folding_initializer = folding_initializer;\
14167 flag_signaling_nans = 0;\
14168 flag_trapping_math = 0;\
14169 flag_rounding_math = 0;\
14170 flag_trapv = 0;\
14171 folding_initializer = 1;
14173 #define END_FOLD_INIT \
14174 flag_signaling_nans = saved_signaling_nans;\
14175 flag_trapping_math = saved_trapping_math;\
14176 flag_rounding_math = saved_rounding_math;\
14177 flag_trapv = saved_trapv;\
14178 folding_initializer = saved_folding_initializer;
14180 tree
14181 fold_init (tree expr)
14183 tree result;
14184 START_FOLD_INIT;
14186 result = fold (expr);
14188 END_FOLD_INIT;
14189 return result;
14192 tree
14193 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14194 tree type, tree op)
14196 tree result;
14197 START_FOLD_INIT;
14199 result = fold_build1_loc (loc, code, type, op);
14201 END_FOLD_INIT;
14202 return result;
14205 tree
14206 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14207 tree type, tree op0, tree op1)
14209 tree result;
14210 START_FOLD_INIT;
14212 result = fold_build2_loc (loc, code, type, op0, op1);
14214 END_FOLD_INIT;
14215 return result;
14218 tree
14219 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14220 int nargs, tree *argarray)
14222 tree result;
14223 START_FOLD_INIT;
14225 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14227 END_FOLD_INIT;
14228 return result;
14231 tree
14232 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14233 tree lhs, tree rhs)
14235 tree result;
14236 START_FOLD_INIT;
14238 result = fold_binary_loc (loc, code, type, lhs, rhs);
14240 END_FOLD_INIT;
14241 return result;
14244 #undef START_FOLD_INIT
14245 #undef END_FOLD_INIT
14247 /* Determine if first argument is a multiple of second argument. Return
14248 false if it is not, or we cannot easily determine it to be.
14250 An example of the sort of thing we care about (at this point; this routine
14251 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14252 fold cases do now) is discovering that
14254 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14256 is a multiple of
14258 SAVE_EXPR (J * 8)
14260 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14262 This code also handles discovering that
14264 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14266 is a multiple of 8 so we don't have to worry about dealing with a
14267 possible remainder.
14269 Note that we *look* inside a SAVE_EXPR only to determine how it was
14270 calculated; it is not safe for fold to do much of anything else with the
14271 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14272 at run time. For example, the latter example above *cannot* be implemented
14273 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14274 evaluation time of the original SAVE_EXPR is not necessarily the same at
14275 the time the new expression is evaluated. The only optimization of this
14276 sort that would be valid is changing
14278 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14280 divided by 8 to
14282 SAVE_EXPR (I) * SAVE_EXPR (J)
14284 (where the same SAVE_EXPR (J) is used in the original and the
14285 transformed version).
14287 NOWRAP specifies whether all outer operations in TYPE should
14288 be considered not wrapping. Any type conversion within TOP acts
14289 as a barrier and we will fall back to NOWRAP being false.
14290 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14291 as not wrapping even though they are generally using unsigned arithmetic. */
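/* For example, multiple_of_p is true for TOP == J * 8 + 16 and
   BOTTOM == 8 in sizetype with NOWRAP set, but false for
   TOP == x * 6 and BOTTOM == 4, since 6 is not a multiple of 4 and
   nothing further is known about x.  */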
14293 bool
14294 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14296 gimple *stmt;
14297 tree op1, op2;
14299 if (operand_equal_p (top, bottom, 0))
14300 return true;
14302 if (TREE_CODE (type) != INTEGER_TYPE)
14303 return false;
14305 switch (TREE_CODE (top))
14307 case BIT_AND_EXPR:
14308 /* Bitwise and provides a power of two multiple. If the mask is
14309 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14310 if (!integer_pow2p (bottom))
14311 return false;
14312 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14313 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14315 case MULT_EXPR:
14316 /* If the multiplication can wrap we cannot recurse further unless
14317 the bottom is a power of two which is where wrapping does not
14318 matter. */
14319 if (!nowrap
14320 && !TYPE_OVERFLOW_UNDEFINED (type)
14321 && !integer_pow2p (bottom))
14322 return false;
14323 if (TREE_CODE (bottom) == INTEGER_CST)
14325 op1 = TREE_OPERAND (top, 0);
14326 op2 = TREE_OPERAND (top, 1);
14327 if (TREE_CODE (op1) == INTEGER_CST)
14328 std::swap (op1, op2);
14329 if (TREE_CODE (op2) == INTEGER_CST)
14331 if (multiple_of_p (type, op2, bottom, nowrap))
14332 return true;
14333 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14334 if (multiple_of_p (type, bottom, op2, nowrap))
14336 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14337 wi::to_widest (op2));
14338 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14340 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14341 return multiple_of_p (type, op1, op2, nowrap);
14344 return multiple_of_p (type, op1, bottom, nowrap);
14347 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14348 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14350 case LSHIFT_EXPR:
14351 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14352 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14354 op1 = TREE_OPERAND (top, 1);
14355 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14357 wide_int mul_op
14358 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14359 return multiple_of_p (type,
14360 wide_int_to_tree (type, mul_op), bottom,
14361 nowrap);
14364 return false;
14366 case MINUS_EXPR:
14367 case PLUS_EXPR:
14368 /* If the addition or subtraction can wrap we cannot recurse further
14369 unless bottom is a power of two which is where wrapping does not
14370 matter. */
14371 if (!nowrap
14372 && !TYPE_OVERFLOW_UNDEFINED (type)
14373 && !integer_pow2p (bottom))
14374 return false;
14376 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14377 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14378 but 0xfffffffd is not. */
14379 op1 = TREE_OPERAND (top, 1);
14380 if (TREE_CODE (top) == PLUS_EXPR
14381 && nowrap
14382 && TYPE_UNSIGNED (type)
14383 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14384 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14386 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14387 of bottom, so be conservative here and check whether both op0 and op1
14388 are multiples of bottom. Note we check the second operand first
14389 since it's usually simpler. */
14390 return (multiple_of_p (type, op1, bottom, nowrap)
14391 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14393 CASE_CONVERT:
14394 /* Can't handle conversions from non-integral or wider integral type. */
14395 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14396 || (TYPE_PRECISION (type)
14397 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14398 return false;
14399 /* NOWRAP only extends to operations in the outermost type so
14400 make sure to strip it off here. */
14401 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14402 TREE_OPERAND (top, 0), bottom, false);
14404 case SAVE_EXPR:
14405 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14407 case COND_EXPR:
14408 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14409 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14411 case INTEGER_CST:
14412 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14413 return false;
14414 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14415 SIGNED);
14417 case SSA_NAME:
14418 if (TREE_CODE (bottom) == INTEGER_CST
14419 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14420 && gimple_code (stmt) == GIMPLE_ASSIGN)
14422 enum tree_code code = gimple_assign_rhs_code (stmt);
14424 /* Check for special cases to see if top is defined as multiple
14425 of bottom:
14427 top = (X & ~(bottom - 1)) ; bottom is power of 2
14429 or
14431 Y = X % bottom
14432 top = X - Y. */
14433 if (code == BIT_AND_EXPR
14434 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14435 && TREE_CODE (op2) == INTEGER_CST
14436 && integer_pow2p (bottom)
14437 && wi::multiple_of_p (wi::to_widest (op2),
14438 wi::to_widest (bottom), UNSIGNED))
14439 return true;
14441 op1 = gimple_assign_rhs1 (stmt);
14442 if (code == MINUS_EXPR
14443 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14444 && TREE_CODE (op2) == SSA_NAME
14445 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14446 && gimple_code (stmt) == GIMPLE_ASSIGN
14447 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14448 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14449 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14450 return true;
14453 /* fall through */
14455 default:
14456 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14457 return multiple_p (wi::to_poly_widest (top),
14458 wi::to_poly_widest (bottom));
14460 return false;
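/* Editor's illustrative sketch (not part of GCC): a few concrete answers
   the routine above gives, in the informal tree notation of this file's
   comments.  NOWRAP is false throughout:

     multiple_of_p (type, 24, 8)          => true  (INTEGER_CST case)
     multiple_of_p (type, J * 8, 8)       => true  (MULT_EXPR case)
     multiple_of_p (type, (J * 8) + 4, 8) => false (PLUS_EXPR: 4 % 8 != 0)
     multiple_of_p (type, X << 3, 8)      => true  (LSHIFT_EXPR as X * 8)

   A minimal call from C++, assuming the usual tree-building helpers
   from tree.h:

     tree type = integer_type_node;
     bool ok = multiple_of_p (type, build_int_cst (type, 24),
                              build_int_cst (type, 8), false);  // true
*/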
14464 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14465 This function returns true for integer expressions, and returns
14466 false if uncertain. */
14468 bool
14469 tree_expr_finite_p (const_tree x)
14471 machine_mode mode = element_mode (x);
14472 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14473 return true;
14474 switch (TREE_CODE (x))
14476 case REAL_CST:
14477 return real_isfinite (TREE_REAL_CST_PTR (x));
14478 case COMPLEX_CST:
14479 return tree_expr_finite_p (TREE_REALPART (x))
14480 && tree_expr_finite_p (TREE_IMAGPART (x));
14481 case FLOAT_EXPR:
14482 return true;
14483 case ABS_EXPR:
14484 case CONVERT_EXPR:
14485 case NON_LVALUE_EXPR:
14486 case NEGATE_EXPR:
14487 case SAVE_EXPR:
14488 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14489 case MIN_EXPR:
14490 case MAX_EXPR:
14491 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14492 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14493 case COND_EXPR:
14494 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14495 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14496 case CALL_EXPR:
14497 switch (get_call_combined_fn (x))
14499 CASE_CFN_FABS:
14500 CASE_CFN_FABS_FN:
14501 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14502 CASE_CFN_FMAX:
14503 CASE_CFN_FMAX_FN:
14504 CASE_CFN_FMIN:
14505 CASE_CFN_FMIN_FN:
14506 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14507 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14508 default:
14509 return false;
14512 default:
14513 return false;
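/* Editor's illustrative sketch (not part of GCC): the predicate above is
   deliberately one-sided -- it may return false for an expression that is
   in fact finite, but never returns true for one that might not be.  E.g.

     tree_expr_finite_p (fabs (x))   => finite iff x is
     tree_expr_finite_p (x + y)      => false (no PLUS_EXPR case:
                                       finite + finite can overflow to Inf)
     tree_expr_finite_p ((float) i)  => true (FLOAT_EXPR of an integer)
*/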
14517 /* Return true if expression X evaluates to an infinity.
14518 This function returns false for integer expressions. */
14520 bool
14521 tree_expr_infinite_p (const_tree x)
14523 if (!HONOR_INFINITIES (x))
14524 return false;
14525 switch (TREE_CODE (x))
14527 case REAL_CST:
14528 return real_isinf (TREE_REAL_CST_PTR (x));
14529 case ABS_EXPR:
14530 case NEGATE_EXPR:
14531 case NON_LVALUE_EXPR:
14532 case SAVE_EXPR:
14533 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14534 case COND_EXPR:
14535 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14536 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14537 default:
14538 return false;
14542 /* Return true if expression X could evaluate to an infinity.
14543 This function returns false for integer expressions, and returns
14544 true if uncertain. */
14546 bool
14547 tree_expr_maybe_infinite_p (const_tree x)
14549 if (!HONOR_INFINITIES (x))
14550 return false;
14551 switch (TREE_CODE (x))
14553 case REAL_CST:
14554 return real_isinf (TREE_REAL_CST_PTR (x));
14555 case FLOAT_EXPR:
14556 return false;
14557 case ABS_EXPR:
14558 case NEGATE_EXPR:
14559 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14560 case COND_EXPR:
14561 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14562 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14563 default:
14564 return true;
14568 /* Return true if expression X evaluates to a signaling NaN.
14569 This function returns false for integer expressions. */
14571 bool
14572 tree_expr_signaling_nan_p (const_tree x)
14574 if (!HONOR_SNANS (x))
14575 return false;
14576 switch (TREE_CODE (x))
14578 case REAL_CST:
14579 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14580 case NON_LVALUE_EXPR:
14581 case SAVE_EXPR:
14582 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14583 case COND_EXPR:
14584 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14585 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14586 default:
14587 return false;
14591 /* Return true if expression X could evaluate to a signaling NaN.
14592 This function returns false for integer expressions, and returns
14593 true if uncertain. */
14595 bool
14596 tree_expr_maybe_signaling_nan_p (const_tree x)
14598 if (!HONOR_SNANS (x))
14599 return false;
14600 switch (TREE_CODE (x))
14602 case REAL_CST:
14603 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14604 case FLOAT_EXPR:
14605 return false;
14606 case ABS_EXPR:
14607 case CONVERT_EXPR:
14608 case NEGATE_EXPR:
14609 case NON_LVALUE_EXPR:
14610 case SAVE_EXPR:
14611 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14612 case MIN_EXPR:
14613 case MAX_EXPR:
14614 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14615 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14616 case COND_EXPR:
14617 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14618 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14619 case CALL_EXPR:
14620 switch (get_call_combined_fn (x))
14622 CASE_CFN_FABS:
14623 CASE_CFN_FABS_FN:
14624 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14625 CASE_CFN_FMAX:
14626 CASE_CFN_FMAX_FN:
14627 CASE_CFN_FMIN:
14628 CASE_CFN_FMIN_FN:
14629 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14630 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14631 default:
14632 return true;
14634 default:
14635 return true;
14639 /* Return true if expression X evaluates to a NaN.
14640 This function returns false for integer expressions. */
14642 bool
14643 tree_expr_nan_p (const_tree x)
14645 if (!HONOR_NANS (x))
14646 return false;
14647 switch (TREE_CODE (x))
14649 case REAL_CST:
14650 return real_isnan (TREE_REAL_CST_PTR (x));
14651 case NON_LVALUE_EXPR:
14652 case SAVE_EXPR:
14653 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14654 case COND_EXPR:
14655 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14656 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14657 default:
14658 return false;
14662 /* Return true if expression X could evaluate to a NaN.
14663 This function returns false for integer expressions, and returns
14664 true if uncertain. */
14666 bool
14667 tree_expr_maybe_nan_p (const_tree x)
14669 if (!HONOR_NANS (x))
14670 return false;
14671 switch (TREE_CODE (x))
14673 case REAL_CST:
14674 return real_isnan (TREE_REAL_CST_PTR (x));
14675 case FLOAT_EXPR:
14676 return false;
14677 case PLUS_EXPR:
14678 case MINUS_EXPR:
14679 case MULT_EXPR:
14680 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14681 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14682 case ABS_EXPR:
14683 case CONVERT_EXPR:
14684 case NEGATE_EXPR:
14685 case NON_LVALUE_EXPR:
14686 case SAVE_EXPR:
14687 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14688 case MIN_EXPR:
14689 case MAX_EXPR:
14690 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14691 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14692 case COND_EXPR:
14693 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14694 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14695 case CALL_EXPR:
14696 switch (get_call_combined_fn (x))
14698 CASE_CFN_FABS:
14699 CASE_CFN_FABS_FN:
14700 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14701 CASE_CFN_FMAX:
14702 CASE_CFN_FMAX_FN:
14703 CASE_CFN_FMIN:
14704 CASE_CFN_FMIN_FN:
14705 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14706 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14707 default:
14708 return true;
14710 default:
14711 return true;
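/* Editor's note (illustrative, not part of GCC): the PLUS_EXPR, MINUS_EXPR
   and MULT_EXPR case above asks about finiteness rather than recursing on
   tree_expr_maybe_nan_p, because arithmetic on NaN-free operands can still
   create a NaN from infinities:

     Inf - Inf => NaN,   0 * Inf => NaN

   so x + y is known NaN-free only when neither operand can be an
   infinity or a NaN, i.e. when both are known finite.  */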
14715 /* Return true if expression X could evaluate to -0.0.
14716 This function returns true if uncertain. */
14718 bool
14719 tree_expr_maybe_real_minus_zero_p (const_tree x)
14721 if (!HONOR_SIGNED_ZEROS (x))
14722 return false;
14723 switch (TREE_CODE (x))
14725 case REAL_CST:
14726 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14727 case INTEGER_CST:
14728 case FLOAT_EXPR:
14729 case ABS_EXPR:
14730 return false;
14731 case NON_LVALUE_EXPR:
14732 case SAVE_EXPR:
14733 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14734 case COND_EXPR:
14735 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14736 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14737 case CALL_EXPR:
14738 switch (get_call_combined_fn (x))
14740 CASE_CFN_FABS:
14741 CASE_CFN_FABS_FN:
14742 return false;
14743 default:
14744 break;
14746 default:
14747 break;
14749 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14750 but currently those predicates require tree and not const_tree. */
14751 return true;
14754 #define tree_expr_nonnegative_warnv_p(X, Y) \
14755 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14757 #define RECURSE(X) \
14758 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
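/* Editor's note (illustrative, not part of GCC): the pair of macros above
   enforces depth-limited recursion.  The first turns any direct textual
   call to tree_expr_nonnegative_warnv_p in this section into a
   compile-time error via _Pragma; RECURSE bypasses it by parenthesizing
   the function name -- "(f) (args)" is not expanded as a function-like
   macro -- and bumps DEPTH so the SSA_NAME case can cap the search:

     if (RECURSE (op0))  // calls tree_expr_nonnegative_warnv_p (op0,
       ...               //        strict_overflow_p, depth + 1)
*/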
14760 /* Return true if CODE or TYPE is known to be non-negative. */
14762 static bool
14763 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14765 if (!VECTOR_TYPE_P (type)
14766 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14767 && truth_value_p (code))
14768 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14769 have a signed:1 type (where the values are -1 and 0). */
14770 return true;
14771 return false;
14774 /* Return true if (CODE OP0) is known to be non-negative. If the return
14775 value is based on the assumption that signed overflow is undefined,
14776 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14777 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14779 bool
14780 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14781 bool *strict_overflow_p, int depth)
14783 if (TYPE_UNSIGNED (type))
14784 return true;
14786 switch (code)
14788 case ABS_EXPR:
14789 /* We can't return 1 if flag_wrapv is set because
14790 ABS_EXPR<INT_MIN> = INT_MIN. */
14791 if (!ANY_INTEGRAL_TYPE_P (type))
14792 return true;
14793 if (TYPE_OVERFLOW_UNDEFINED (type))
14795 *strict_overflow_p = true;
14796 return true;
14798 break;
14800 case NON_LVALUE_EXPR:
14801 case FLOAT_EXPR:
14802 case FIX_TRUNC_EXPR:
14803 return RECURSE (op0);
14805 CASE_CONVERT:
14807 tree inner_type = TREE_TYPE (op0);
14808 tree outer_type = type;
14810 if (SCALAR_FLOAT_TYPE_P (outer_type))
14812 if (SCALAR_FLOAT_TYPE_P (inner_type))
14813 return RECURSE (op0);
14814 if (INTEGRAL_TYPE_P (inner_type))
14816 if (TYPE_UNSIGNED (inner_type))
14817 return true;
14818 return RECURSE (op0);
14821 else if (INTEGRAL_TYPE_P (outer_type))
14823 if (SCALAR_FLOAT_TYPE_P (inner_type))
14824 return RECURSE (op0);
14825 if (INTEGRAL_TYPE_P (inner_type))
14826 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14827 && TYPE_UNSIGNED (inner_type);
14830 break;
14832 default:
14833 return tree_simple_nonnegative_warnv_p (code, type);
14836 /* We don't know sign of `t', so be conservative and return false. */
14837 return false;
14840 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14845 bool
14846 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14847 tree op1, bool *strict_overflow_p,
14848 int depth)
14850 if (TYPE_UNSIGNED (type))
14851 return true;
14853 switch (code)
14855 case POINTER_PLUS_EXPR:
14856 case PLUS_EXPR:
14857 if (FLOAT_TYPE_P (type))
14858 return RECURSE (op0) && RECURSE (op1);
14860 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14861 both unsigned and at least 2 bits shorter than the result. */
14862 if (TREE_CODE (type) == INTEGER_TYPE
14863 && TREE_CODE (op0) == NOP_EXPR
14864 && TREE_CODE (op1) == NOP_EXPR)
14866 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14867 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14868 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14869 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14871 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14872 TYPE_PRECISION (inner2)) + 1;
14873 return prec < TYPE_PRECISION (type);
14876 break;
14878 case MULT_EXPR:
14879 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14881 /* x * x is always non-negative for floating point x
14882 or when signed overflow is undefined. */
14883 if (operand_equal_p (op0, op1, 0)
14884 || (RECURSE (op0) && RECURSE (op1)))
14886 if (ANY_INTEGRAL_TYPE_P (type)
14887 && TYPE_OVERFLOW_UNDEFINED (type))
14888 *strict_overflow_p = true;
14889 return true;
14893 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14894 both unsigned and the sum of their precisions is less than that of the result. */
14895 if (TREE_CODE (type) == INTEGER_TYPE
14896 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14897 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14899 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14900 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14901 : TREE_TYPE (op0);
14902 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14903 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14904 : TREE_TYPE (op1);
14906 bool unsigned0 = TYPE_UNSIGNED (inner0);
14907 bool unsigned1 = TYPE_UNSIGNED (inner1);
14909 if (TREE_CODE (op0) == INTEGER_CST)
14910 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14912 if (TREE_CODE (op1) == INTEGER_CST)
14913 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14915 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14916 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14918 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14919 ? tree_int_cst_min_precision (op0, UNSIGNED)
14920 : TYPE_PRECISION (inner0);
14922 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14923 ? tree_int_cst_min_precision (op1, UNSIGNED)
14924 : TYPE_PRECISION (inner1);
14926 return precision0 + precision1 < TYPE_PRECISION (type);
14929 return false;
14931 case BIT_AND_EXPR:
14932 return RECURSE (op0) || RECURSE (op1);
14934 case MAX_EXPR:
14935 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14936 things. */
14937 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14938 return RECURSE (op0) && RECURSE (op1);
14939 return RECURSE (op0) || RECURSE (op1);
14941 case BIT_IOR_EXPR:
14942 case BIT_XOR_EXPR:
14943 case MIN_EXPR:
14944 case RDIV_EXPR:
14945 case TRUNC_DIV_EXPR:
14946 case CEIL_DIV_EXPR:
14947 case FLOOR_DIV_EXPR:
14948 case ROUND_DIV_EXPR:
14949 return RECURSE (op0) && RECURSE (op1);
14951 case TRUNC_MOD_EXPR:
14952 return RECURSE (op0);
14954 case FLOOR_MOD_EXPR:
14955 return RECURSE (op1);
14957 case CEIL_MOD_EXPR:
14958 case ROUND_MOD_EXPR:
14959 default:
14960 return tree_simple_nonnegative_warnv_p (code, type);
14963 /* We don't know sign of `t', so be conservative and return false. */
14964 return false;
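/* Editor's worked example (illustrative, not part of GCC) for the
   zero_extend cases above.  With 8-bit unsigned X and Y widened to
   32-bit signed int:

     X + Y <= 255 + 255 = 510 < 2^31    (needs max (8, 8) + 1 = 9 < 32)
     X * Y <= 255 * 255 = 65025 < 2^31  (needs 8 + 8 = 16 < 32)

   so both sums and products of the zero-extended values stay
   non-negative in the wider signed type.  */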
14967 /* Return true if T is known to be non-negative. If the return
14968 value is based on the assumption that signed overflow is undefined,
14969 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14970 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14972 bool
14973 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14975 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14976 return true;
14978 switch (TREE_CODE (t))
14980 case INTEGER_CST:
14981 return tree_int_cst_sgn (t) >= 0;
14983 case REAL_CST:
14984 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14986 case FIXED_CST:
14987 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14989 case COND_EXPR:
14990 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14992 case SSA_NAME:
14993 /* Limit the depth of recursion to avoid quadratic behavior.
14994 This is expected to catch almost all occurrences in practice.
14995 If this code misses important cases that unbounded recursion
14996 would not, passes that need this information could be revised
14997 to provide it through dataflow propagation. */
14998 return (!name_registered_for_update_p (t)
14999 && depth < param_max_ssa_name_query_depth
15000 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15001 strict_overflow_p, depth));
15003 default:
15004 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15008 /* Return true if T is known to be non-negative. If the return
15009 value is based on the assumption that signed overflow is undefined,
15010 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15011 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15013 bool
15014 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15015 bool *strict_overflow_p, int depth)
15017 switch (fn)
15019 CASE_CFN_ACOS:
15020 CASE_CFN_ACOS_FN:
15021 CASE_CFN_ACOSH:
15022 CASE_CFN_ACOSH_FN:
15023 CASE_CFN_CABS:
15024 CASE_CFN_CABS_FN:
15025 CASE_CFN_COSH:
15026 CASE_CFN_COSH_FN:
15027 CASE_CFN_ERFC:
15028 CASE_CFN_ERFC_FN:
15029 CASE_CFN_EXP:
15030 CASE_CFN_EXP_FN:
15031 CASE_CFN_EXP10:
15032 CASE_CFN_EXP2:
15033 CASE_CFN_EXP2_FN:
15034 CASE_CFN_FABS:
15035 CASE_CFN_FABS_FN:
15036 CASE_CFN_FDIM:
15037 CASE_CFN_FDIM_FN:
15038 CASE_CFN_HYPOT:
15039 CASE_CFN_HYPOT_FN:
15040 CASE_CFN_POW10:
15041 CASE_CFN_FFS:
15042 CASE_CFN_PARITY:
15043 CASE_CFN_POPCOUNT:
15044 CASE_CFN_CLZ:
15045 CASE_CFN_CLRSB:
15046 case CFN_BUILT_IN_BSWAP16:
15047 case CFN_BUILT_IN_BSWAP32:
15048 case CFN_BUILT_IN_BSWAP64:
15049 case CFN_BUILT_IN_BSWAP128:
15050 /* Always true. */
15051 return true;
15053 CASE_CFN_SQRT:
15054 CASE_CFN_SQRT_FN:
15055 /* sqrt(-0.0) is -0.0. */
15056 if (!HONOR_SIGNED_ZEROS (type))
15057 return true;
15058 return RECURSE (arg0);
15060 CASE_CFN_ASINH:
15061 CASE_CFN_ASINH_FN:
15062 CASE_CFN_ATAN:
15063 CASE_CFN_ATAN_FN:
15064 CASE_CFN_ATANH:
15065 CASE_CFN_ATANH_FN:
15066 CASE_CFN_CBRT:
15067 CASE_CFN_CBRT_FN:
15068 CASE_CFN_CEIL:
15069 CASE_CFN_CEIL_FN:
15070 CASE_CFN_ERF:
15071 CASE_CFN_ERF_FN:
15072 CASE_CFN_EXPM1:
15073 CASE_CFN_EXPM1_FN:
15074 CASE_CFN_FLOOR:
15075 CASE_CFN_FLOOR_FN:
15076 CASE_CFN_FMOD:
15077 CASE_CFN_FMOD_FN:
15078 CASE_CFN_FREXP:
15079 CASE_CFN_FREXP_FN:
15080 CASE_CFN_ICEIL:
15081 CASE_CFN_IFLOOR:
15082 CASE_CFN_IRINT:
15083 CASE_CFN_IROUND:
15084 CASE_CFN_LCEIL:
15085 CASE_CFN_LDEXP:
15086 CASE_CFN_LFLOOR:
15087 CASE_CFN_LLCEIL:
15088 CASE_CFN_LLFLOOR:
15089 CASE_CFN_LLRINT:
15090 CASE_CFN_LLRINT_FN:
15091 CASE_CFN_LLROUND:
15092 CASE_CFN_LLROUND_FN:
15093 CASE_CFN_LRINT:
15094 CASE_CFN_LRINT_FN:
15095 CASE_CFN_LROUND:
15096 CASE_CFN_LROUND_FN:
15097 CASE_CFN_MODF:
15098 CASE_CFN_MODF_FN:
15099 CASE_CFN_NEARBYINT:
15100 CASE_CFN_NEARBYINT_FN:
15101 CASE_CFN_RINT:
15102 CASE_CFN_RINT_FN:
15103 CASE_CFN_ROUND:
15104 CASE_CFN_ROUND_FN:
15105 CASE_CFN_ROUNDEVEN:
15106 CASE_CFN_ROUNDEVEN_FN:
15107 CASE_CFN_SCALB:
15108 CASE_CFN_SCALBLN:
15109 CASE_CFN_SCALBLN_FN:
15110 CASE_CFN_SCALBN:
15111 CASE_CFN_SCALBN_FN:
15112 CASE_CFN_SIGNBIT:
15113 CASE_CFN_SIGNIFICAND:
15114 CASE_CFN_SINH:
15115 CASE_CFN_SINH_FN:
15116 CASE_CFN_TANH:
15117 CASE_CFN_TANH_FN:
15118 CASE_CFN_TRUNC:
15119 CASE_CFN_TRUNC_FN:
15120 /* True if the 1st argument is nonnegative. */
15121 return RECURSE (arg0);
15123 CASE_CFN_FMAX:
15124 CASE_CFN_FMAX_FN:
15125 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15126 things. In the presence of sNaNs, we're only guaranteed to be
15127 non-negative if both operands are non-negative. In the presence
15128 of qNaNs, we're non-negative if either operand is non-negative
15129 and can't be a qNaN, or if both operands are non-negative. */
15130 if (tree_expr_maybe_signaling_nan_p (arg0)
15131 || tree_expr_maybe_signaling_nan_p (arg1))
15132 return RECURSE (arg0) && RECURSE (arg1);
15133 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15134 || RECURSE (arg1))
15135 : (RECURSE (arg1)
15136 && !tree_expr_maybe_nan_p (arg1));
15138 CASE_CFN_FMIN:
15139 CASE_CFN_FMIN_FN:
15140 /* True if the 1st AND 2nd arguments are nonnegative. */
15141 return RECURSE (arg0) && RECURSE (arg1);
15143 CASE_CFN_COPYSIGN:
15144 CASE_CFN_COPYSIGN_FN:
15145 /* True if the 2nd argument is nonnegative. */
15146 return RECURSE (arg1);
15148 CASE_CFN_POWI:
15149 /* True if the 1st argument is nonnegative or the second
15150 argument is an even integer. */
15151 if (TREE_CODE (arg1) == INTEGER_CST
15152 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15153 return true;
15154 return RECURSE (arg0);
15156 CASE_CFN_POW:
15157 CASE_CFN_POW_FN:
15158 /* True if the 1st argument is nonnegative or the second
15159 argument is an even integer valued real. */
15160 if (TREE_CODE (arg1) == REAL_CST)
15162 REAL_VALUE_TYPE c;
15163 HOST_WIDE_INT n;
15165 c = TREE_REAL_CST (arg1);
15166 n = real_to_integer (&c);
15167 if ((n & 1) == 0)
15169 REAL_VALUE_TYPE cint;
15170 real_from_integer (&cint, VOIDmode, n, SIGNED);
15171 if (real_identical (&c, &cint))
15172 return true;
15175 return RECURSE (arg0);
15177 default:
15178 break;
15180 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
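/* Editor's note (illustrative, not part of GCC): the POW case above
   recognizes even integer-valued REAL_CST exponents, e.g.

     pow (x, 2.0)  => treated as non-negative for any x
     pow (x, 3.0)  => non-negative only if x is

   The real_to_integer/real_from_integer round-trip verifies the constant
   is exactly an even integer, so an exponent like 2.5 does not qualify
   (it truncates to 2 but fails the real_identical check).  */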
15183 /* Return true if T is known to be non-negative. If the return
15184 value is based on the assumption that signed overflow is undefined,
15185 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15186 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15188 static bool
15189 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15191 enum tree_code code = TREE_CODE (t);
15192 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15193 return true;
15195 switch (code)
15197 case TARGET_EXPR:
15199 tree temp = TARGET_EXPR_SLOT (t);
15200 t = TARGET_EXPR_INITIAL (t);
15202 /* If the initializer is non-void, then it's a normal expression
15203 that will be assigned to the slot. */
15204 if (!VOID_TYPE_P (TREE_TYPE (t)))
15205 return RECURSE (t);
15207 /* Otherwise, the initializer sets the slot in some way. One common
15208 way is an assignment statement at the end of the initializer. */
15209 while (1)
15211 if (TREE_CODE (t) == BIND_EXPR)
15212 t = expr_last (BIND_EXPR_BODY (t));
15213 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15214 || TREE_CODE (t) == TRY_CATCH_EXPR)
15215 t = expr_last (TREE_OPERAND (t, 0));
15216 else if (TREE_CODE (t) == STATEMENT_LIST)
15217 t = expr_last (t);
15218 else
15219 break;
15221 if (TREE_CODE (t) == MODIFY_EXPR
15222 && TREE_OPERAND (t, 0) == temp)
15223 return RECURSE (TREE_OPERAND (t, 1));
15225 return false;
15228 case CALL_EXPR:
15230 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15231 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15233 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15234 get_call_combined_fn (t),
15235 arg0,
15236 arg1,
15237 strict_overflow_p, depth);
15239 case COMPOUND_EXPR:
15240 case MODIFY_EXPR:
15241 return RECURSE (TREE_OPERAND (t, 1));
15243 case BIND_EXPR:
15244 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15246 case SAVE_EXPR:
15247 return RECURSE (TREE_OPERAND (t, 0));
15249 default:
15250 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15254 #undef RECURSE
15255 #undef tree_expr_nonnegative_warnv_p
15257 /* Return true if T is known to be non-negative. If the return
15258 value is based on the assumption that signed overflow is undefined,
15259 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15260 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15262 bool
15263 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15265 enum tree_code code;
15266 if (t == error_mark_node)
15267 return false;
15269 code = TREE_CODE (t);
15270 switch (TREE_CODE_CLASS (code))
15272 case tcc_binary:
15273 case tcc_comparison:
15274 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15275 TREE_TYPE (t),
15276 TREE_OPERAND (t, 0),
15277 TREE_OPERAND (t, 1),
15278 strict_overflow_p, depth);
15280 case tcc_unary:
15281 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15282 TREE_TYPE (t),
15283 TREE_OPERAND (t, 0),
15284 strict_overflow_p, depth);
15286 case tcc_constant:
15287 case tcc_declaration:
15288 case tcc_reference:
15289 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15291 default:
15292 break;
15295 switch (code)
15297 case TRUTH_AND_EXPR:
15298 case TRUTH_OR_EXPR:
15299 case TRUTH_XOR_EXPR:
15300 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15301 TREE_TYPE (t),
15302 TREE_OPERAND (t, 0),
15303 TREE_OPERAND (t, 1),
15304 strict_overflow_p, depth);
15305 case TRUTH_NOT_EXPR:
15306 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15307 TREE_TYPE (t),
15308 TREE_OPERAND (t, 0),
15309 strict_overflow_p, depth);
15311 case COND_EXPR:
15312 case CONSTRUCTOR:
15313 case OBJ_TYPE_REF:
15314 case ADDR_EXPR:
15315 case WITH_SIZE_EXPR:
15316 case SSA_NAME:
15317 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15319 default:
15320 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15324 /* Return true if `t' is known to be non-negative. Handle warnings
15325 about undefined signed overflow. */
15327 bool
15328 tree_expr_nonnegative_p (tree t)
15330 bool ret, strict_overflow_p;
15332 strict_overflow_p = false;
15333 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15334 if (strict_overflow_p)
15335 fold_overflow_warning (("assuming signed overflow does not occur when "
15336 "determining that expression is always "
15337 "non-negative"),
15338 WARN_STRICT_OVERFLOW_MISC);
15339 return ret;
15343 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15344 For floating point we further ensure that T is not denormal.
15345 Similar logic is present in nonzero_address_p in rtlanal.cc.
15347 If the return value is based on the assumption that signed overflow
15348 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15349 change *STRICT_OVERFLOW_P. */
15351 bool
15352 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15353 bool *strict_overflow_p)
15355 switch (code)
15357 case ABS_EXPR:
15358 return tree_expr_nonzero_warnv_p (op0,
15359 strict_overflow_p);
15361 case NOP_EXPR:
15363 tree inner_type = TREE_TYPE (op0);
15364 tree outer_type = type;
15366 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15367 && tree_expr_nonzero_warnv_p (op0,
15368 strict_overflow_p));
15370 break;
15372 case NON_LVALUE_EXPR:
15373 return tree_expr_nonzero_warnv_p (op0,
15374 strict_overflow_p);
15376 default:
15377 break;
15380 return false;
15383 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15384 For floating point we further ensure that T is not denormal.
15385 Similar logic is present in nonzero_address_p in rtlanal.cc.
15387 If the return value is based on the assumption that signed overflow
15388 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15389 change *STRICT_OVERFLOW_P. */
15391 bool
15392 tree_binary_nonzero_warnv_p (enum tree_code code,
15393 tree type,
15394 tree op0,
15395 tree op1, bool *strict_overflow_p)
15397 bool sub_strict_overflow_p;
15398 switch (code)
15400 case POINTER_PLUS_EXPR:
15401 case PLUS_EXPR:
15402 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15404 /* In the presence of negative values it is hard
15405 to say anything. */
15406 sub_strict_overflow_p = false;
15407 if (!tree_expr_nonnegative_warnv_p (op0,
15408 &sub_strict_overflow_p)
15409 || !tree_expr_nonnegative_warnv_p (op1,
15410 &sub_strict_overflow_p))
15411 return false;
15412 /* One of the operands must be positive and the other non-negative. */
15413 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15414 overflows, on a twos-complement machine the sum of two
15415 nonnegative numbers can never be zero. */
15416 return (tree_expr_nonzero_warnv_p (op0,
15417 strict_overflow_p)
15418 || tree_expr_nonzero_warnv_p (op1,
15419 strict_overflow_p));
15421 break;
15423 case MULT_EXPR:
15424 if (TYPE_OVERFLOW_UNDEFINED (type))
15426 if (tree_expr_nonzero_warnv_p (op0,
15427 strict_overflow_p)
15428 && tree_expr_nonzero_warnv_p (op1,
15429 strict_overflow_p))
15431 *strict_overflow_p = true;
15432 return true;
15435 break;
15437 case MIN_EXPR:
15438 sub_strict_overflow_p = false;
15439 if (tree_expr_nonzero_warnv_p (op0,
15440 &sub_strict_overflow_p)
15441 && tree_expr_nonzero_warnv_p (op1,
15442 &sub_strict_overflow_p))
15444 if (sub_strict_overflow_p)
15445 *strict_overflow_p = true;
15447 break;
15449 case MAX_EXPR:
15450 sub_strict_overflow_p = false;
15451 if (tree_expr_nonzero_warnv_p (op0,
15452 &sub_strict_overflow_p))
15454 if (sub_strict_overflow_p)
15455 *strict_overflow_p = true;
15457 /* When both operands are nonzero, MAX must be too. */
15458 if (tree_expr_nonzero_warnv_p (op1,
15459 strict_overflow_p))
15460 return true;
15462 /* MAX where operand 0 is positive is positive. */
15463 return tree_expr_nonnegative_warnv_p (op0,
15464 strict_overflow_p);
15466 /* MAX where operand 1 is positive is positive. */
15467 else if (tree_expr_nonzero_warnv_p (op1,
15468 &sub_strict_overflow_p)
15469 && tree_expr_nonnegative_warnv_p (op1,
15470 &sub_strict_overflow_p))
15472 if (sub_strict_overflow_p)
15473 *strict_overflow_p = true;
15474 return true;
15476 break;
15478 case BIT_IOR_EXPR:
15479 return (tree_expr_nonzero_warnv_p (op1,
15480 strict_overflow_p)
15481 || tree_expr_nonzero_warnv_p (op0,
15482 strict_overflow_p));
15484 default:
15485 break;
15488 return false;
15491 /* Return true when T is an address and is known to be nonzero.
15492 For floating point we further ensure that T is not denormal.
15493 Similar logic is present in nonzero_address_p in rtlanal.cc.
15495 If the return value is based on the assumption that signed overflow
15496 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15497 change *STRICT_OVERFLOW_P. */
15499 bool
15500 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15502 bool sub_strict_overflow_p;
15503 switch (TREE_CODE (t))
15505 case INTEGER_CST:
15506 return !integer_zerop (t);
15508 case ADDR_EXPR:
15510 tree base = TREE_OPERAND (t, 0);
15512 if (!DECL_P (base))
15513 base = get_base_address (base);
15515 if (base && TREE_CODE (base) == TARGET_EXPR)
15516 base = TARGET_EXPR_SLOT (base);
15518 if (!base)
15519 return false;
15521 /* For objects in the symbol table, check if we know they are non-zero.
15522 Don't do anything for variables and functions before symtab is built;
15523 it is quite possible that they will be declared weak later. */
15524 int nonzero_addr = maybe_nonzero_address (base);
15525 if (nonzero_addr >= 0)
15526 return nonzero_addr;
15528 /* Constants are never weak. */
15529 if (CONSTANT_CLASS_P (base))
15530 return true;
15532 return false;
15535 case COND_EXPR:
15536 sub_strict_overflow_p = false;
15537 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15538 &sub_strict_overflow_p)
15539 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15540 &sub_strict_overflow_p))
15542 if (sub_strict_overflow_p)
15543 *strict_overflow_p = true;
15544 return true;
15546 break;
15548 case SSA_NAME:
15549 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15550 break;
15551 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15553 default:
15554 break;
15556 return false;
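/* Editor's illustrative sketch (not part of GCC): for the ADDR_EXPR case
   above, maybe_nonzero_address consults the symbol table, so e.g.

     &local_var   => nonzero (automatic storage is never at address 0)
     &weak_symbol => unknown (a weak symbol may resolve to address 0,
                     so we conservatively return false)
     &"literal"   => nonzero (constants are never weak)
*/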
15559 #define integer_valued_real_p(X) \
15560 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15562 #define RECURSE(X) \
15563 ((integer_valued_real_p) (X, depth + 1))
15565 /* Return true if the floating point result of (CODE OP0) has an
15566 integer value. We also allow +Inf, -Inf and NaN to be considered
15567 integer values. Return false for signaling NaN.
15569 DEPTH is the current nesting depth of the query. */
15571 bool
15572 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15574 switch (code)
15576 case FLOAT_EXPR:
15577 return true;
15579 case ABS_EXPR:
15580 return RECURSE (op0);
15582 CASE_CONVERT:
15584 tree type = TREE_TYPE (op0);
15585 if (TREE_CODE (type) == INTEGER_TYPE)
15586 return true;
15587 if (SCALAR_FLOAT_TYPE_P (type))
15588 return RECURSE (op0);
15589 break;
15592 default:
15593 break;
15595 return false;
15598 /* Return true if the floating point result of (CODE OP0 OP1) has an
15599 integer value. We also allow +Inf, -Inf and NaN to be considered
15600 integer values. Return false for signaling NaN.
15602 DEPTH is the current nesting depth of the query. */
15604 bool
15605 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15607 switch (code)
15609 case PLUS_EXPR:
15610 case MINUS_EXPR:
15611 case MULT_EXPR:
15612 case MIN_EXPR:
15613 case MAX_EXPR:
15614 return RECURSE (op0) && RECURSE (op1);
15616 default:
15617 break;
15619 return false;
15622 /* Return true if the floating point result of calling FN with arguments
15623 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15624 considered integer values. Return false for signaling NaN. If FN
15625 takes fewer than 2 arguments, the remaining ARGn are null.
15627 DEPTH is the current nesting depth of the query. */
15629 bool
15630 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15632 switch (fn)
15634 CASE_CFN_CEIL:
15635 CASE_CFN_CEIL_FN:
15636 CASE_CFN_FLOOR:
15637 CASE_CFN_FLOOR_FN:
15638 CASE_CFN_NEARBYINT:
15639 CASE_CFN_NEARBYINT_FN:
15640 CASE_CFN_RINT:
15641 CASE_CFN_RINT_FN:
15642 CASE_CFN_ROUND:
15643 CASE_CFN_ROUND_FN:
15644 CASE_CFN_ROUNDEVEN:
15645 CASE_CFN_ROUNDEVEN_FN:
15646 CASE_CFN_TRUNC:
15647 CASE_CFN_TRUNC_FN:
15648 return true;
15650 CASE_CFN_FMIN:
15651 CASE_CFN_FMIN_FN:
15652 CASE_CFN_FMAX:
15653 CASE_CFN_FMAX_FN:
15654 return RECURSE (arg0) && RECURSE (arg1);
15656 default:
15657 break;
15659 return false;
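/* Editor's note (illustrative, not part of GCC): concrete answers for the
   call case above, in the informal notation of this file's comments:

     trunc (x)                  => true  (rounding functions are integral)
     fmax (floor (a), ceil (b)) => true  (both arguments are integral)
     sqrt (x)                   => false (not handled, conservatively)
*/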
15662 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15663 has an integer value. We also allow +Inf, -Inf and NaN to be
15664 considered integer values. Return false for signaling NaN.
15666 DEPTH is the current nesting depth of the query. */
15668 bool
15669 integer_valued_real_single_p (tree t, int depth)
15671 switch (TREE_CODE (t))
15673 case REAL_CST:
15674 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15676 case COND_EXPR:
15677 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15679 case SSA_NAME:
15680 /* Limit the depth of recursion to avoid quadratic behavior.
15681 This is expected to catch almost all occurrences in practice.
15682 If this code misses important cases that unbounded recursion
15683 would not, passes that need this information could be revised
15684 to provide it through dataflow propagation. */
15685 return (!name_registered_for_update_p (t)
15686 && depth < param_max_ssa_name_query_depth
15687 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15688 depth));
15690 default:
15691 break;
15693 return false;
15696 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15697 has an integer value. We also allow +Inf, -Inf and NaN to be
15698 considered integer values. Return false for signaling NaN.
15700 DEPTH is the current nesting depth of the query. */
15702 static bool
15703 integer_valued_real_invalid_p (tree t, int depth)
15705 switch (TREE_CODE (t))
15707 case COMPOUND_EXPR:
15708 case MODIFY_EXPR:
15709 case BIND_EXPR:
15710 return RECURSE (TREE_OPERAND (t, 1));
15712 case SAVE_EXPR:
15713 return RECURSE (TREE_OPERAND (t, 0));
15715 default:
15716 break;
15718 return false;
15721 #undef RECURSE
15722 #undef integer_valued_real_p
15724 /* Return true if the floating point expression T has an integer value.
15725 We also allow +Inf, -Inf and NaN to be considered integer values.
15726 Return false for signaling NaN.
15728 DEPTH is the current nesting depth of the query. */
15730 bool
15731 integer_valued_real_p (tree t, int depth)
15733 if (t == error_mark_node)
15734 return false;
15736 STRIP_ANY_LOCATION_WRAPPER (t);
15738 tree_code code = TREE_CODE (t);
15739 switch (TREE_CODE_CLASS (code))
15741 case tcc_binary:
15742 case tcc_comparison:
15743 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15744 TREE_OPERAND (t, 1), depth);
15746 case tcc_unary:
15747 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15749 case tcc_constant:
15750 case tcc_declaration:
15751 case tcc_reference:
15752 return integer_valued_real_single_p (t, depth);
15754 default:
15755 break;
15758 switch (code)
15760 case COND_EXPR:
15761 case SSA_NAME:
15762 return integer_valued_real_single_p (t, depth);
15764 case CALL_EXPR:
15766 tree arg0 = (call_expr_nargs (t) > 0
15767 ? CALL_EXPR_ARG (t, 0)
15768 : NULL_TREE);
15769 tree arg1 = (call_expr_nargs (t) > 1
15770 ? CALL_EXPR_ARG (t, 1)
15771 : NULL_TREE);
15772 return integer_valued_real_call_p (get_call_combined_fn (t),
15773 arg0, arg1, depth);
15776 default:
15777 return integer_valued_real_invalid_p (t, depth);
15781 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15782 attempt to fold the expression to a constant without modifying TYPE,
15783 OP0 or OP1.
15785 If the expression could be simplified to a constant, then return
15786 the constant. If the expression would not be simplified to a
15787 constant, then return NULL_TREE. */
15789 tree
15790 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15792 tree tem = fold_binary (code, type, op0, op1);
15793 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15796 /* Given the components of a unary expression CODE, TYPE and OP0,
15797 attempt to fold the expression to a constant without modifying
15798 TYPE or OP0.
15800 If the expression could be simplified to a constant, then return
15801 the constant. If the expression would not be simplified to a
15802 constant, then return NULL_TREE. */
15804 tree
15805 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15807 tree tem = fold_unary (code, type, op0);
15808 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15811 /* If EXP represents referencing an element in a constant string
15812 (either via pointer arithmetic or array indexing), return the
15813 tree representing the value accessed, otherwise return NULL. */
15815 tree
15816 fold_read_from_constant_string (tree exp)
15818 if ((INDIRECT_REF_P (exp)
15819 || TREE_CODE (exp) == ARRAY_REF)
15820 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15822 tree exp1 = TREE_OPERAND (exp, 0);
15823 tree index;
15824 tree string;
15825 location_t loc = EXPR_LOCATION (exp);
15827 if (INDIRECT_REF_P (exp))
15828 string = string_constant (exp1, &index, NULL, NULL);
15829 else
15831 tree low_bound = array_ref_low_bound (exp);
15832 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15834 /* Optimize the special-case of a zero lower bound.
15836 We convert the low_bound to sizetype to avoid some problems
15837 with constant folding. (E.g. suppose the lower bound is 1,
15838 and its mode is QI. Without the conversion, (ARRAY
15839 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15840 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15841 if (! integer_zerop (low_bound))
15842 index = size_diffop_loc (loc, index,
15843 fold_convert_loc (loc, sizetype, low_bound));
15845 string = exp1;
15848 scalar_int_mode char_mode;
15849 if (string
15850 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15851 && TREE_CODE (string) == STRING_CST
15852 && tree_fits_uhwi_p (index)
15853 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15854 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15855 &char_mode)
15856 && GET_MODE_SIZE (char_mode) == 1)
15857 return build_int_cst_type (TREE_TYPE (exp),
15858 (TREE_STRING_POINTER (string)
15859 [TREE_INT_CST_LOW (index)]));
15861 return NULL;
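/* Editor's illustrative sketch (not part of GCC): given a constant
   string, the routine above folds element reads to the character value:

     "abc"[1]     => build_int_cst_type of 'b' in the element type
     *("abc" + 2) => 'c' (INDIRECT_REF path via string_constant)

   Reads at or beyond TREE_STRING_LENGTH, or from element modes wider
   than one byte, return NULL and are left to the caller.  */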
15864 /* Folds a read from vector element at IDX of vector ARG. */
15866 tree
15867 fold_read_from_vector (tree arg, poly_uint64 idx)
15869 unsigned HOST_WIDE_INT i;
15870 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15871 && known_ge (idx, 0u)
15872 && idx.is_constant (&i))
15874 if (TREE_CODE (arg) == VECTOR_CST)
15875 return VECTOR_CST_ELT (arg, i);
15876 else if (TREE_CODE (arg) == CONSTRUCTOR)
15878 if (CONSTRUCTOR_NELTS (arg)
15879 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15880 return NULL_TREE;
15881 if (i >= CONSTRUCTOR_NELTS (arg))
15882 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15883 return CONSTRUCTOR_ELT (arg, i)->value;
15886 return NULL_TREE;
15889 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15890 an integer constant, real, or fixed-point constant.
15892 TYPE is the type of the result. */
15894 static tree
15895 fold_negate_const (tree arg0, tree type)
15897 tree t = NULL_TREE;
15899 switch (TREE_CODE (arg0))
15901 case REAL_CST:
15902 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15903 break;
15905 case FIXED_CST:
15907 FIXED_VALUE_TYPE f;
15908 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15909 &(TREE_FIXED_CST (arg0)), NULL,
15910 TYPE_SATURATING (type));
15911 t = build_fixed (type, f);
15912 /* Propagate overflow flags. */
15913 if (overflow_p | TREE_OVERFLOW (arg0))
15914 TREE_OVERFLOW (t) = 1;
15915 break;
15918 default:
15919 if (poly_int_tree_p (arg0))
15921 wi::overflow_type overflow;
15922 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15923 t = force_fit_type (type, res, 1,
15924 (overflow && ! TYPE_UNSIGNED (type))
15925 || TREE_OVERFLOW (arg0));
15926 break;
15929 gcc_unreachable ();
15932 return t;
15935 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15936 an integer constant or real constant.
15938 TYPE is the type of the result. */
15940 tree
15941 fold_abs_const (tree arg0, tree type)
15943 tree t = NULL_TREE;
15945 switch (TREE_CODE (arg0))
15947 case INTEGER_CST:
15949 /* If the value is unsigned or non-negative, then the absolute value
15950 is the same as the ordinary value. */
15951 wide_int val = wi::to_wide (arg0);
15952 wi::overflow_type overflow = wi::OVF_NONE;
15953 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15956 /* If the value is negative, then the absolute value is
15957 its negation. */
15958 else
15959 val = wi::neg (val, &overflow);
15961 /* Force to the destination type, set TREE_OVERFLOW for signed
15962 TYPE only. */
15963 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15965 break;
15967 case REAL_CST:
15968 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15969 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15970 else
15971 t = arg0;
15972 break;
15974 default:
15975 gcc_unreachable ();
15978 return t;
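/* Editor's worked example (illustrative, not part of GCC): for a 32-bit
   signed type, fold_abs_const on INT_MIN wraps:

     |-2147483648| = 2147483648, which does not fit in int32

   wi::neg reports the overflow, and force_fit_type then sets
   TREE_OVERFLOW on the resulting constant rather than silently
   producing -2147483648 with no indication.  */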
15981 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15982 constant. TYPE is the type of the result. */
15984 static tree
15985 fold_not_const (const_tree arg0, tree type)
15987 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15989 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15992 /* Given CODE, a relational operator, the target type, TYPE and two
15993 constant operands OP0 and OP1, return the result of the
15994 relational operation. If the result is not a compile time
15995 constant, then return NULL_TREE. */
15997 static tree
15998 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16000 int result, invert;
16002 /* From here on, the only cases we handle are when the result is
16003 known to be a constant. */
16005 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16007 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16008 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16010 /* Handle the cases where either operand is a NaN. */
16011 if (real_isnan (c0) || real_isnan (c1))
16013 switch (code)
16015 case EQ_EXPR:
16016 case ORDERED_EXPR:
16017 result = 0;
16018 break;
16020 case NE_EXPR:
16021 case UNORDERED_EXPR:
16022 case UNLT_EXPR:
16023 case UNLE_EXPR:
16024 case UNGT_EXPR:
16025 case UNGE_EXPR:
16026 case UNEQ_EXPR:
16027 result = 1;
16028 break;
16030 case LT_EXPR:
16031 case LE_EXPR:
16032 case GT_EXPR:
16033 case GE_EXPR:
16034 case LTGT_EXPR:
16035 if (flag_trapping_math)
16036 return NULL_TREE;
16037 result = 0;
16038 break;
16040 default:
16041 gcc_unreachable ();
16044 return constant_boolean_node (result, type);
16047 return constant_boolean_node (real_compare (code, c0, c1), type);
16050 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16052 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16053 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16054 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16057 /* Handle equality/inequality of complex constants. */
16058 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16060 tree rcond = fold_relational_const (code, type,
16061 TREE_REALPART (op0),
16062 TREE_REALPART (op1));
16063 tree icond = fold_relational_const (code, type,
16064 TREE_IMAGPART (op0),
16065 TREE_IMAGPART (op1));
16066 if (code == EQ_EXPR)
16067 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16068 else if (code == NE_EXPR)
16069 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16070 else
16071 return NULL_TREE;
16074 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16076 if (!VECTOR_TYPE_P (type))
16078 /* We have a vector comparison with a scalar boolean result. */
16079 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16080 && known_eq (VECTOR_CST_NELTS (op0),
16081 VECTOR_CST_NELTS (op1)));
16082 unsigned HOST_WIDE_INT nunits;
16083 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16084 return NULL_TREE;
16085 for (unsigned i = 0; i < nunits; i++)
16087 tree elem0 = VECTOR_CST_ELT (op0, i);
16088 tree elem1 = VECTOR_CST_ELT (op1, i);
16089 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16090 if (tmp == NULL_TREE)
16091 return NULL_TREE;
16092 if (integer_zerop (tmp))
16093 return constant_boolean_node (code == NE_EXPR, type);
16095 return constant_boolean_node (code == EQ_EXPR, type);
16097 tree_vector_builder elts;
16098 if (!elts.new_binary_operation (type, op0, op1, false))
16099 return NULL_TREE;
16100 unsigned int count = elts.encoded_nelts ();
16101 for (unsigned i = 0; i < count; i++)
16103 tree elem_type = TREE_TYPE (type);
16104 tree elem0 = VECTOR_CST_ELT (op0, i);
16105 tree elem1 = VECTOR_CST_ELT (op1, i);
16107 tree tem = fold_relational_const (code, elem_type,
16108 elem0, elem1);
16110 if (tem == NULL_TREE)
16111 return NULL_TREE;
16113 elts.quick_push (build_int_cst (elem_type,
16114 integer_zerop (tem) ? 0 : -1));
16117 return elts.build ();
16120 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16122 To compute GT, swap the arguments and do LT.
16123 To compute GE, do LT and invert the result.
16124 To compute LE, swap the arguments, do LT and invert the result.
16125 To compute NE, do EQ and invert the result.
16127 Therefore, the code below must handle only EQ and LT. */
16129 if (code == LE_EXPR || code == GT_EXPR)
16131 std::swap (op0, op1);
16132 code = swap_tree_comparison (code);
16135 /* Note that it is safe to invert for real values here because we
16136 have already handled the one case that it matters. */
16138 invert = 0;
16139 if (code == NE_EXPR || code == GE_EXPR)
16141 invert = 1;
16142 code = invert_tree_comparison (code, false);
16145 /* Compute a result for LT or EQ if args permit;
16146 otherwise return NULL_TREE. */
16147 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16149 if (code == EQ_EXPR)
16150 result = tree_int_cst_equal (op0, op1);
16151 else
16152 result = tree_int_cst_lt (op0, op1);
16154 else
16155 return NULL_TREE;
16157 if (invert)
16158 result ^= 1;
16159 return constant_boolean_node (result, type);
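/* Editor's worked example (illustrative, not part of GCC) of the
   canonicalization above, for INTEGER_CST operands 3 and 7:

     3 <= 7: LE swaps the operands to GE (7, 3), which then inverts LT:
             tree_int_cst_lt (7, 3) = 0, inverted => 1 (true)
     3 >= 7: GE inverts LT directly:
             tree_int_cst_lt (3, 7) = 1, inverted => 0 (false)

   so only tree_int_cst_equal and tree_int_cst_lt are ever needed.  */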
16162 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16163 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16164 itself. */
16166 tree
16167 fold_build_cleanup_point_expr (tree type, tree expr)
16169 /* If the expression does not have side effects then we don't have to wrap
16170 it with a cleanup point expression. */
16171 if (!TREE_SIDE_EFFECTS (expr))
16172 return expr;
16174 /* If the expression is a return, check whether the expression inside the
16175 return, or the right-hand side of the MODIFY_EXPR inside the return, has
16176 no side effects. If either has none, we don't need to wrap the
16177 expression in a cleanup point expression. Note we don't check the
16178 left-hand side of the modify because it should always be a return decl. */
16179 if (TREE_CODE (expr) == RETURN_EXPR)
16181 tree op = TREE_OPERAND (expr, 0);
16182 if (!op || !TREE_SIDE_EFFECTS (op))
16183 return expr;
16184 op = TREE_OPERAND (op, 1);
16185 if (!TREE_SIDE_EFFECTS (op))
16186 return expr;
16189 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16192 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16193 of an indirection through OP0, or NULL_TREE if no simplification is
16194 possible. */
16196 tree
16197 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16199 tree sub = op0;
16200 tree subtype;
16201 poly_uint64 const_op01;
16203 STRIP_NOPS (sub);
16204 subtype = TREE_TYPE (sub);
16205 if (!POINTER_TYPE_P (subtype)
16206 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16207 return NULL_TREE;
16209 if (TREE_CODE (sub) == ADDR_EXPR)
16211 tree op = TREE_OPERAND (sub, 0);
16212 tree optype = TREE_TYPE (op);
16214 /* *&CONST_DECL -> the value of the const decl. */
16215 if (TREE_CODE (op) == CONST_DECL)
16216 return DECL_INITIAL (op);
16217 /* *&p => p; make sure to handle *&"str"[cst] here. */
16218 if (type == optype)
16220 tree fop = fold_read_from_constant_string (op);
16221 if (fop)
16222 return fop;
16223 else
16224 return op;
16226 /* *(foo *)&fooarray => fooarray[0] */
16227 else if (TREE_CODE (optype) == ARRAY_TYPE
16228 && type == TREE_TYPE (optype)
16229 && (!in_gimple_form
16230 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16232 tree type_domain = TYPE_DOMAIN (optype);
16233 tree min_val = size_zero_node;
16234 if (type_domain && TYPE_MIN_VALUE (type_domain))
16235 min_val = TYPE_MIN_VALUE (type_domain);
16236 if (in_gimple_form
16237 && TREE_CODE (min_val) != INTEGER_CST)
16238 return NULL_TREE;
16239 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16240 NULL_TREE, NULL_TREE);
16242 /* *(foo *)&complexfoo => __real__ complexfoo */
16243 else if (TREE_CODE (optype) == COMPLEX_TYPE
16244 && type == TREE_TYPE (optype))
16245 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16246 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16247 else if (VECTOR_TYPE_P (optype)
16248 && type == TREE_TYPE (optype))
16250 tree part_width = TYPE_SIZE (type);
16251 tree index = bitsize_int (0);
16252 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16253 index);
16257 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16258 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16260 tree op00 = TREE_OPERAND (sub, 0);
16261 tree op01 = TREE_OPERAND (sub, 1);
16263 STRIP_NOPS (op00);
16264 if (TREE_CODE (op00) == ADDR_EXPR)
16266 tree op00type;
16267 op00 = TREE_OPERAND (op00, 0);
16268 op00type = TREE_TYPE (op00);
16270 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16271 if (VECTOR_TYPE_P (op00type)
16272 && type == TREE_TYPE (op00type)
16273 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16274 but we want to treat offsets with MSB set as negative.
16275 For the code below negative offsets are invalid and
16276 TYPE_SIZE of the element is something unsigned, so
16277 check whether op01 fits into poly_int64, which implies
16278 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16279 then just use poly_uint64 because we want to treat the
16280 value as unsigned. */
16281 && tree_fits_poly_int64_p (op01))
16283 tree part_width = TYPE_SIZE (type);
16284 poly_uint64 max_offset
16285 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16286 * TYPE_VECTOR_SUBPARTS (op00type));
16287 if (known_lt (const_op01, max_offset))
16289 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16290 return fold_build3_loc (loc,
16291 BIT_FIELD_REF, type, op00,
16292 part_width, index);
16295 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16296 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16297 && type == TREE_TYPE (op00type))
16299 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16300 const_op01))
16301 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16303 /* ((foo *)&fooarray)[1] => fooarray[1] */
16304 else if (TREE_CODE (op00type) == ARRAY_TYPE
16305 && type == TREE_TYPE (op00type))
16307 tree type_domain = TYPE_DOMAIN (op00type);
16308 tree min_val = size_zero_node;
16309 if (type_domain && TYPE_MIN_VALUE (type_domain))
16310 min_val = TYPE_MIN_VALUE (type_domain);
16311 poly_uint64 type_size, index;
16312 if (poly_int_tree_p (min_val)
16313 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16314 && multiple_p (const_op01, type_size, &index))
16316 poly_offset_int off = index + wi::to_poly_offset (min_val);
16317 op01 = wide_int_to_tree (sizetype, off);
16318 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16319 NULL_TREE, NULL_TREE);
16325 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16326 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16327 && type == TREE_TYPE (TREE_TYPE (subtype))
16328 && (!in_gimple_form
16329 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16331 tree type_domain;
16332 tree min_val = size_zero_node;
16333 sub = build_fold_indirect_ref_loc (loc, sub);
16334 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16335 if (type_domain && TYPE_MIN_VALUE (type_domain))
16336 min_val = TYPE_MIN_VALUE (type_domain);
16337 if (in_gimple_form
16338 && TREE_CODE (min_val) != INTEGER_CST)
16339 return NULL_TREE;
16340 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16341 NULL_TREE);
16344 return NULL_TREE;
16347 /* Builds an expression for an indirection through T, simplifying some
16348 cases. */
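/* As an illustration, given "int x; int a[4];": dereferencing &x
   folds directly to x, and dereferencing (int *) &a folds to a[0]
   through the ARRAY_TYPE case of fold_indirect_ref_1; only when no
   simplification applies is a plain INDIRECT_REF built.  */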
16350 tree
16351 build_fold_indirect_ref_loc (location_t loc, tree t)
16353 tree type = TREE_TYPE (TREE_TYPE (t));
16354 tree sub = fold_indirect_ref_1 (loc, type, t);
16356 if (sub)
16357 return sub;
16359 return build1_loc (loc, INDIRECT_REF, type, t);
16362 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16364 tree
16365 fold_indirect_ref_loc (location_t loc, tree t)
16367 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16369 if (sub)
16370 return sub;
16371 else
16372 return t;
16375 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16376 whose result is ignored. The type of the returned tree need not be
16377 the same as the original expression. */
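/* As an illustration: a side-effect-free expression such as x + y
   is replaced by integer_zero_node outright, while
   COMPOUND_EXPR <y++, x> keeps only the side-effecting operand and
   reduces to y++.  */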
16379 tree
16380 fold_ignored_result (tree t)
16382 if (!TREE_SIDE_EFFECTS (t))
16383 return integer_zero_node;
16385 for (;;)
16386 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16388 case tcc_unary:
16389 t = TREE_OPERAND (t, 0);
16390 break;
16392 case tcc_binary:
16393 case tcc_comparison:
16394 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16395 t = TREE_OPERAND (t, 0);
16396 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16397 t = TREE_OPERAND (t, 1);
16398 else
16399 return t;
16400 break;
16402 case tcc_expression:
16403 switch (TREE_CODE (t))
16405 case COMPOUND_EXPR:
16406 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16407 return t;
16408 t = TREE_OPERAND (t, 0);
16409 break;
16411 case COND_EXPR:
16412 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16413 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16414 return t;
16415 t = TREE_OPERAND (t, 0);
16416 break;
16418 default:
16419 return t;
16421 break;
16423 default:
16424 return t;
16428 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
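/* As an illustration: with DIVISOR = 8, a constant VALUE of 10
   folds to (10 + 7) & -8 = 16.  For a non-constant VALUE the same
   bit trick is emitted as trees when DIVISOR is a power of two;
   otherwise a CEIL_DIV_EXPR followed by a MULT_EXPR is used.  */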
16430 tree
16431 round_up_loc (location_t loc, tree value, unsigned int divisor)
16433 tree div = NULL_TREE;
16435 if (divisor == 1)
16436 return value;
16438 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16439 have to do anything. Only do this when VALUE is not a constant,
16440 because for a constant this check is more expensive than just
16441 doing the rounding. */
16442 if (TREE_CODE (value) != INTEGER_CST)
16444 div = build_int_cst (TREE_TYPE (value), divisor);
16446 if (multiple_of_p (TREE_TYPE (value), value, div))
16447 return value;
16450 /* If divisor is a power of two, simplify this to bit manipulation. */
16451 if (pow2_or_zerop (divisor))
16453 if (TREE_CODE (value) == INTEGER_CST)
16455 wide_int val = wi::to_wide (value);
16456 bool overflow_p;
16458 if ((val & (divisor - 1)) == 0)
16459 return value;
16461 overflow_p = TREE_OVERFLOW (value);
16462 val += divisor - 1;
16463 val &= (int) -divisor;
16464 if (val == 0)
16465 overflow_p = true;
16467 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16469 else
16471 tree t;
16473 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16474 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16475 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16476 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16479 else
16481 if (!div)
16482 div = build_int_cst (TREE_TYPE (value), divisor);
16483 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16484 value = size_binop_loc (loc, MULT_EXPR, value, div);
16487 return value;
16490 /* Likewise, but round down. */
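/* As an illustration: with DIVISOR = 8, VALUE = 10 rounds down to
   10 & -8 = 8 on the power-of-two path, and to (10 / 8) * 8 = 8
   via FLOOR_DIV_EXPR and MULT_EXPR otherwise.  */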
16492 tree
16493 round_down_loc (location_t loc, tree value, int divisor)
16495 tree div = NULL_TREE;
16497 gcc_assert (divisor > 0);
16498 if (divisor == 1)
16499 return value;
16501 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16502 have to do anything. Only do this when VALUE is not a constant,
16503 because for a constant this check is more expensive than just
16504 doing the rounding. */
16505 if (TREE_CODE (value) != INTEGER_CST)
16507 div = build_int_cst (TREE_TYPE (value), divisor);
16509 if (multiple_of_p (TREE_TYPE (value), value, div))
16510 return value;
16513 /* If divisor is a power of two, simplify this to bit manipulation. */
16514 if (pow2_or_zerop (divisor))
16516 tree t;
16518 t = build_int_cst (TREE_TYPE (value), -divisor);
16519 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16521 else
16523 if (!div)
16524 div = build_int_cst (TREE_TYPE (value), divisor);
16525 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16526 value = size_binop_loc (loc, MULT_EXPR, value, div);
16529 return value;
16532 /* Returns the pointer to the base of the object addressed by EXP and
16533 extracts information about the offset of the access, storing it
16534 in PBITPOS and POFFSET. */
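/* As an illustration (assuming 8-bit bytes and a field F at byte
   offset 4): for EXP = &s.f this returns &s with *PBITPOS = 32 and
   *POFFSET = NULL_TREE; for a POINTER_PLUS_EXPR of P and a
   non-constant offset N it returns P with *PBITPOS = 0 and
   *POFFSET = N.  */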
16536 static tree
16537 split_address_to_core_and_offset (tree exp,
16538 poly_int64_pod *pbitpos, tree *poffset)
16540 tree core;
16541 machine_mode mode;
16542 int unsignedp, reversep, volatilep;
16543 poly_int64 bitsize;
16544 location_t loc = EXPR_LOCATION (exp);
16546 if (TREE_CODE (exp) == SSA_NAME)
16547 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16548 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16549 exp = gimple_assign_rhs1 (def);
16551 if (TREE_CODE (exp) == ADDR_EXPR)
16553 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16554 poffset, &mode, &unsignedp, &reversep,
16555 &volatilep);
16556 core = build_fold_addr_expr_loc (loc, core);
16558 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16560 core = TREE_OPERAND (exp, 0);
16561 STRIP_NOPS (core);
16562 *pbitpos = 0;
16563 *poffset = TREE_OPERAND (exp, 1);
16564 if (poly_int_tree_p (*poffset))
16566 poly_offset_int tem
16567 = wi::sext (wi::to_poly_offset (*poffset),
16568 TYPE_PRECISION (TREE_TYPE (*poffset)));
16569 tem <<= LOG2_BITS_PER_UNIT;
16570 if (tem.to_shwi (pbitpos))
16571 *poffset = NULL_TREE;
16574 else
16576 core = exp;
16577 *pbitpos = 0;
16578 *poffset = NULL_TREE;
16581 return core;
16584 /* Returns true if addresses of E1 and E2 differ by a constant, false
16585 otherwise. If they do, E1 - E2 is stored in *DIFF. */
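/* As an illustration (assuming 4-byte int): for E1 = &a[3] and
   E2 = &a[1] both cores are &a, the byte positions are 12 and 4,
   and *DIFF is set to 8.  */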
16587 bool
16588 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16590 tree core1, core2;
16591 poly_int64 bitpos1, bitpos2;
16592 tree toffset1, toffset2, tdiff, type;
16594 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16595 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16597 poly_int64 bytepos1, bytepos2;
16598 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16599 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16600 || !operand_equal_p (core1, core2, 0))
16601 return false;
16603 if (toffset1 && toffset2)
16605 type = TREE_TYPE (toffset1);
16606 if (type != TREE_TYPE (toffset2))
16607 toffset2 = fold_convert (type, toffset2);
16609 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16610 if (!cst_and_fits_in_hwi (tdiff))
16611 return false;
16613 *diff = int_cst_value (tdiff);
16615 else if (toffset1 || toffset2)
16617 /* If only one of the offsets is non-constant, the difference cannot
16618 be a constant. */
16619 return false;
16621 else
16622 *diff = 0;
16624 *diff += bytepos1 - bytepos2;
16625 return true;
16628 /* Return OFF converted to a pointer offset type suitable as an offset
16629 for POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16630 tree
16631 convert_to_ptrofftype_loc (location_t loc, tree off)
16633 if (ptrofftype_p (TREE_TYPE (off)))
16634 return off;
16635 return fold_convert_loc (loc, sizetype, off);
16638 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
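/* E.g. fold_build_pointer_plus_loc (loc, p, i) builds
   POINTER_PLUS_EXPR <p, i>, first converting I with
   convert_to_ptrofftype_loc if it is not already of a pointer
   offset type.  */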
16639 tree
16640 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16642 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16643 ptr, convert_to_ptrofftype_loc (loc, off));
16646 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16647 tree
16648 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16650 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16651 ptr, size_int (off));
16654 /* Return a pointer to a NUL-terminated string containing the sequence
16655 of bytes corresponding to the representation of the object referred to
16656 by SRC (or a subsequence of such bytes within it if SRC is a reference
16657 to an initialized constant array plus some constant offset).
16658 Set *STRSIZE to the number of bytes in the constant sequence including
16659 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16660 where A is the array that stores the constant sequence that SRC points
16661 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16662 need not point to a string or even an array of characters but may point
16663 to an object of any type. */
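/* As an illustration: given the constant array
   const char a[] = "abc";
   calling getbyterep on &a[1] returns "bc" and sets *STRSIZE to 3,
   i.e. sizeof a - 1, which counts the terminating NUL.  */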
16665 const char *
16666 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16668 /* The offset into the array A storing the string, and A's byte size. */
16669 tree offset_node;
16670 tree mem_size;
16672 if (strsize)
16673 *strsize = 0;
16675 if (strsize)
16676 src = byte_representation (src, &offset_node, &mem_size, NULL);
16677 else
16678 src = string_constant (src, &offset_node, &mem_size, NULL);
16679 if (!src)
16680 return NULL;
16682 unsigned HOST_WIDE_INT offset = 0;
16683 if (offset_node != NULL_TREE)
16685 if (!tree_fits_uhwi_p (offset_node))
16686 return NULL;
16687 else
16688 offset = tree_to_uhwi (offset_node);
16691 if (!tree_fits_uhwi_p (mem_size))
16692 return NULL;
16694 /* ARRAY_SIZE is the byte size of the array the constant sequence
16695 is stored in and equal to sizeof A. INIT_BYTES is the number
16696 of bytes in the constant sequence used to initialize the array,
16697 including any embedded NULs as well as the terminating NUL (for
16698 strings), but not including any trailing zeros/NULs past
16699 the terminating one appended implicitly to a string literal to
16700 zero out the remainder of the array it's stored in. For example,
16701 given:
16702 const char a[7] = "abc\0d";
16703 n = strlen (a + 1);
16704 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16705 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16706 is equal to strlen (A) + 1. */
16707 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16708 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16709 const char *string = TREE_STRING_POINTER (src);
16711 /* Ideally this would turn into a gcc_checking_assert over time. */
16712 if (init_bytes > array_size)
16713 init_bytes = array_size;
16715 if (init_bytes == 0 || offset >= array_size)
16716 return NULL;
16718 if (strsize)
16720 /* Compute and store the number of characters from the beginning
16721 of the substring at OFFSET to the end, including the terminating
16722 nul. Offsets past the initial length refer to null strings. */
16723 if (offset < init_bytes)
16724 *strsize = init_bytes - offset;
16725 else
16726 *strsize = 1;
16728 else
16730 tree eltype = TREE_TYPE (TREE_TYPE (src));
16731 /* Support only properly NUL-terminated single byte strings. */
16732 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16733 return NULL;
16734 if (string[init_bytes - 1] != '\0')
16735 return NULL;
16738 return offset < init_bytes ? string + offset : "";
16741 /* Return a pointer to a NUL-terminated string corresponding to
16742 the expression STR referencing a constant string, possibly
16743 involving a constant offset. Return null if STR either doesn't
16744 reference a constant string or if it involves a nonconstant
16745 offset. */
16747 const char *
16748 c_getstr (tree str)
16750 return getbyterep (str, NULL);
16753 /* Given a tree T, compute which bits in T may be nonzero. */
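/* As an illustration: for T = (x & 0xF0) << 2 the BIT_AND_EXPR
   case yields nonzero bits 0xF0, and the LSHIFT_EXPR case shifts
   them to 0x3C0; all other bits of T are known to be zero.  */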
16755 wide_int
16756 tree_nonzero_bits (const_tree t)
16758 switch (TREE_CODE (t))
16760 case INTEGER_CST:
16761 return wi::to_wide (t);
16762 case SSA_NAME:
16763 return get_nonzero_bits (t);
16764 case NON_LVALUE_EXPR:
16765 case SAVE_EXPR:
16766 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16767 case BIT_AND_EXPR:
16768 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16769 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16770 case BIT_IOR_EXPR:
16771 case BIT_XOR_EXPR:
16772 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16773 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16774 case COND_EXPR:
16775 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16776 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16777 CASE_CONVERT:
16778 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16779 TYPE_PRECISION (TREE_TYPE (t)),
16780 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16781 case PLUS_EXPR:
16782 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16784 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16785 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16786 if (wi::bit_and (nzbits1, nzbits2) == 0)
16787 return wi::bit_or (nzbits1, nzbits2);
16789 break;
16790 case LSHIFT_EXPR:
16791 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16793 tree type = TREE_TYPE (t);
16794 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16795 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16796 TYPE_PRECISION (type));
16797 return wi::neg_p (arg1)
16798 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16799 : wi::lshift (nzbits, arg1);
16801 break;
16802 case RSHIFT_EXPR:
16803 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16805 tree type = TREE_TYPE (t);
16806 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16807 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16808 TYPE_PRECISION (type));
16809 return wi::neg_p (arg1)
16810 ? wi::lshift (nzbits, -arg1)
16811 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16813 break;
16814 default:
16815 break;
16818 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16821 /* Helper function for address compare simplifications in match.pd.
16822 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16823 TYPE is the type of comparison operands.
16824 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16825 GENERIC is true if GENERIC folding and false for GIMPLE folding.
16826 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16827 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16828 and 2 if unknown. */
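/* Illustrative outcomes, assuming distinct global variables a and b:
   comparing &a with &a + 4 yields 1 (same base, so the result
   depends on the offsets); comparing &a with &b under EQ_EXPR or
   NE_EXPR typically yields 0 (known unequal); when a base is
   unknown or the answer would depend on object placement, 2 is
   returned.  */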
16831 address_compare (tree_code code, tree type, tree op0, tree op1,
16832 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16833 bool generic)
16835 if (TREE_CODE (op0) == SSA_NAME)
16836 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
16837 if (TREE_CODE (op1) == SSA_NAME)
16838 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
16839 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16840 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16841 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16842 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16843 if (base0 && TREE_CODE (base0) == MEM_REF)
16845 off0 += mem_ref_offset (base0).force_shwi ();
16846 base0 = TREE_OPERAND (base0, 0);
16848 if (base1 && TREE_CODE (base1) == MEM_REF)
16850 off1 += mem_ref_offset (base1).force_shwi ();
16851 base1 = TREE_OPERAND (base1, 0);
16853 if (base0 == NULL_TREE || base1 == NULL_TREE)
16854 return 2;
16856 int equal = 2;
16857 /* Punt in GENERIC on variables with value expressions;
16858 the value expressions might point to fields/elements
16859 of other vars etc. */
16860 if (generic
16861 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16862 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16863 return 2;
16864 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16866 symtab_node *node0 = symtab_node::get_create (base0);
16867 symtab_node *node1 = symtab_node::get_create (base1);
16868 equal = node0->equal_address_to (node1);
16870 else if ((DECL_P (base0)
16871 || TREE_CODE (base0) == SSA_NAME
16872 || TREE_CODE (base0) == STRING_CST)
16873 && (DECL_P (base1)
16874 || TREE_CODE (base1) == SSA_NAME
16875 || TREE_CODE (base1) == STRING_CST))
16876 equal = (base0 == base1);
16877 /* Assume different STRING_CSTs with the same content will be
16878 merged. */
16879 if (equal == 0
16880 && TREE_CODE (base0) == STRING_CST
16881 && TREE_CODE (base1) == STRING_CST
16882 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16883 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16884 TREE_STRING_LENGTH (base0)) == 0)
16885 equal = 1;
16886 if (equal == 1)
16888 if (code == EQ_EXPR
16889 || code == NE_EXPR
16890 /* If the offsets are equal we can ignore overflow. */
16891 || known_eq (off0, off1)
16892 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16893 /* Or if we compare using pointers to decls or strings. */
16894 || (POINTER_TYPE_P (type)
16895 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16896 return 1;
16897 return 2;
16899 if (equal != 0)
16900 return equal;
16901 if (code != EQ_EXPR && code != NE_EXPR)
16902 return 2;
16904 /* At this point we know (or assume) the two pointers point at
16905 different objects. */
16906 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16907 off0.is_constant (&ioff0);
16908 off1.is_constant (&ioff1);
16909 /* Punt on non-zero offsets from functions. */
16910 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16911 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16912 return 2;
16913 /* Or if the bases are neither decls nor string literals. */
16914 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16915 return 2;
16916 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16917 return 2;
16918 /* For initializers, assume addresses of different functions are
16919 different. */
16920 if (folding_initializer
16921 && TREE_CODE (base0) == FUNCTION_DECL
16922 && TREE_CODE (base1) == FUNCTION_DECL)
16923 return 0;
16925 /* Compute whether one address points to the start of one
16926 object and another one to the end of another one. */
16927 poly_int64 size0 = 0, size1 = 0;
16928 if (TREE_CODE (base0) == STRING_CST)
16930 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16931 equal = 2;
16932 else
16933 size0 = TREE_STRING_LENGTH (base0);
16935 else if (TREE_CODE (base0) == FUNCTION_DECL)
16936 size0 = 1;
16937 else
16939 tree sz0 = DECL_SIZE_UNIT (base0);
16940 if (!tree_fits_poly_int64_p (sz0))
16941 equal = 2;
16942 else
16943 size0 = tree_to_poly_int64 (sz0);
16945 if (TREE_CODE (base1) == STRING_CST)
16947 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16948 equal = 2;
16949 else
16950 size1 = TREE_STRING_LENGTH (base1);
16952 else if (TREE_CODE (base1) == FUNCTION_DECL)
16953 size1 = 1;
16954 else
16956 tree sz1 = DECL_SIZE_UNIT (base1);
16957 if (!tree_fits_poly_int64_p (sz1))
16958 equal = 2;
16959 else
16960 size1 = tree_to_poly_int64 (sz1);
16962 if (equal == 0)
16964 /* If one offset is pointing (or could be) to the beginning of one
16965 object and the other is pointing to one past the last byte of the
16966 other object, punt. */
16967 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16968 equal = 2;
16969 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16970 equal = 2;
16971 /* If both offsets are the same, there are some cases we know are
16972 OK: either we know the offsets aren't zero, or we know that both
16973 sizes are nonzero. */
16974 if (equal == 2
16975 && known_eq (off0, off1)
16976 && (known_ne (off0, 0)
16977 || (known_ne (size0, 0) && known_ne (size1, 0))))
16978 equal = 0;
16981 /* At this point, equal is 2 if either one or both pointers are out of
16982 bounds of their object, or one points to start of its object and the
16983 other points to end of its object. This is unspecified behavior
16984 e.g. in C++. Otherwise equal is 0. */
16985 if (folding_cxx_constexpr && equal)
16986 return equal;
16988 /* When both pointers point to string literals, the pointers might be
16989 the same even when equal is 0, due to tail merging of string literals. */
16990 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16992 if (ioff0 < 0
16993 || ioff1 < 0
16994 || ioff0 > TREE_STRING_LENGTH (base0)
16995 || ioff1 > TREE_STRING_LENGTH (base1))
16996 return 2;
16998 /* If the bytes in the string literals starting at the pointers
16999 differ, the pointers need to be different. */
17000 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
17001 TREE_STRING_POINTER (base1) + ioff1,
17002 MIN (TREE_STRING_LENGTH (base0) - ioff0,
17003 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
17005 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
17006 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
17007 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
17008 ioffmin) == 0)
17009 /* If even the bytes in the string literals before the
17010 pointers are the same, the string literals could be
17011 tail merged. */
17012 return 2;
17014 return 0;
17017 if (folding_cxx_constexpr)
17018 return 0;
17020 /* If this is a pointer comparison, ignore for now even
17021 valid equalities where one pointer points to offset zero
17022 of one object and the other to one past the end of another. */
17023 if (!INTEGRAL_TYPE_P (type))
17024 return 0;
17026 /* Assume that string literals can't be adjacent to variables
17027 (automatic or global). */
17028 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
17029 return 0;
17031 /* Assume that automatic variables can't be adjacent to global
17032 variables. */
17033 if (is_global_var (base0) != is_global_var (base1))
17034 return 0;
17036 return equal;
17039 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
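/* E.g. for the CONSTRUCTOR { 0, 0, 5, 0 } this returns the
   element 5; for { 1, 2 }, which has two nonzero elements, or for
   a non-CONSTRUCTOR tree, it returns NULL_TREE.  */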
17040 tree
17041 ctor_single_nonzero_element (const_tree t)
17043 unsigned HOST_WIDE_INT idx;
17044 constructor_elt *ce;
17045 tree elt = NULL_TREE;
17047 if (TREE_CODE (t) != CONSTRUCTOR)
17048 return NULL_TREE;
17049 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17050 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17052 if (elt)
17053 return NULL_TREE;
17054 elt = ce->value;
17056 return elt;
17059 #if CHECKING_P
17061 namespace selftest {
17063 /* Helper functions for writing tests of folding trees. */
17065 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17067 static void
17068 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17069 tree constant)
17071 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17074 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
17075 wrapping WRAPPED_EXPR. */
17077 static void
17078 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17079 tree wrapped_expr)
17081 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17082 ASSERT_NE (wrapped_expr, result);
17083 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17084 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17087 /* Verify that various arithmetic binary operations are folded
17088 correctly. */
17090 static void
17091 test_arithmetic_folding ()
17093 tree type = integer_type_node;
17094 tree x = create_tmp_var_raw (type, "x");
17095 tree zero = build_zero_cst (type);
17096 tree one = build_int_cst (type, 1);
17098 /* Addition. */
17099 /* 1 <-- (0 + 1) */
17100 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17101 one);
17102 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17103 one);
17105 /* (nonlvalue)x <-- (x + 0) */
17106 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17109 /* Subtraction. */
17110 /* 0 <-- (x - x) */
17111 assert_binop_folds_to_const (x, MINUS_EXPR, x,
17112 zero);
17113 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17116 /* Multiplication. */
17117 /* 0 <-- (x * 0) */
17118 assert_binop_folds_to_const (x, MULT_EXPR, zero,
17119 zero);
17121 /* (nonlvalue)x <-- (x * 1) */
17122 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17126 namespace test_fold_vec_perm_cst {
17128 /* Build a VECTOR_CST corresponding to VMODE whose encoding is
17129 given by NPATTERNS, NELTS_PER_PATTERN and STEP.
17130 Fill it with randomized elements, using rand () % THRESHOLD. */
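/* As an illustration: npatterns = 1, nelts_per_pattern = 3 and
   step = 2 produce the encoding { a0, a1, a1 + 2 }, which for a
   VLA mode denotes { a0, a1, a1 + 2, a1 + 4, ... }, with a0 and a1
   drawn from rand () % threshold.  */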
17132 static tree
17133 build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17134 unsigned nelts_per_pattern,
17135 int step = 0, int threshold = 100)
17137 tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17138 tree vectype = build_vector_type_for_mode (inner_type, vmode);
17139 tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17141 // Fill a0 for each pattern
17142 for (unsigned i = 0; i < npatterns; i++)
17143 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17145 if (nelts_per_pattern == 1)
17146 return builder.build ();
17148 // Fill a1 for each pattern
17149 for (unsigned i = 0; i < npatterns; i++)
17150 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17152 if (nelts_per_pattern == 2)
17153 return builder.build ();
17155 for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17157 tree prev_elem = builder[i - npatterns];
17158 int prev_elem_val = TREE_INT_CST_LOW (prev_elem);
17159 int val = prev_elem_val + step;
17160 builder.quick_push (build_int_cst (inner_type, val));
17163 return builder.build ();
17166 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17167 when the result is VLA. */
17169 static void
17170 validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17171 tree res, tree *expected_res)
17173 /* Actual npatterns and encoded_elts in res may be less than expected due
17174 to canonicalization. */
17175 ASSERT_TRUE (res != NULL_TREE);
17176 ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17177 ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17179 for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17180 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17183 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17184 when the result is VLS. */
17186 static void
17187 validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17189 ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17190 for (unsigned i = 0; i < expected_nelts; i++)
17191 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17194 /* Helper routine to push multiple elements into BUILDER. */
17195 template<unsigned N>
17196 static void builder_push_elems (vec_perm_builder& builder,
17197 poly_uint64 (&elems)[N])
17199 for (unsigned i = 0; i < N; i++)
17200 builder.quick_push (elems[i]);
17203 #define ARG0(index) vector_cst_elt (arg0, index)
17204 #define ARG1(index) vector_cst_elt (arg1, index)
17206 /* Test cases where result is VNx4SI and input vectors are V4SI. */
17208 static void
17209 test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17211 for (int i = 0; i < 10; i++)
17213 /* Case 1:
17214 sel = { 0, 4, 1, 5, ... }
17215 res = { arg0[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1) */
17217 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17218 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17220 tree inner_type
17221 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17222 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17224 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17225 vec_perm_builder builder (res_len, 4, 1);
17226 poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17227 builder_push_elems (builder, mask_elems);
17229 vec_perm_indices sel (builder, 2, res_len);
17230 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17232 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17233 validate_res (4, 1, res, expected_res);
17236 /* Case 2: Same as case 1, but contains an out of bounds access which
17237 should wrap around.
17238 sel = {0, 8, 4, 12, ...} (4, 1)
17239 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17241 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17242 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17244 tree inner_type
17245 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17246 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17248 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17249 vec_perm_builder builder (res_len, 4, 1);
17250 poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17251 builder_push_elems (builder, mask_elems);
17253 vec_perm_indices sel (builder, 2, res_len);
17254 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17256 tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17257 validate_res (4, 1, res, expected_res);
17262 /* Test cases where result is V4SI and input vectors are VNx4SI. */
17264 static void
17265 test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17267 for (int i = 0; i < 10; i++)
17269 /* Case 1:
17270 sel = { 0, 1, 2, 3}
17271 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17273 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17274 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17276 tree inner_type
17277 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17278 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17280 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17281 vec_perm_builder builder (res_len, 4, 1);
17282 poly_uint64 mask_elems[] = {0, 1, 2, 3};
17283 builder_push_elems (builder, mask_elems);
17285 vec_perm_indices sel (builder, 2, res_len);
17286 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17288 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17289 validate_res_vls (res, expected_res, 4);
17292 /* Case 2: Same as Case 1, but crossing the input vectors.
17293 sel = {0, 2, 4, 6}
17294 In this case, the index 4 is ambiguous since len = 4 + 4x.
17295 Since we cannot determine at compile time which vector to
17296 choose from, this should return NULL_TREE. */
17298 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17299 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17301 tree inner_type
17302 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17303 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17305 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17306 vec_perm_builder builder (res_len, 4, 1);
17307 poly_uint64 mask_elems[] = {0, 2, 4, 6};
17308 builder_push_elems (builder, mask_elems);
17310 vec_perm_indices sel (builder, 2, res_len);
17311 const char *reason;
17312 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17314 ASSERT_TRUE (res == NULL_TREE);
17315 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17320 /* Tests that apply to input vectors of any length. */
17322 static void
17323 test_all_nunits (machine_mode vmode)
17325 /* Test with 10 different inputs. */
17326 for (int i = 0; i < 10; i++)
17328 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17329 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17330 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17332 /* Case 1: mask = {0, ...} // (1, 1)
17333 res = { arg0[0], ... } // (1, 1) */
17335 vec_perm_builder builder (len, 1, 1);
17336 builder.quick_push (0);
17337 vec_perm_indices sel (builder, 2, len);
17338 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17339 tree expected_res[] = { ARG0(0) };
17340 validate_res (1, 1, res, expected_res);
17343 /* Case 2: mask = {len, ...} // (1, 1)
17344 res = { arg1[0], ... } // (1, 1) */
17346 vec_perm_builder builder (len, 1, 1);
17347 builder.quick_push (len);
17348 vec_perm_indices sel (builder, 2, len);
17349 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17351 tree expected_res[] = { ARG1(0) };
17352 validate_res (1, 1, res, expected_res);
17359 /* Test all vectors which contain at least 2 elements. */
17359 static void
17360 test_nunits_min_2 (machine_mode vmode)
17362 for (int i = 0; i < 10; i++)
17364 /* Case 1: mask = { 0, len, ... } // (2, 1)
17365 res = { arg0[0], arg1[0], ... } // (2, 1) */
17367 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17368 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17369 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17371 vec_perm_builder builder (len, 2, 1);
17372 poly_uint64 mask_elems[] = { 0, len };
17373 builder_push_elems (builder, mask_elems);
17375 vec_perm_indices sel (builder, 2, len);
17376 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17378 tree expected_res[] = { ARG0(0), ARG1(0) };
17379 validate_res (2, 1, res, expected_res);
17382 /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17383 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17385 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17386 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17387 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17389 vec_perm_builder builder (len, 2, 2);
17390 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17391 builder_push_elems (builder, mask_elems);
17393 vec_perm_indices sel (builder, 2, len);
17394 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17396 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17397 validate_res (2, 2, res, expected_res);
17400 /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17401 Test that the stepped sequence of the pattern selects from the
17402 same input pattern. Since the input vectors have npatterns = 2
17403 and step (a2 - a1) = 1, the step is not a multiple of npatterns
17404 in the input vector, so NULL_TREE is returned. */
17406 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1);
17407 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17408 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17410 vec_perm_builder builder (len, 1, 3);
17411 poly_uint64 mask_elems[] = { 0, 0, 1 };
17412 builder_push_elems (builder, mask_elems);
17414 vec_perm_indices sel (builder, 2, len);
17415 const char *reason;
17416 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17417 &reason);
17418 ASSERT_TRUE (res == NULL_TREE);
17419 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17422 /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17423 Test that the stepped sequence of the pattern selects from arg0.
17424 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17426 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17427 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17428 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17430 vec_perm_builder builder (len, 1, 3);
17431 poly_uint64 mask_elems[] = { len, 0, 1 };
17432 builder_push_elems (builder, mask_elems);
17434 vec_perm_indices sel (builder, 2, len);
17435 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17437 tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17438 validate_res (1, 3, res, expected_res);
17445 /* Test all vectors which contain at least 4 elements. */
17445 static void
17446 test_nunits_min_4 (machine_mode vmode)
17448 for (int i = 0; i < 10; i++)
17450 /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17451 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17453 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17454 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17455 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17457 vec_perm_builder builder (len, 4, 1);
17458 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17459 builder_push_elems (builder, mask_elems);
17461 vec_perm_indices sel (builder, 2, len);
17462 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17464 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17465 validate_res (4, 1, res, expected_res);
17468 /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17469 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17471 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17472 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17473 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17475 vec_perm_builder builder (arg0_len, 1, 3);
17476 poly_uint64 mask_elems[] = {0, 1, 2};
17477 builder_push_elems (builder, mask_elems);
17479 vec_perm_indices sel (builder, 2, arg0_len);
17480 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17481 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17482 validate_res (1, 3, res, expected_res);
17485 /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17486 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17488 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17489 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17490 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17492 vec_perm_builder builder (len, 1, 3);
17493 poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17494 builder_push_elems (builder, mask_elems);
17496 vec_perm_indices sel (builder, 2, len);
17497 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17498 tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17499 validate_res (1, 3, res, expected_res);
17502 /* Case 4:
17503 sel = { len, 0, 2, ... } // (1, 3)
17504 This should return NULL because we cross the input vectors.
17505 To see why, assume:
17506 len = C + Cx
17507 a1 = 0
17508 S = 2
17509 esel = arg0_len / sel_npatterns = C + Cx
17510 ae = 0 + (esel - 2) * S
17511 = 0 + (C + Cx - 2) * 2
17512 = 2(C-2) + 2Cx
17514 For C >= 4:
17515 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17516 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17517 Since q1 != qe, we cross input vectors.
17518 So return NULL_TREE. */
17520 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17521 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17522 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17524 vec_perm_builder builder (arg0_len, 1, 3);
17525 poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17526 builder_push_elems (builder, mask_elems);
17528 vec_perm_indices sel (builder, 2, arg0_len);
17529 const char *reason;
17530 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17531 ASSERT_TRUE (res == NULL_TREE);
17532 ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17535 /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17536 mask = { 0, len, 1, len + 1, ...} // (2, 2)
17537 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17539 Note that fold_vec_perm_cst will set
17540 res_npatterns = max(4, max(4, 2)) = 4
17541 However, after canonicalizing, we will end up with shape (2, 2). */
17543 tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17544 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17545 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17547 vec_perm_builder builder (len, 2, 2);
17548 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17549 builder_push_elems (builder, mask_elems);
17551 vec_perm_indices sel (builder, 2, len);
17552 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17553 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17554 validate_res (2, 2, res, expected_res);
17557 /* Case 6: Test a combination in sel where one pattern is a dup
17558 and the other is a stepped sequence.
17559 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17560 res = { arg0[0], arg0[0], arg0[0],
17561 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17563 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17564 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17565 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17567 vec_perm_builder builder (len, 2, 3);
17568 poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17569 builder_push_elems (builder, mask_elems);
17571 vec_perm_indices sel (builder, 2, len);
17572 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17574 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17575 ARG0(1), ARG0(0), ARG0(2) };
17576 validate_res (2, 3, res, expected_res);
17579 /* Case 7: PR111048: Check that we set arg_npatterns correctly,
17580 when arg0, arg1 and sel have different number of patterns.
17581 arg0 is of shape (1, 1)
17582 arg1 is of shape (4, 1)
17583 sel is of shape (2, 3) = {1, len, 2, len+1, 3, len+2, ...}
17585 In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17586 However,
17587 step = (len+2) - (len+1) = 1
17588 arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17589 Since step is not a multiple of arg_npatterns,
17590 valid_mask_for_fold_vec_perm_cst should return false,
17591 and thus fold_vec_perm_cst should return NULL_TREE. */
17593 tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17594 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17595 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17597 vec_perm_builder builder (len, 2, 3);
17598 poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17599 builder_push_elems (builder, mask_elems);
17601 vec_perm_indices sel (builder, 2, len);
17602 const char *reason;
17603 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17605 ASSERT_TRUE (res == NULL_TREE);
17606 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17613 /* Test all vectors which contain at least 8 elements. */
17613 static void
17614 test_nunits_min_8 (machine_mode vmode)
17616 for (int i = 0; i < 10; i++)
17618 /* Case 1: sel_npatterns (4) > input npatterns (2)
17619 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17620 res: { arg0[0], arg0[0], arg0[1], arg1[0],
17621 arg0[2], arg0[0], arg0[3], arg1[0],
17622 arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17624 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17625 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17626 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17628 vec_perm_builder builder(len, 4, 3);
17629 poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17630 4, 0, 5, len };
17631 builder_push_elems (builder, mask_elems);
17633 vec_perm_indices sel (builder, 2, len);
17634 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17636 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17637 ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17638 ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17639 validate_res (4, 3, res, expected_res);
17644 /* Test vectors for which nunits[0] <= 4. */
17646 static void
17647 test_nunits_max_4 (machine_mode vmode)
17649 /* Case 1: mask = {0, 4, ...} // (1, 2)
17650 This should return NULL_TREE because the index 4 may choose
17651 from either arg0 or arg1 depending on vector length. */
17653 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17654 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17655 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17657 vec_perm_builder builder (len, 1, 2);
17658 poly_uint64 mask_elems[] = {0, 4};
17659 builder_push_elems (builder, mask_elems);
17661 vec_perm_indices sel (builder, 2, len);
17662 const char *reason;
17663 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17664 ASSERT_TRUE (res == NULL_TREE);
17665 ASSERT_TRUE (reason != NULL);
17666 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17670 #undef ARG0
17671 #undef ARG1
17673 /* Return true if SIZE is of the form C + Cx and C is a power of 2. */
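/* E.g. a size of 4 + 4x qualifies (C = 4, a power of 2), while a
   constant size such as 4, or 6 + 6x (6 is not a power of 2),
   does not.  */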
17675 static bool
17676 is_simple_vla_size (poly_uint64 size)
17678 if (size.is_constant ()
17679 || !pow2p_hwi (size.coeffs[0]))
17680 return false;
17681 for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17682 if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17683 return false;
17684 return true;
17687 /* Execute fold_vec_perm_cst unit tests. */
17689 static void
17690 test ()
17692 machine_mode vnx4si_mode = E_VOIDmode;
17693 machine_mode v4si_mode = E_VOIDmode;
17695 machine_mode vmode;
17696 FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
17698 /* Obtain modes corresponding to VNx4SI and V4SI,
17699 to call mixed mode tests below.
17700 FIXME: Is there a better way to do this? */
17701 if (GET_MODE_INNER (vmode) == SImode)
17703 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17704 if (is_simple_vla_size (nunits)
17705 && nunits.coeffs[0] == 4)
17706 vnx4si_mode = vmode;
17707 else if (known_eq (nunits, poly_uint64 (4)))
17708 v4si_mode = vmode;
17711 if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
17712 || !targetm.vector_mode_supported_p (vmode))
17713 continue;
17715 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17716 test_all_nunits (vmode);
17717 if (nunits.coeffs[0] >= 2)
17718 test_nunits_min_2 (vmode);
17719 if (nunits.coeffs[0] >= 4)
17720 test_nunits_min_4 (vmode);
17721 if (nunits.coeffs[0] >= 8)
17722 test_nunits_min_8 (vmode);
17724 if (nunits.coeffs[0] <= 4)
17725 test_nunits_max_4 (vmode);
17728 if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
17729 && targetm.vector_mode_supported_p (vnx4si_mode)
17730 && targetm.vector_mode_supported_p (v4si_mode))
17732 test_vnx4si_v4si (vnx4si_mode, v4si_mode);
17733 test_v4si_vnx4si (v4si_mode, vnx4si_mode);
17736 } // end of test_fold_vec_perm_cst namespace
17738 /* Verify that various binary operations on vectors are folded
17739 correctly. */
17741 static void
17742 test_vector_folding ()
17744 tree inner_type = integer_type_node;
17745 tree type = build_vector_type (inner_type, 4);
17746 tree zero = build_zero_cst (type);
17747 tree one = build_one_cst (type);
17748 tree index = build_index_vector (type, 0, 1);
17750 /* Verify equality tests that return a scalar boolean result. */
17751 tree res_type = boolean_type_node;
17752 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
17753 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
17754 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
17755 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
17756 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
17757 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17758 index, one)));
17759 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
17760 index, index)));
17761 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17762 index, index)));
17765 /* Verify folding of VEC_DUPLICATE_EXPRs. */
17767 static void
17768 test_vec_duplicate_folding ()
17770 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
17771 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
17772 /* This will be 1 if VEC_MODE isn't a vector mode. */
17773 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
17775 tree type = build_vector_type (ssizetype, nunits);
17776 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
17777 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
17778 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
17781 /* Run all of the selftests within this file. */
17783 void
17784 fold_const_cc_tests ()
17786 test_arithmetic_folding ();
17787 test_vector_folding ();
17788 test_vec_duplicate_folding ();
17789 test_fold_vec_perm_cst::test ();
17792 } // namespace selftest
17794 #endif /* CHECKING_P */