/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
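
/* A minimal usage sketch of the size_* entry points (assuming a
   context where sizetype trees are being built):

     tree sz = size_binop (MULT_EXPR, size_int (4), size_int (8));

   Both operands are sizetype constants here, so the result folds
   immediately to the sizetype constant 32; non-constant operands of
   equivalent integer types instead yield a (possibly folded) tree for
   the operation.  */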
#define INCLUDE_ALGORITHM
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding a C++ manifestly-constant-evaluated
   context; zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 is equivalent to X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 is equivalent to X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	  /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int min = widest_int::from (r.lower_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (min == wi::to_widest (exp1))
		code = MAX_EXPR;
	    }
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 is equivalent to X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 is equivalent to X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	  /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int max = widest_int::from (r.upper_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (max == wi::to_widest (exp1))
		code = MIN_EXPR;
	    }
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
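
/* For example (a sketch of the mapping above; CST is an integer
   constant and X a matching operand):

     (X <= CST - 1) ? X : CST	maps LE_EXPR to LT_EXPR, then MIN_EXPR
     (X >= CST + 1) ? X : CST	maps GE_EXPR to GT_EXPR, then MAX_EXPR

   ERROR_MARK is returned whenever NaNs or signed zeros could change
   the result, or when the operands do not match up as required.  */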
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
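
/* A minimal usage sketch (assuming integer-constant operands):

     tree twelve = build_int_cst (integer_type_node, 12);
     tree four = build_int_cst (integer_type_node, 4);
     tree quo = div_if_zero_remainder (twelve, four);

   yields the constant 3, whereas dividing by 5 instead would yield
   NULL_TREE, since 12 is not a multiple of 5.  */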
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
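
/* The usual caller-side pattern (a sketch; STMT stands for whatever
   statement the caller is analyzing):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ...decide whether RES will actually be used...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   so that a -Wstrict-overflow diagnostic triggered inside fold () is
   only emitted when the folded result is really used.  */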
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
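
/* For instance, the int constant -2147483648 (INT_MIN for 32-bit int)
   is exactly the sign bit, so the check above rejects it: negating it
   would overflow, because +2147483648 is not representable in int.
   Every other signed int constant is accepted.  */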
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand of it
	 does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
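
/* A few concrete answers (sketched; A and B are signed ints compiled
   with -fwrapv, so overflow wraps):

     negate_expr_p (A - B)	-> true: -(A - B) is just B - A
     negate_expr_p (A + B)	-> true only if -A or -B is itself cheap
     negate_expr_p (INT_MIN)	-> false: may_negate_without_overflow_p
				   rejects the lone sign bit

   The contract is that negate_expr must produce a simplified tree
   whenever this predicate returns true.  */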
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
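
/* Unlike fold_negate_expr, negate_expr therefore always produces some
   tree: negate_expr (b - a) folds to a - b via the MINUS_EXPR case
   above, while negating an opaque SSA name X merely wraps it in
   NEGATE_EXPR (a sketch of the behavior; X, A and B are integer
   operands).  */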
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
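
/* Decomposition example (a sketch; X is an SSA name and CODE is
   PLUS_EXPR): splitting IN = X - 5 yields

     return value (variable part)	   = X
     *litp				   = 0
     *minus_litp			   = 5
     *conp, *minus_conp, *minus_varp	   = 0

   i.e. var + *conp + *litp - *minus_litp - *minus_conp - *minus_varp
   reassembles the original tree.  */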
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
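
/* Caller-side sketch (assuming two wide_ints of matching precision):

     wide_int r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, PLUS_EXPR, a, b, SIGNED, &ovf))
       ... use R, checking OVF for signed wrap ...

   A false return means CODE is not handled, or ARG2 was out of range
   (a negative shift count or a zero divisor).  */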
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
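
/* For instance (a sketch with int constants):

     tree three = build_int_cst (integer_type_node, 3);
     tree four = build_int_cst (integer_type_node, 4);
     tree seven = int_const_binop (PLUS_EXPR, three, four);

   folds to the INTEGER_CST 7.  Results that wrapped are still
   returned, routed through force_fit_type, which sets TREE_OVERFLOW
   according to OVERFLOWABLE and the signedness of the type.  */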
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
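
/* The LSHIFT_EXPR distinction, concretely: (a + b) << c equals
   (a << c) + (b << c), so shifting distributes over addition in
   operand 1; but c << (a + b) is not (c << a) + (c << b), so it does
   not distribute in operand 2.  This is what lets const_binop below
   operate directly on the stepped encoding of a VECTOR_CST.  */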
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		    t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
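
/* Encoding example (a sketch with illustrative values): adding the
   stepped VECTOR_CSTs { 1, 2, 3, ... } and { 10, 20, 30, ... } only
   needs the encoded leading elements, because PLUS_EXPR keeps the
   element series linear: { 11, 22, 33, ... }.  Likewise a stepped
   vector shifted by a scalar, { 1, 2, 3, ... } << 1, stays a series,
   { 2, 4, 6, ... }, which is why step_ok_p consults
   distributes_over_addition_p above.  */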
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0) - 1);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
1996 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1997 indicates which particular sizetype to create. */
1999 tree
2000 size_int_kind (poly_int64 number, enum size_type_kind kind)
2002 return build_int_cst (sizetype_tab[(int) kind], number);
2005 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2006 is a tree code. The type of the result is taken from the operands.
2007 Both must be equivalent integer types, a la int_binop_types_match_p.
2008 If the operands are constant, so is the result. */
2010 tree
2011 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2013 tree type = TREE_TYPE (arg0);
2015 if (arg0 == error_mark_node || arg1 == error_mark_node)
2016 return error_mark_node;
2018 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2019 TREE_TYPE (arg1)));
2021 /* Handle the special case of two poly_int constants faster. */
2022 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2024 /* And some specific cases even faster than that. */
2025 if (code == PLUS_EXPR)
2027 if (integer_zerop (arg0)
2028 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2029 return arg1;
2030 if (integer_zerop (arg1)
2031 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2032 return arg0;
2034 else if (code == MINUS_EXPR)
2036 if (integer_zerop (arg1)
2037 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2038 return arg0;
2040 else if (code == MULT_EXPR)
2042 if (integer_onep (arg0)
2043 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2044 return arg1;
2047 /* Handle general case of two integer constants. For sizetype
2048 constant calculations we always want to know about overflow,
2049 even in the unsigned case. */
2050 tree res = int_const_binop (code, arg0, arg1, -1);
2051 if (res != NULL_TREE)
2052 return res;
2055 return fold_build2_loc (loc, code, type, arg0, arg1);
2058 /* Given two values, either both of sizetype or both of bitsizetype,
2059 compute the difference between the two values. Return the value
2060 in a signed type corresponding to the type of the operands. */
2062 tree
2063 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2065 tree type = TREE_TYPE (arg0);
2066 tree ctype;
2068 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2069 TREE_TYPE (arg1)));
2071 /* If the type is already signed, just do the simple thing. */
2072 if (!TYPE_UNSIGNED (type))
2073 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2075 if (type == sizetype)
2076 ctype = ssizetype;
2077 else if (type == bitsizetype)
2078 ctype = sbitsizetype;
2079 else
2080 ctype = signed_type_for (type);
2082 /* If either operand is not a constant, do the conversions to the signed
2083 type and subtract. The hardware will do the right thing with any
2084 overflow in the subtraction. */
2085 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2086 return size_binop_loc (loc, MINUS_EXPR,
2087 fold_convert_loc (loc, ctype, arg0),
2088 fold_convert_loc (loc, ctype, arg1));
2090 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2091 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2092 overflow) and negate (which can't either). Special-case a result
2093 of zero while we're here. */
2094 if (tree_int_cst_equal (arg0, arg1))
2095 return build_int_cst (ctype, 0);
2096 else if (tree_int_cst_lt (arg1, arg0))
2097 return fold_convert_loc (loc, ctype,
2098 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2099 else
2100 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2101 fold_convert_loc (loc, ctype,
2102 size_binop_loc (loc,
2103 MINUS_EXPR,
2104 arg1, arg0)));
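/* Editorial sketch, not part of the GCC sources: a minimal illustration of
   size_diffop_loc, assuming the usual global type nodes are initialized.
   Subtracting the larger sizetype constant from the smaller one takes the
   "subtract the other way and negate" path above.  */

static void ATTRIBUTE_UNUSED
example_size_diffop ()
{
  tree a = size_int (3);
  tree b = size_int (5);
  /* Both operands are unsigned sizetype; the result type is ssizetype.  */
  tree d = size_diffop_loc (UNKNOWN_LOCATION, a, b);
  /* D is the ssizetype constant -2, computed as -(5 - 3).  */
  gcc_assert (tree_int_cst_equal (d, build_int_cst (ssizetype, -2)));
}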
2107 /* A subroutine of fold_convert_const handling conversions of an
2108 INTEGER_CST to another integer type. */
2110 static tree
2111 fold_convert_const_int_from_int (tree type, const_tree arg1)
2113 /* Given an integer constant, make new constant with new type,
2114 appropriately sign-extended or truncated. Use widest_int
2115 so that any extension is done according to ARG1's type. */
2116 return force_fit_type (type, wi::to_widest (arg1),
2117 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2118 TREE_OVERFLOW (arg1));
2121 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2122 to an integer type. */
2124 static tree
2125 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2127 bool overflow = false;
2128 tree t;
2130 /* The following code implements the floating point to integer
2131 conversion rules required by the Java Language Specification:
2132 IEEE NaNs are mapped to zero and values that overflow
2133 the target precision saturate, i.e. values greater than
2134 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2135 are mapped to INT_MIN. These semantics are allowed by the
2136 C and C++ standards that simply state that the behavior of
2137 FP-to-integer conversion is unspecified upon overflow. */
2139 wide_int val;
2140 REAL_VALUE_TYPE r;
2141 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2143 switch (code)
2145 case FIX_TRUNC_EXPR:
2146 real_trunc (&r, VOIDmode, &x);
2147 break;
2149 default:
2150 gcc_unreachable ();
2153 /* If R is NaN, return zero and show we have an overflow. */
2154 if (REAL_VALUE_ISNAN (r))
2156 overflow = true;
2157 val = wi::zero (TYPE_PRECISION (type));
2160 /* See if R is less than the lower bound or greater than the
2161 upper bound. */
2163 if (! overflow)
2165 tree lt = TYPE_MIN_VALUE (type);
2166 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2167 if (real_less (&r, &l))
2169 overflow = true;
2170 val = wi::to_wide (lt);
2174 if (! overflow)
2176 tree ut = TYPE_MAX_VALUE (type);
2177 if (ut)
2179 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2180 if (real_less (&u, &r))
2182 overflow = true;
2183 val = wi::to_wide (ut);
2188 if (! overflow)
2189 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2191 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2192 return t;
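/* Editorial examples, not from the GCC sources: under the saturating
   semantics above, folding (int) 3.75 via FIX_TRUNC_EXPR yields 3 and
   (int) -3.75 yields -3, while (int) 1e30 yields INT_MAX and (int) NaN
   yields 0, both with TREE_OVERFLOW set on the result.  */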
2195 /* A subroutine of fold_convert_const handling conversions of a
2196 FIXED_CST to an integer type. */
2198 static tree
2199 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2201 tree t;
2202 double_int temp, temp_trunc;
2203 scalar_mode mode;
2205 /* Right shift FIXED_CST to temp by fbit. */
2206 temp = TREE_FIXED_CST (arg1).data;
2207 mode = TREE_FIXED_CST (arg1).mode;
2208 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2210 temp = temp.rshift (GET_MODE_FBIT (mode),
2211 HOST_BITS_PER_DOUBLE_INT,
2212 SIGNED_FIXED_POINT_MODE_P (mode));
2214 /* Left shift temp to temp_trunc by fbit. */
2215 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2216 HOST_BITS_PER_DOUBLE_INT,
2217 SIGNED_FIXED_POINT_MODE_P (mode));
2219 else
2221 temp = double_int_zero;
2222 temp_trunc = double_int_zero;
2225 /* If FIXED_CST is negative, we need to round the value toward 0:
2226 if the fractional bits are nonzero, add 1 to TEMP. */
2227 if (SIGNED_FIXED_POINT_MODE_P (mode)
2228 && temp_trunc.is_negative ()
2229 && TREE_FIXED_CST (arg1).data != temp_trunc)
2230 temp += double_int_one;
2232 /* Given a fixed-point constant, make new constant with new type,
2233 appropriately sign-extended or truncated. */
2234 t = force_fit_type (type, temp, -1,
2235 (temp.is_negative ()
2236 && (TYPE_UNSIGNED (type)
2237 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2238 | TREE_OVERFLOW (arg1));
2240 return t;
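/* Editorial example, not from the GCC sources: for the signed fixed-point
   constant -2.5, the arithmetic right shift above rounds toward negative
   infinity, so TEMP is -3; the fractional bits are nonzero, so 1 is added
   and the folded integer is -2, i.e. the value rounded toward 0.  */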
2243 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2244 to another floating point type. */
2246 static tree
2247 fold_convert_const_real_from_real (tree type, const_tree arg1)
2249 REAL_VALUE_TYPE value;
2250 tree t;
2252 /* If the underlying modes are the same, simply treat it as a
2253 copy and rebuild with TREE_REAL_CST information and the
2254 given type. */
2255 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2257 t = build_real (type, TREE_REAL_CST (arg1));
2258 return t;
2261 /* Don't perform the operation if flag_signaling_nans is on
2262 and the operand is a signaling NaN. */
2263 if (HONOR_SNANS (arg1)
2264 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2265 return NULL_TREE;
2267 /* With flag_rounding_math we should respect the current rounding mode
2268 unless the conversion is exact. */
2269 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2270 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2271 return NULL_TREE;
2273 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2274 t = build_real (type, value);
2276 /* If converting an infinity or NAN to a representation that doesn't
2277 have one, set the overflow bit so that we can produce some kind of
2278 error message at the appropriate point if necessary. It's not the
2279 most user-friendly message, but it's better than nothing. */
2280 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2281 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2282 TREE_OVERFLOW (t) = 1;
2283 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2284 && !MODE_HAS_NANS (TYPE_MODE (type)))
2285 TREE_OVERFLOW (t) = 1;
2286 /* Regular overflow: the conversion produced an infinity in a mode that
2287 can't represent one. */
2288 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2289 && REAL_VALUE_ISINF (value)
2290 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2291 TREE_OVERFLOW (t) = 1;
2292 else
2293 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 return t;
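/* Editorial example, not from the GCC sources: narrowing the double
   constant 1e300 to float produces +Inf, but since IEEE float has
   infinities the overflow bit is not set; the "regular overflow" case
   above only fires for modes that cannot represent infinities.  */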
2297 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2298 to a floating point type. */
2300 static tree
2301 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2303 REAL_VALUE_TYPE value;
2304 tree t;
2306 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2307 &TREE_FIXED_CST (arg1));
2308 t = build_real (type, value);
2310 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2311 return t;
2314 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2315 to another fixed-point type. */
2317 static tree
2318 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2320 FIXED_VALUE_TYPE value;
2321 tree t;
2322 bool overflow_p;
2324 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2325 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2326 t = build_fixed (type, value);
2328 /* Propagate overflow flags. */
2329 if (overflow_p | TREE_OVERFLOW (arg1))
2330 TREE_OVERFLOW (t) = 1;
2331 return t;
2334 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2335 to a fixed-point type. */
2337 static tree
2338 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2340 FIXED_VALUE_TYPE value;
2341 tree t;
2342 bool overflow_p;
2343 double_int di;
2345 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2347 di.low = TREE_INT_CST_ELT (arg1, 0);
2348 if (TREE_INT_CST_NUNITS (arg1) == 1)
2349 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2350 else
2351 di.high = TREE_INT_CST_ELT (arg1, 1);
2353 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2354 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2355 TYPE_SATURATING (type));
2356 t = build_fixed (type, value);
2358 /* Propagate overflow flags. */
2359 if (overflow_p | TREE_OVERFLOW (arg1))
2360 TREE_OVERFLOW (t) = 1;
2361 return t;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2367 static tree
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2371 tree t;
2372 bool overflow_p;
2374 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2381 TREE_OVERFLOW (t) = 1;
2382 return t;
2385 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2386 type TYPE. If no simplification can be done return NULL_TREE. */
2388 static tree
2389 fold_convert_const (enum tree_code code, tree type, tree arg1)
2391 tree arg_type = TREE_TYPE (arg1);
2392 if (arg_type == type)
2393 return arg1;
2395 /* We can't widen types, since the runtime value could overflow the
2396 original type before being extended to the new type. */
2397 if (POLY_INT_CST_P (arg1)
2398 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2399 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2400 return build_poly_int_cst (type,
2401 poly_wide_int::from (poly_int_cst_value (arg1),
2402 TYPE_PRECISION (type),
2403 TYPE_SIGN (arg_type)));
2405 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2406 || TREE_CODE (type) == OFFSET_TYPE)
2408 if (TREE_CODE (arg1) == INTEGER_CST)
2409 return fold_convert_const_int_from_int (type, arg1);
2410 else if (TREE_CODE (arg1) == REAL_CST)
2411 return fold_convert_const_int_from_real (code, type, arg1);
2412 else if (TREE_CODE (arg1) == FIXED_CST)
2413 return fold_convert_const_int_from_fixed (type, arg1);
2415 else if (SCALAR_FLOAT_TYPE_P (type))
2417 if (TREE_CODE (arg1) == INTEGER_CST)
2419 tree res = build_real_from_int_cst (type, arg1);
2420 /* Avoid the folding if flag_rounding_math is on and the
2421 conversion is not exact. */
2422 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2424 bool fail = false;
2425 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2426 TYPE_PRECISION (TREE_TYPE (arg1)));
2427 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2428 return NULL_TREE;
2430 return res;
2432 else if (TREE_CODE (arg1) == REAL_CST)
2433 return fold_convert_const_real_from_real (type, arg1);
2434 else if (TREE_CODE (arg1) == FIXED_CST)
2435 return fold_convert_const_real_from_fixed (type, arg1);
2437 else if (FIXED_POINT_TYPE_P (type))
2439 if (TREE_CODE (arg1) == FIXED_CST)
2440 return fold_convert_const_fixed_from_fixed (type, arg1);
2441 else if (TREE_CODE (arg1) == INTEGER_CST)
2442 return fold_convert_const_fixed_from_int (type, arg1);
2443 else if (TREE_CODE (arg1) == REAL_CST)
2444 return fold_convert_const_fixed_from_real (type, arg1);
2446 else if (VECTOR_TYPE_P (type))
2448 if (TREE_CODE (arg1) == VECTOR_CST
2449 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2451 tree elttype = TREE_TYPE (type);
2452 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2453 /* We can't handle steps directly when extending, since the
2454 values need to wrap at the original precision first. */
2455 bool step_ok_p
2456 = (INTEGRAL_TYPE_P (elttype)
2457 && INTEGRAL_TYPE_P (arg1_elttype)
2458 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2459 tree_vector_builder v;
2460 if (!v.new_unary_operation (type, arg1, step_ok_p))
2461 return NULL_TREE;
2462 unsigned int len = v.encoded_nelts ();
2463 for (unsigned int i = 0; i < len; ++i)
2465 tree elt = VECTOR_CST_ELT (arg1, i);
2466 tree cvt = fold_convert_const (code, elttype, elt);
2467 if (cvt == NULL_TREE)
2468 return NULL_TREE;
2469 v.quick_push (cvt);
2471 return v.build ();
2474 return NULL_TREE;
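/* Editorial sketch, not part of the GCC sources: fold_convert_const on an
   INTEGER_CST, assuming the usual global type nodes are initialized.  */

static void ATTRIBUTE_UNUSED
example_fold_convert_const ()
{
  tree five = build_int_cst (integer_type_node, 5);
  /* 5 is exactly representable as a float, so this folds even when
     HONOR_SIGN_DEPENDENT_ROUNDING would veto an inexact conversion.  */
  tree f = fold_convert_const (FLOAT_EXPR, float_type_node, five);
  gcc_assert (f != NULL_TREE && TREE_CODE (f) == REAL_CST);
}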
2477 /* Construct a vector of zero elements of vector type TYPE. */
2479 static tree
2480 build_zero_vector (tree type)
2482 tree t;
2484 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2485 return build_vector_from_val (type, t);
2488 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2490 bool
2491 fold_convertible_p (const_tree type, const_tree arg)
2493 const_tree orig = TREE_TYPE (arg);
2495 if (type == orig)
2496 return true;
2498 if (TREE_CODE (arg) == ERROR_MARK
2499 || TREE_CODE (type) == ERROR_MARK
2500 || TREE_CODE (orig) == ERROR_MARK)
2501 return false;
2503 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2504 return true;
2506 switch (TREE_CODE (type))
2508 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2509 case POINTER_TYPE: case REFERENCE_TYPE:
2510 case OFFSET_TYPE:
2511 return (INTEGRAL_TYPE_P (orig)
2512 || (POINTER_TYPE_P (orig)
2513 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2514 || TREE_CODE (orig) == OFFSET_TYPE);
2516 case REAL_TYPE:
2517 case FIXED_POINT_TYPE:
2518 case VOID_TYPE:
2519 return TREE_CODE (type) == TREE_CODE (orig);
2521 case VECTOR_TYPE:
2522 return (VECTOR_TYPE_P (orig)
2523 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2524 TYPE_VECTOR_SUBPARTS (orig))
2525 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2527 default:
2528 return false;
2532 /* Convert expression ARG to type TYPE. Used by the middle-end for
2533 simple conversions in preference to calling the front-end's convert. */
2535 tree
2536 fold_convert_loc (location_t loc, tree type, tree arg)
2538 tree orig = TREE_TYPE (arg);
2539 tree tem;
2541 if (type == orig)
2542 return arg;
2544 if (TREE_CODE (arg) == ERROR_MARK
2545 || TREE_CODE (type) == ERROR_MARK
2546 || TREE_CODE (orig) == ERROR_MARK)
2547 return error_mark_node;
2549 switch (TREE_CODE (type))
2551 case POINTER_TYPE:
2552 case REFERENCE_TYPE:
2553 /* Handle conversions between pointers to different address spaces. */
2554 if (POINTER_TYPE_P (orig)
2555 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2556 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2557 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2558 /* fall through */
2560 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2561 case OFFSET_TYPE:
2562 if (TREE_CODE (arg) == INTEGER_CST)
2564 tem = fold_convert_const (NOP_EXPR, type, arg);
2565 if (tem != NULL_TREE)
2566 return tem;
2568 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2569 || TREE_CODE (orig) == OFFSET_TYPE)
2570 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2571 if (TREE_CODE (orig) == COMPLEX_TYPE)
2572 return fold_convert_loc (loc, type,
2573 fold_build1_loc (loc, REALPART_EXPR,
2574 TREE_TYPE (orig), arg));
2575 gcc_assert (VECTOR_TYPE_P (orig)
2576 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2577 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2579 case REAL_TYPE:
2580 if (TREE_CODE (arg) == INTEGER_CST)
2582 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2583 if (tem != NULL_TREE)
2584 return tem;
2586 else if (TREE_CODE (arg) == REAL_CST)
2588 tem = fold_convert_const (NOP_EXPR, type, arg);
2589 if (tem != NULL_TREE)
2590 return tem;
2592 else if (TREE_CODE (arg) == FIXED_CST)
2594 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2595 if (tem != NULL_TREE)
2596 return tem;
2599 switch (TREE_CODE (orig))
2601 case INTEGER_TYPE:
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2604 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2606 case REAL_TYPE:
2607 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2609 case FIXED_POINT_TYPE:
2610 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2612 case COMPLEX_TYPE:
2613 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2614 return fold_convert_loc (loc, type, tem);
2616 default:
2617 gcc_unreachable ();
2620 case FIXED_POINT_TYPE:
2621 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2622 || TREE_CODE (arg) == REAL_CST)
2624 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2625 if (tem != NULL_TREE)
2626 goto fold_convert_exit;
2629 switch (TREE_CODE (orig))
2631 case FIXED_POINT_TYPE:
2632 case INTEGER_TYPE:
2633 case ENUMERAL_TYPE:
2634 case BOOLEAN_TYPE:
2635 case REAL_TYPE:
2636 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2638 case COMPLEX_TYPE:
2639 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2640 return fold_convert_loc (loc, type, tem);
2642 default:
2643 gcc_unreachable ();
2646 case COMPLEX_TYPE:
2647 switch (TREE_CODE (orig))
2649 case INTEGER_TYPE:
2650 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2651 case POINTER_TYPE: case REFERENCE_TYPE:
2652 case REAL_TYPE:
2653 case FIXED_POINT_TYPE:
2654 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2655 fold_convert_loc (loc, TREE_TYPE (type), arg),
2656 fold_convert_loc (loc, TREE_TYPE (type),
2657 integer_zero_node));
2658 case COMPLEX_TYPE:
2660 tree rpart, ipart;
2662 if (TREE_CODE (arg) == COMPLEX_EXPR)
2664 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2665 TREE_OPERAND (arg, 0));
2666 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2667 TREE_OPERAND (arg, 1));
2668 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2671 arg = save_expr (arg);
2672 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2673 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2674 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2675 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2676 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2679 default:
2680 gcc_unreachable ();
2683 case VECTOR_TYPE:
2684 if (integer_zerop (arg))
2685 return build_zero_vector (type);
2686 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2687 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2688 || VECTOR_TYPE_P (orig));
2689 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2691 case VOID_TYPE:
2692 tem = fold_ignored_result (arg);
2693 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2695 default:
2696 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2697 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2698 gcc_unreachable ();
2700 fold_convert_exit:
2701 tem = protected_set_expr_location_unshare (tem, loc);
2702 return tem;
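/* Editorial example, not from the GCC sources: converting a COMPLEX_TYPE
   expression Z to double through fold_convert_loc builds
   (double) REALPART_EXPR <Z>, per the COMPLEX_TYPE arms above; the
   imaginary part is simply dropped.  */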
2705 /* Return false if expr can be assumed not to be an lvalue, true
2706 otherwise. */
2708 static bool
2709 maybe_lvalue_p (const_tree x)
2711 /* We only need to wrap lvalue tree codes. */
2712 switch (TREE_CODE (x))
2714 case VAR_DECL:
2715 case PARM_DECL:
2716 case RESULT_DECL:
2717 case LABEL_DECL:
2718 case FUNCTION_DECL:
2719 case SSA_NAME:
2720 case COMPOUND_LITERAL_EXPR:
2722 case COMPONENT_REF:
2723 case MEM_REF:
2724 case INDIRECT_REF:
2725 case ARRAY_REF:
2726 case ARRAY_RANGE_REF:
2727 case BIT_FIELD_REF:
2728 case OBJ_TYPE_REF:
2730 case REALPART_EXPR:
2731 case IMAGPART_EXPR:
2732 case PREINCREMENT_EXPR:
2733 case PREDECREMENT_EXPR:
2734 case SAVE_EXPR:
2735 case TRY_CATCH_EXPR:
2736 case WITH_CLEANUP_EXPR:
2737 case COMPOUND_EXPR:
2738 case MODIFY_EXPR:
2739 case TARGET_EXPR:
2740 case COND_EXPR:
2741 case BIND_EXPR:
2742 case VIEW_CONVERT_EXPR:
2743 break;
2745 default:
2746 /* Assume the worst for front-end tree codes. */
2747 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2748 break;
2749 return false;
2752 return true;
2755 /* Return an expr equal to X but certainly not valid as an lvalue. */
2757 tree
2758 non_lvalue_loc (location_t loc, tree x)
2760 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2761 us. */
2762 if (in_gimple_form)
2763 return x;
2765 if (! maybe_lvalue_p (x))
2766 return x;
2767 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2770 /* Given a tree comparison code, return the code that is the logical inverse.
2771 It is generally not safe to do this for floating-point comparisons, except
2772 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2773 ERROR_MARK in this case. */
2775 enum tree_code
2776 invert_tree_comparison (enum tree_code code, bool honor_nans)
2778 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2779 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2780 return ERROR_MARK;
2782 switch (code)
2784 case EQ_EXPR:
2785 return NE_EXPR;
2786 case NE_EXPR:
2787 return EQ_EXPR;
2788 case GT_EXPR:
2789 return honor_nans ? UNLE_EXPR : LE_EXPR;
2790 case GE_EXPR:
2791 return honor_nans ? UNLT_EXPR : LT_EXPR;
2792 case LT_EXPR:
2793 return honor_nans ? UNGE_EXPR : GE_EXPR;
2794 case LE_EXPR:
2795 return honor_nans ? UNGT_EXPR : GT_EXPR;
2796 case LTGT_EXPR:
2797 return UNEQ_EXPR;
2798 case UNEQ_EXPR:
2799 return LTGT_EXPR;
2800 case UNGT_EXPR:
2801 return LE_EXPR;
2802 case UNGE_EXPR:
2803 return LT_EXPR;
2804 case UNLT_EXPR:
2805 return GE_EXPR;
2806 case UNLE_EXPR:
2807 return GT_EXPR;
2808 case ORDERED_EXPR:
2809 return UNORDERED_EXPR;
2810 case UNORDERED_EXPR:
2811 return ORDERED_EXPR;
2812 default:
2813 gcc_unreachable ();
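/* Editorial examples, not from the GCC sources: with NaNs honored, the
   inverse of LT_EXPR is UNGE_EXPR, because !(x < y) is also true when x
   and y are unordered; without NaNs it is plain GE_EXPR.  When NaNs are
   honored and flag_trapping_math is set, LT_EXPR is refused (ERROR_MARK),
   since the unordered replacement would not trap where the original
   comparison does.  */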
2817 /* Similar, but return the comparison that results if the operands are
2818 swapped. This is safe for floating-point. */
2820 enum tree_code
2821 swap_tree_comparison (enum tree_code code)
2823 switch (code)
2825 case EQ_EXPR:
2826 case NE_EXPR:
2827 case ORDERED_EXPR:
2828 case UNORDERED_EXPR:
2829 case LTGT_EXPR:
2830 case UNEQ_EXPR:
2831 return code;
2832 case GT_EXPR:
2833 return LT_EXPR;
2834 case GE_EXPR:
2835 return LE_EXPR;
2836 case LT_EXPR:
2837 return GT_EXPR;
2838 case LE_EXPR:
2839 return GE_EXPR;
2840 case UNGT_EXPR:
2841 return UNLT_EXPR;
2842 case UNGE_EXPR:
2843 return UNLE_EXPR;
2844 case UNLT_EXPR:
2845 return UNGT_EXPR;
2846 case UNLE_EXPR:
2847 return UNGE_EXPR;
2848 default:
2849 gcc_unreachable ();
2854 /* Convert a comparison tree code from an enum tree_code representation
2855 into a compcode bit-based encoding. This function is the inverse of
2856 compcode_to_comparison. */
2858 static enum comparison_code
2859 comparison_to_compcode (enum tree_code code)
2861 switch (code)
2863 case LT_EXPR:
2864 return COMPCODE_LT;
2865 case EQ_EXPR:
2866 return COMPCODE_EQ;
2867 case LE_EXPR:
2868 return COMPCODE_LE;
2869 case GT_EXPR:
2870 return COMPCODE_GT;
2871 case NE_EXPR:
2872 return COMPCODE_NE;
2873 case GE_EXPR:
2874 return COMPCODE_GE;
2875 case ORDERED_EXPR:
2876 return COMPCODE_ORD;
2877 case UNORDERED_EXPR:
2878 return COMPCODE_UNORD;
2879 case UNLT_EXPR:
2880 return COMPCODE_UNLT;
2881 case UNEQ_EXPR:
2882 return COMPCODE_UNEQ;
2883 case UNLE_EXPR:
2884 return COMPCODE_UNLE;
2885 case UNGT_EXPR:
2886 return COMPCODE_UNGT;
2887 case LTGT_EXPR:
2888 return COMPCODE_LTGT;
2889 case UNGE_EXPR:
2890 return COMPCODE_UNGE;
2891 default:
2892 gcc_unreachable ();
2896 /* Convert a compcode bit-based encoding of a comparison operator back
2897 to GCC's enum tree_code representation. This function is the
2898 inverse of comparison_to_compcode. */
2900 static enum tree_code
2901 compcode_to_comparison (enum comparison_code code)
2903 switch (code)
2905 case COMPCODE_LT:
2906 return LT_EXPR;
2907 case COMPCODE_EQ:
2908 return EQ_EXPR;
2909 case COMPCODE_LE:
2910 return LE_EXPR;
2911 case COMPCODE_GT:
2912 return GT_EXPR;
2913 case COMPCODE_NE:
2914 return NE_EXPR;
2915 case COMPCODE_GE:
2916 return GE_EXPR;
2917 case COMPCODE_ORD:
2918 return ORDERED_EXPR;
2919 case COMPCODE_UNORD:
2920 return UNORDERED_EXPR;
2921 case COMPCODE_UNLT:
2922 return UNLT_EXPR;
2923 case COMPCODE_UNEQ:
2924 return UNEQ_EXPR;
2925 case COMPCODE_UNLE:
2926 return UNLE_EXPR;
2927 case COMPCODE_UNGT:
2928 return UNGT_EXPR;
2929 case COMPCODE_LTGT:
2930 return LTGT_EXPR;
2931 case COMPCODE_UNGE:
2932 return UNGE_EXPR;
2933 default:
2934 gcc_unreachable ();
2938 /* Return true if COND1 tests the opposite condition of COND2. */
2940 bool
2941 inverse_conditions_p (const_tree cond1, const_tree cond2)
2943 return (COMPARISON_CLASS_P (cond1)
2944 && COMPARISON_CLASS_P (cond2)
2945 && (invert_tree_comparison
2946 (TREE_CODE (cond1),
2947 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2948 && operand_equal_p (TREE_OPERAND (cond1, 0),
2949 TREE_OPERAND (cond2, 0), 0)
2950 && operand_equal_p (TREE_OPERAND (cond1, 1),
2951 TREE_OPERAND (cond2, 1), 0));
2954 /* Return a tree for the comparison which is the combination of
2955 doing the AND or OR (depending on CODE) of the two operations LCODE
2956 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2957 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2958 if this makes the transformation invalid. */
2960 tree
2961 combine_comparisons (location_t loc,
2962 enum tree_code code, enum tree_code lcode,
2963 enum tree_code rcode, tree truth_type,
2964 tree ll_arg, tree lr_arg)
2966 bool honor_nans = HONOR_NANS (ll_arg);
2967 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2968 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2969 int compcode;
2971 switch (code)
2973 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2974 compcode = lcompcode & rcompcode;
2975 break;
2977 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2978 compcode = lcompcode | rcompcode;
2979 break;
2981 default:
2982 return NULL_TREE;
2985 if (!honor_nans)
2987 /* Eliminate unordered comparisons, as well as LTGT and ORD
2988 which are not used unless the mode has NaNs. */
2989 compcode &= ~COMPCODE_UNORD;
2990 if (compcode == COMPCODE_LTGT)
2991 compcode = COMPCODE_NE;
2992 else if (compcode == COMPCODE_ORD)
2993 compcode = COMPCODE_TRUE;
2995 else if (flag_trapping_math)
2997 /* Check that the original operation and the optimized ones will trap
2998 under the same condition. */
2999 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3000 && (lcompcode != COMPCODE_EQ)
3001 && (lcompcode != COMPCODE_ORD);
3002 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3003 && (rcompcode != COMPCODE_EQ)
3004 && (rcompcode != COMPCODE_ORD);
3005 bool trap = (compcode & COMPCODE_UNORD) == 0
3006 && (compcode != COMPCODE_EQ)
3007 && (compcode != COMPCODE_ORD);
3009 /* In a short-circuited boolean expression the LHS might be
3010 such that the RHS, if evaluated, will never trap. For
3011 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3012 if neither x nor y is NaN. (This is a mixed blessing: for
3013 example, the expression above will never trap, hence
3014 optimizing it to x < y would be invalid). */
3015 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3016 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3017 rtrap = false;
3019 /* If the comparison was short-circuited, and only the RHS
3020 trapped, we may now generate a spurious trap. */
3021 if (rtrap && !ltrap
3022 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3023 return NULL_TREE;
3025 /* If we changed the conditions that cause a trap, we lose. */
3026 if ((ltrap || rtrap) != trap)
3027 return NULL_TREE;
3030 if (compcode == COMPCODE_TRUE)
3031 return constant_boolean_node (true, truth_type);
3032 else if (compcode == COMPCODE_FALSE)
3033 return constant_boolean_node (false, truth_type);
3034 else
3036 enum tree_code tcode;
3038 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3039 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
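/* Editorial examples, not from the GCC sources: combining
   (x < y) || (x == y) yields x <= y, since COMPCODE_LT | COMPCODE_EQ is
   COMPCODE_LE, while (x < y) && (x > y) yields COMPCODE_FALSE and hence
   the constant false node, provided the trap analysis above permits the
   transformation.  */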
3043 /* Return nonzero if two operands (typically of the same tree node)
3044 are necessarily equal. FLAGS modifies behavior as follows:
3046 If OEP_ONLY_CONST is set, only return nonzero for constants.
3047 This function tests whether the operands are indistinguishable;
3048 it does not test whether they are equal using C's == operation.
3049 The distinction is important for IEEE floating point, because
3050 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3051 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3053 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3054 even though it may hold multiple values during a function.
3055 This is because a GCC tree node guarantees that nothing else is
3056 executed between the evaluation of its "operands" (which may often
3057 be evaluated in arbitrary order). Hence if the operands themselves
3058 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3059 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3060 unset means assuming isochronic (or instantaneous) tree equivalence.
3061 Unless comparing arbitrary expression trees, such as from different
3062 statements, this flag can usually be left unset.
3064 If OEP_PURE_SAME is set, then pure functions with identical arguments
3065 are considered the same. It is used when the caller has other ways
3066 to ensure that global memory is unchanged in between.
3068 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3069 not values of expressions.
3071 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3072 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3074 If OEP_BITWISE is set, then require the values to be bitwise identical
3075 rather than simply numerically equal. Do not take advantage of things
3076 like math-related flags or undefined behavior; only return true for
3077 values that are provably bitwise identical in all circumstances.
3079 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3080 any operand with side effects. This is unnecessarily conservative in the
3081 case we know that arg0 and arg1 are in disjoint code paths (such as in
3082 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3083 addresses with TREE_CONSTANT flag set so we know that &var == &var
3084 even if var is volatile. */
3086 bool
3087 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3088 unsigned int flags)
3090 bool r;
3091 if (verify_hash_value (arg0, arg1, flags, &r))
3092 return r;
3094 STRIP_ANY_LOCATION_WRAPPER (arg0);
3095 STRIP_ANY_LOCATION_WRAPPER (arg1);
3097 /* If either is ERROR_MARK, they aren't equal. */
3098 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3099 || TREE_TYPE (arg0) == error_mark_node
3100 || TREE_TYPE (arg1) == error_mark_node)
3101 return false;
3103 /* Similar, if either does not have a type (like a template id),
3104 they aren't equal. */
3105 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3106 return false;
3108 /* Bitwise identity makes no sense if the values have different layouts. */
3109 if ((flags & OEP_BITWISE)
3110 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3111 return false;
3113 /* We cannot consider pointers to different address space equal. */
3114 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3115 && POINTER_TYPE_P (TREE_TYPE (arg1))
3116 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3117 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3118 return false;
3120 /* Check equality of integer constants before bailing out due to
3121 precision differences. */
3122 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3124 /* Address of INTEGER_CST is not defined; check that we did not forget
3125 to drop the OEP_ADDRESS_OF flags. */
3126 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3127 return tree_int_cst_equal (arg0, arg1);
3130 if (!(flags & OEP_ADDRESS_OF))
3132 /* If both types don't have the same signedness, then we can't consider
3133 them equal. We must check this before the STRIP_NOPS calls
3134 because they may change the signedness of the arguments. As pointers
3135 strictly don't have a signedness, require either two pointers or
3136 two non-pointers as well. */
3137 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3138 || POINTER_TYPE_P (TREE_TYPE (arg0))
3139 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3140 return false;
3142 /* If both types don't have the same precision, then it is not safe
3143 to strip NOPs. */
3144 if (element_precision (TREE_TYPE (arg0))
3145 != element_precision (TREE_TYPE (arg1)))
3146 return false;
3148 STRIP_NOPS (arg0);
3149 STRIP_NOPS (arg1);
3151 #if 0
3152 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3153 sanity check once the issue is solved. */
3154 else
3155 /* Addresses of conversions and SSA_NAMEs (and many other things)
3156 are not defined. Check that we did not forget to drop the
3157 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3158 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3159 && TREE_CODE (arg0) != SSA_NAME);
3160 #endif
3162 /* In case both args are comparisons but with different comparison
3163 code, try to swap the comparison operands of one arg to produce
3164 a match and compare that variant. */
3165 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3166 && COMPARISON_CLASS_P (arg0)
3167 && COMPARISON_CLASS_P (arg1))
3169 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3171 if (TREE_CODE (arg0) == swap_code)
3172 return operand_equal_p (TREE_OPERAND (arg0, 0),
3173 TREE_OPERAND (arg1, 1), flags)
3174 && operand_equal_p (TREE_OPERAND (arg0, 1),
3175 TREE_OPERAND (arg1, 0), flags);
3178 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3180 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3181 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3183 else if (flags & OEP_ADDRESS_OF)
3185 /* If we are interested in comparing addresses ignore
3186 MEM_REF wrappings of the base that can appear just for
3187 TBAA reasons. */
3188 if (TREE_CODE (arg0) == MEM_REF
3189 && DECL_P (arg1)
3190 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3191 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3192 && integer_zerop (TREE_OPERAND (arg0, 1)))
3193 return true;
3194 else if (TREE_CODE (arg1) == MEM_REF
3195 && DECL_P (arg0)
3196 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3197 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3198 && integer_zerop (TREE_OPERAND (arg1, 1)))
3199 return true;
3200 return false;
3202 else
3203 return false;
3206 /* When not checking addresses, this is needed for conversions and for
3207 COMPONENT_REF. Might as well play it safe and always test this. */
3208 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3209 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3210 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3211 && !(flags & OEP_ADDRESS_OF)))
3212 return false;
3214 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3215 We don't care about side effects in that case because the SAVE_EXPR
3216 takes care of that for us. In all other cases, two expressions are
3217 equal if they have no side effects. If we have two identical
3218 expressions with side effects that should be treated the same due
3219 to the only side effects being identical SAVE_EXPR's, that will
3220 be detected in the recursive calls below.
3221 If we are taking an invariant address of two identical objects
3222 they are necessarily equal as well. */
3223 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3224 && (TREE_CODE (arg0) == SAVE_EXPR
3225 || (flags & OEP_MATCH_SIDE_EFFECTS)
3226 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3227 return true;
3229 /* Next handle constant cases, those for which we can return 1 even
3230 if ONLY_CONST is set. */
3231 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3232 switch (TREE_CODE (arg0))
3234 case INTEGER_CST:
3235 return tree_int_cst_equal (arg0, arg1);
3237 case FIXED_CST:
3238 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3239 TREE_FIXED_CST (arg1));
3241 case REAL_CST:
3242 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3243 return true;
3245 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3247 /* If we do not distinguish between signed and unsigned zero,
3248 consider them equal. */
3249 if (real_zerop (arg0) && real_zerop (arg1))
3250 return true;
3252 return false;
3254 case VECTOR_CST:
3256 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3257 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3258 return false;
3260 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3261 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3262 return false;
3264 unsigned int count = vector_cst_encoded_nelts (arg0);
3265 for (unsigned int i = 0; i < count; ++i)
3266 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3267 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3268 return false;
3269 return true;
3272 case COMPLEX_CST:
3273 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3274 flags)
3275 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3276 flags));
3278 case STRING_CST:
3279 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3280 && ! memcmp (TREE_STRING_POINTER (arg0),
3281 TREE_STRING_POINTER (arg1),
3282 TREE_STRING_LENGTH (arg0)));
3284 case ADDR_EXPR:
3285 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3286 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3287 flags | OEP_ADDRESS_OF
3288 | OEP_MATCH_SIDE_EFFECTS);
3289 case CONSTRUCTOR:
3290 /* In GIMPLE empty constructors are allowed in initializers of
3291 aggregates. */
3292 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3293 default:
3294 break;
3297 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3298 two instances of undefined behavior will give identical results. */
3299 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3300 return false;
3302 /* Define macros to test an operand from arg0 and arg1 for equality and a
3303 variant that allows null and views null as being different from any
3304 non-null value. In the latter case, if either is null, both must
3305 be null; otherwise, do the normal comparison. */
3306 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3307 TREE_OPERAND (arg1, N), flags)
3309 #define OP_SAME_WITH_NULL(N) \
3310 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3311 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3313 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3315 case tcc_unary:
3316 /* Two conversions are equal only if signedness and modes match. */
3317 switch (TREE_CODE (arg0))
3319 CASE_CONVERT:
3320 case FIX_TRUNC_EXPR:
3321 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3322 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3323 return false;
3324 break;
3325 default:
3326 break;
3329 return OP_SAME (0);
3332 case tcc_comparison:
3333 case tcc_binary:
3334 if (OP_SAME (0) && OP_SAME (1))
3335 return true;
3337 /* For commutative ops, allow the other order. */
3338 return (commutative_tree_code (TREE_CODE (arg0))
3339 && operand_equal_p (TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 1), flags)
3341 && operand_equal_p (TREE_OPERAND (arg0, 1),
3342 TREE_OPERAND (arg1, 0), flags));
3344 case tcc_reference:
3345 /* If either of the pointer (or reference) expressions we are
3346 dereferencing contain a side effect, these cannot be equal,
3347 but their addresses can be. */
3348 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3349 && (TREE_SIDE_EFFECTS (arg0)
3350 || TREE_SIDE_EFFECTS (arg1)))
3351 return false;
3353 switch (TREE_CODE (arg0))
3355 case INDIRECT_REF:
3356 if (!(flags & OEP_ADDRESS_OF))
3358 if (TYPE_ALIGN (TREE_TYPE (arg0))
3359 != TYPE_ALIGN (TREE_TYPE (arg1)))
3360 return false;
3361 /* Verify that the access types are compatible. */
3362 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3363 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3364 return false;
3366 flags &= ~OEP_ADDRESS_OF;
3367 return OP_SAME (0);
3369 case IMAGPART_EXPR:
3370 /* Require the same offset. */
3371 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3372 TYPE_SIZE (TREE_TYPE (arg1)),
3373 flags & ~OEP_ADDRESS_OF))
3374 return false;
3376 /* Fallthru. */
3377 case REALPART_EXPR:
3378 case VIEW_CONVERT_EXPR:
3379 return OP_SAME (0);
3381 case TARGET_MEM_REF:
3382 case MEM_REF:
3383 if (!(flags & OEP_ADDRESS_OF))
3385 /* Require equal access sizes */
3386 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3387 && (!TYPE_SIZE (TREE_TYPE (arg0))
3388 || !TYPE_SIZE (TREE_TYPE (arg1))
3389 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3390 TYPE_SIZE (TREE_TYPE (arg1)),
3391 flags)))
3392 return false;
3393 /* Verify that access happens in similar types. */
3394 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3395 return false;
3396 /* Verify that accesses are TBAA compatible. */
3397 if (!alias_ptr_types_compatible_p
3398 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3399 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3400 || (MR_DEPENDENCE_CLIQUE (arg0)
3401 != MR_DEPENDENCE_CLIQUE (arg1))
3402 || (MR_DEPENDENCE_BASE (arg0)
3403 != MR_DEPENDENCE_BASE (arg1)))
3404 return false;
3405 /* Verify that alignment is compatible. */
3406 if (TYPE_ALIGN (TREE_TYPE (arg0))
3407 != TYPE_ALIGN (TREE_TYPE (arg1)))
3408 return false;
3410 flags &= ~OEP_ADDRESS_OF;
3411 return (OP_SAME (0) && OP_SAME (1)
3412 /* TARGET_MEM_REFs require equal extra operands. */
3413 && (TREE_CODE (arg0) != TARGET_MEM_REF
3414 || (OP_SAME_WITH_NULL (2)
3415 && OP_SAME_WITH_NULL (3)
3416 && OP_SAME_WITH_NULL (4))));
3418 case ARRAY_REF:
3419 case ARRAY_RANGE_REF:
3420 if (!OP_SAME (0))
3421 return false;
3422 flags &= ~OEP_ADDRESS_OF;
3423 /* Compare the array index by value first if it is constant, as the
3424 indexes may have different types but the same value here. */
3425 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3426 TREE_OPERAND (arg1, 1))
3427 || OP_SAME (1))
3428 && OP_SAME_WITH_NULL (2)
3429 && OP_SAME_WITH_NULL (3)
3430 /* Compare low bound and element size as with OEP_ADDRESS_OF
3431 we have to account for the offset of the ref. */
3432 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3433 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3434 || (operand_equal_p (array_ref_low_bound
3435 (CONST_CAST_TREE (arg0)),
3436 array_ref_low_bound
3437 (CONST_CAST_TREE (arg1)), flags)
3438 && operand_equal_p (array_ref_element_size
3439 (CONST_CAST_TREE (arg0)),
3440 array_ref_element_size
3441 (CONST_CAST_TREE (arg1)),
3442 flags))));
3444 case COMPONENT_REF:
3445 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3446 may be NULL when we're called to compare MEM_EXPRs. */
3447 if (!OP_SAME_WITH_NULL (0))
3448 return false;
3450 bool compare_address = flags & OEP_ADDRESS_OF;
3452 /* Most of the time we only need to compare FIELD_DECLs for equality;
3453 however, when comparing addresses, look at the actual offsets.
3454 These may match for unions and unshared record types. */
3455 flags &= ~OEP_ADDRESS_OF;
3456 if (!OP_SAME (1))
3458 if (compare_address
3459 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3461 tree field0 = TREE_OPERAND (arg0, 1);
3462 tree field1 = TREE_OPERAND (arg1, 1);
3464 /* Non-FIELD_DECL operands can appear in C++ templates. */
3465 if (TREE_CODE (field0) != FIELD_DECL
3466 || TREE_CODE (field1) != FIELD_DECL
3467 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3468 DECL_FIELD_OFFSET (field1), flags)
3469 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3470 DECL_FIELD_BIT_OFFSET (field1),
3471 flags))
3472 return false;
3474 else
3475 return false;
3478 return OP_SAME_WITH_NULL (2);
3480 case BIT_FIELD_REF:
3481 if (!OP_SAME (0))
3482 return false;
3483 flags &= ~OEP_ADDRESS_OF;
3484 return OP_SAME (1) && OP_SAME (2);
3486 default:
3487 return false;
3490 case tcc_expression:
3491 switch (TREE_CODE (arg0))
3493 case ADDR_EXPR:
3494 /* Be sure we pass the right ADDRESS_OF flag. */
3495 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3496 return operand_equal_p (TREE_OPERAND (arg0, 0),
3497 TREE_OPERAND (arg1, 0),
3498 flags | OEP_ADDRESS_OF);
3500 case TRUTH_NOT_EXPR:
3501 return OP_SAME (0);
3503 case TRUTH_ANDIF_EXPR:
3504 case TRUTH_ORIF_EXPR:
3505 return OP_SAME (0) && OP_SAME (1);
3507 case WIDEN_MULT_PLUS_EXPR:
3508 case WIDEN_MULT_MINUS_EXPR:
3509 if (!OP_SAME (2))
3510 return false;
3511 /* The multiplication operands are commutative. */
3512 /* FALLTHRU */
3514 case TRUTH_AND_EXPR:
3515 case TRUTH_OR_EXPR:
3516 case TRUTH_XOR_EXPR:
3517 if (OP_SAME (0) && OP_SAME (1))
3518 return true;
3520 /* Otherwise take into account that this is a commutative operation. */
3521 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3522 TREE_OPERAND (arg1, 1), flags)
3523 && operand_equal_p (TREE_OPERAND (arg0, 1),
3524 TREE_OPERAND (arg1, 0), flags));
3526 case COND_EXPR:
3527 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3528 return false;
3529 flags &= ~OEP_ADDRESS_OF;
3530 return OP_SAME (0);
3532 case BIT_INSERT_EXPR:
3533 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3534 of op1. Check that it is the same for both operands. */
3535 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3536 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3537 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3538 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3539 return false;
3540 /* FALLTHRU */
3542 case VEC_COND_EXPR:
3543 case DOT_PROD_EXPR:
3544 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3546 case MODIFY_EXPR:
3547 case INIT_EXPR:
3548 case COMPOUND_EXPR:
3549 case PREDECREMENT_EXPR:
3550 case PREINCREMENT_EXPR:
3551 case POSTDECREMENT_EXPR:
3552 case POSTINCREMENT_EXPR:
3553 if (flags & OEP_LEXICOGRAPHIC)
3554 return OP_SAME (0) && OP_SAME (1);
3555 return false;
3557 case CLEANUP_POINT_EXPR:
3558 case EXPR_STMT:
3559 case SAVE_EXPR:
3560 if (flags & OEP_LEXICOGRAPHIC)
3561 return OP_SAME (0);
3562 return false;
3564 case OBJ_TYPE_REF:
3565 /* Virtual table reference. */
3566 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3567 OBJ_TYPE_REF_EXPR (arg1), flags))
3568 return false;
3569 flags &= ~OEP_ADDRESS_OF;
3570 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3571 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3572 return false;
3573 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3574 OBJ_TYPE_REF_OBJECT (arg1), flags))
3575 return false;
3576 if (virtual_method_call_p (arg0))
3578 if (!virtual_method_call_p (arg1))
3579 return false;
3580 return types_same_for_odr (obj_type_ref_class (arg0),
3581 obj_type_ref_class (arg1));
3583 return false;
3585 default:
3586 return false;
3589 case tcc_vl_exp:
3590 switch (TREE_CODE (arg0))
3592 case CALL_EXPR:
3593 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3594 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3595 /* If the CALL_EXPRs are not both internal or both normal function
3596 calls, then they are not equal. */
3597 return false;
3598 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3600 /* If the CALL_EXPRs call different internal functions, then they
3601 are not equal. */
3602 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3603 return false;
3605 else
3607 /* If the CALL_EXPRs call different functions, then they are not
3608 equal. */
3609 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3610 flags))
3611 return false;
3614 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3616 unsigned int cef = call_expr_flags (arg0);
3617 if (flags & OEP_PURE_SAME)
3618 cef &= ECF_CONST | ECF_PURE;
3619 else
3620 cef &= ECF_CONST;
3621 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3622 return false;
3625 /* Now see if all the arguments are the same. */
3627 const_call_expr_arg_iterator iter0, iter1;
3628 const_tree a0, a1;
3629 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3630 a1 = first_const_call_expr_arg (arg1, &iter1);
3631 a0 && a1;
3632 a0 = next_const_call_expr_arg (&iter0),
3633 a1 = next_const_call_expr_arg (&iter1))
3634 if (! operand_equal_p (a0, a1, flags))
3635 return false;
3637 /* If we get here and both argument lists are exhausted
3638 then the CALL_EXPRs are equal. */
3639 return ! (a0 || a1);
3641 default:
3642 return false;
3645 case tcc_declaration:
3646 /* Consider __builtin_sqrt equal to sqrt. */
3647 if (TREE_CODE (arg0) == FUNCTION_DECL)
3648 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3649 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3650 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3651 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3653 if (DECL_P (arg0)
3654 && (flags & OEP_DECL_NAME)
3655 && (flags & OEP_LEXICOGRAPHIC))
3657 /* Consider decls with the same name equal. The caller needs
3658 to make sure they refer to the same entity (such as a function
3659 formal parameter). */
3660 tree a0name = DECL_NAME (arg0);
3661 tree a1name = DECL_NAME (arg1);
3662 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3663 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3664 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3666 return false;
3668 case tcc_exceptional:
3669 if (TREE_CODE (arg0) == CONSTRUCTOR)
3671 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3672 return false;
3674 /* In GIMPLE constructors are used only to build vectors from
3675 elements. Individual elements in the constructor must be
3676 indexed in increasing order and form an initial sequence.
3678 We make no effort to compare constructors in GENERIC.
3679 (see sem_variable::equals in ipa-icf which can do so for
3680 constants). */
3681 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3682 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3683 return false;
3685 /* Be sure that the constructed vectors have the same representation;
3686 we have only tested that element precisions and modes match.
3687 Vectors may be BLKmode, so also check that the number of
3688 parts matches. */
3689 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3690 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3691 return false;
3693 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3694 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3695 unsigned int len = vec_safe_length (v0);
3697 if (len != vec_safe_length (v1))
3698 return false;
3700 for (unsigned int i = 0; i < len; i++)
3702 constructor_elt *c0 = &(*v0)[i];
3703 constructor_elt *c1 = &(*v1)[i];
3705 if (!operand_equal_p (c0->value, c1->value, flags)
3706 /* In GIMPLE the indexes can be either NULL or matching i.
3707 Double check this so we won't get false
3708 positives for GENERIC. */
3709 || (c0->index
3710 && (TREE_CODE (c0->index) != INTEGER_CST
3711 || compare_tree_int (c0->index, i)))
3712 || (c1->index
3713 && (TREE_CODE (c1->index) != INTEGER_CST
3714 || compare_tree_int (c1->index, i))))
3715 return false;
3717 return true;
3719 else if (TREE_CODE (arg0) == STATEMENT_LIST
3720 && (flags & OEP_LEXICOGRAPHIC))
3722 /* Compare the STATEMENT_LISTs. */
3723 tree_stmt_iterator tsi1, tsi2;
3724 tree body1 = CONST_CAST_TREE (arg0);
3725 tree body2 = CONST_CAST_TREE (arg1);
3726 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3727 tsi_next (&tsi1), tsi_next (&tsi2))
3729 /* The lists don't have the same number of statements. */
3730 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3731 return false;
3732 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3733 return true;
3734 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3735 flags & (OEP_LEXICOGRAPHIC
3736 | OEP_NO_HASH_CHECK)))
3737 return false;
3740 return false;
3742 case tcc_statement:
3743 switch (TREE_CODE (arg0))
3745 case RETURN_EXPR:
3746 if (flags & OEP_LEXICOGRAPHIC)
3747 return OP_SAME_WITH_NULL (0);
3748 return false;
3749 case DEBUG_BEGIN_STMT:
3750 if (flags & OEP_LEXICOGRAPHIC)
3751 return true;
3752 return false;
3753 default:
3754 return false;
3757 default:
3758 return false;
3761 #undef OP_SAME
3762 #undef OP_SAME_WITH_NULL
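/* Editorial sketch, not part of the GCC sources: basic uses of the public
   operand_equal_p wrapper, assuming the usual global type nodes are
   initialized.  */

static void ATTRIBUTE_UNUSED
example_operand_equal_p ()
{
  tree a = build_int_cst (integer_type_node, 42);
  tree b = build_int_cst (integer_type_node, 42);
  /* INTEGER_CSTs are compared by value via tree_int_cst_equal.  */
  gcc_assert (operand_equal_p (a, b, 0));
  /* OEP_ONLY_CONST still lets constants through.  */
  gcc_assert (operand_equal_p (a, b, OEP_ONLY_CONST));
}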
3765 /* Generate a hash value for an expression. This can be used iteratively
3766 by passing a previous result as the HSTATE argument. */
3768 void
3769 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3770 unsigned int flags)
3772 int i;
3773 enum tree_code code;
3774 enum tree_code_class tclass;
3776 if (t == NULL_TREE || t == error_mark_node)
3778 hstate.merge_hash (0);
3779 return;
3782 STRIP_ANY_LOCATION_WRAPPER (t);
3784 if (!(flags & OEP_ADDRESS_OF))
3785 STRIP_NOPS (t);
3787 code = TREE_CODE (t);
3789 switch (code)
3791 /* Alas, constants aren't shared, so we can't rely on pointer
3792 identity. */
3793 case VOID_CST:
3794 hstate.merge_hash (0);
3795 return;
3796 case INTEGER_CST:
3797 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3798 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3799 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3800 return;
3801 case REAL_CST:
3803 unsigned int val2;
3804 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3805 val2 = rvc_zero;
3806 else
3807 val2 = real_hash (TREE_REAL_CST_PTR (t));
3808 hstate.merge_hash (val2);
3809 return;
3811 case FIXED_CST:
3813 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3814 hstate.merge_hash (val2);
3815 return;
3817 case STRING_CST:
3818 hstate.add ((const void *) TREE_STRING_POINTER (t),
3819 TREE_STRING_LENGTH (t));
3820 return;
3821 case COMPLEX_CST:
3822 hash_operand (TREE_REALPART (t), hstate, flags);
3823 hash_operand (TREE_IMAGPART (t), hstate, flags);
3824 return;
3825 case VECTOR_CST:
3827 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3828 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3829 unsigned int count = vector_cst_encoded_nelts (t);
3830 for (unsigned int i = 0; i < count; ++i)
3831 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3832 return;
3834 case SSA_NAME:
3835 /* We can just compare by pointer. */
3836 hstate.add_hwi (SSA_NAME_VERSION (t));
3837 return;
3838 case PLACEHOLDER_EXPR:
3839 /* The node itself doesn't matter. */
3840 return;
3841 case BLOCK:
3842 case OMP_CLAUSE:
3843 /* Ignore. */
3844 return;
3845 case TREE_LIST:
3846 /* A list of expressions, for a CALL_EXPR or as the elements of a
3847 VECTOR_CST. */
3848 for (; t; t = TREE_CHAIN (t))
3849 hash_operand (TREE_VALUE (t), hstate, flags);
3850 return;
3851 case CONSTRUCTOR:
3853 unsigned HOST_WIDE_INT idx;
3854 tree field, value;
3855 flags &= ~OEP_ADDRESS_OF;
3856 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3857 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3859 /* In GIMPLE the indexes can be either NULL or matching i. */
3860 if (field == NULL_TREE)
3861 field = bitsize_int (idx);
3862 hash_operand (field, hstate, flags);
3863 hash_operand (value, hstate, flags);
3865 return;
3867 case STATEMENT_LIST:
3869 tree_stmt_iterator i;
3870 for (i = tsi_start (CONST_CAST_TREE (t));
3871 !tsi_end_p (i); tsi_next (&i))
3872 hash_operand (tsi_stmt (i), hstate, flags);
3873 return;
3875 case TREE_VEC:
3876 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3877 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3878 return;
3879 case IDENTIFIER_NODE:
3880 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3881 return;
3882 case FUNCTION_DECL:
3883 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3884 Otherwise nodes that compare equal according to operand_equal_p might
3885 get different hash codes. However, don't do this for machine specific
3886 or front end builtins, since the function code is overloaded in those
3887 cases. */
3888 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3889 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3891 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3892 code = TREE_CODE (t);
3894 /* FALL THROUGH */
3895 default:
3896 if (POLY_INT_CST_P (t))
3898 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3899 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3900 return;
3902 tclass = TREE_CODE_CLASS (code);
3904 if (tclass == tcc_declaration)
3906 /* DECLs have a unique ID. */
3907 hstate.add_hwi (DECL_UID (t));
3909 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3911 /* For comparisons that can be swapped, use the lower
3912 tree code. */
3913 enum tree_code ccode = swap_tree_comparison (code);
3914 if (code < ccode)
3915 ccode = code;
3916 hstate.add_object (ccode);
3917 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3918 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3920 else if (CONVERT_EXPR_CODE_P (code))
3922 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3923 operand_equal_p. */
3924 enum tree_code ccode = NOP_EXPR;
3925 hstate.add_object (ccode);
3927 /* Don't hash the type, that can lead to having nodes which
3928 compare equal according to operand_equal_p, but which
3929 have different hash codes. Make sure to include signedness
3930 in the hash computation. */
3931 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3932 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3934 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3935 else if (code == MEM_REF
3936 && (flags & OEP_ADDRESS_OF) != 0
3937 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3938 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3939 && integer_zerop (TREE_OPERAND (t, 1)))
3940 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3941 hstate, flags);
3942 /* Don't ICE on FE specific trees, or their arguments etc.
3943 during operand_equal_p hash verification. */
3944 else if (!IS_EXPR_CODE_CLASS (tclass))
3945 gcc_assert (flags & OEP_HASH_CHECK);
3946 else
3948 unsigned int sflags = flags;
3950 hstate.add_object (code);
3952 switch (code)
3954 case ADDR_EXPR:
3955 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3956 flags |= OEP_ADDRESS_OF;
3957 sflags = flags;
3958 break;
3960 case INDIRECT_REF:
3961 case MEM_REF:
3962 case TARGET_MEM_REF:
3963 flags &= ~OEP_ADDRESS_OF;
3964 sflags = flags;
3965 break;
3967 case COMPONENT_REF:
3968 if (sflags & OEP_ADDRESS_OF)
3970 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3971 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3972 hstate, flags & ~OEP_ADDRESS_OF);
3973 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3974 hstate, flags & ~OEP_ADDRESS_OF);
3975 return;
3977 break;
3978 case ARRAY_REF:
3979 case ARRAY_RANGE_REF:
3980 case BIT_FIELD_REF:
3981 sflags &= ~OEP_ADDRESS_OF;
3982 break;
3984 case COND_EXPR:
3985 flags &= ~OEP_ADDRESS_OF;
3986 break;
3988 case WIDEN_MULT_PLUS_EXPR:
3989 case WIDEN_MULT_MINUS_EXPR:
3991 /* The multiplication operands are commutative. */
3992 inchash::hash one, two;
3993 hash_operand (TREE_OPERAND (t, 0), one, flags);
3994 hash_operand (TREE_OPERAND (t, 1), two, flags);
3995 hstate.add_commutative (one, two);
3996 hash_operand (TREE_OPERAND (t, 2), two, flags);
3997 return;
4000 case CALL_EXPR:
4001 if (CALL_EXPR_FN (t) == NULL_TREE)
4002 hstate.add_int (CALL_EXPR_IFN (t));
4003 break;
4005 case TARGET_EXPR:
4006 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4007 Usually different TARGET_EXPRs should just use
4008 different temporaries in their slots. */
4009 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4010 return;
4012 case OBJ_TYPE_REF:
4013 /* Virtual table reference. */
4014 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4015 flags &= ~OEP_ADDRESS_OF;
4016 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4017 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4018 if (!virtual_method_call_p (t))
4019 return;
4020 if (tree c = obj_type_ref_class (t))
4022 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4023 /* We compute mangled names only when free_lang_data is run.
4024 In that case we can hash precisely. */
4025 if (TREE_CODE (c) == TYPE_DECL
4026 && DECL_ASSEMBLER_NAME_SET_P (c))
4027 hstate.add_object
4028 (IDENTIFIER_HASH_VALUE
4029 (DECL_ASSEMBLER_NAME (c)));
4031 return;
4032 default:
4033 break;
4036 /* Don't hash the type, that can lead to having nodes which
4037 compare equal according to operand_equal_p, but which
4038 have different hash codes. */
4039 if (code == NON_LVALUE_EXPR)
4041 /* Make sure to include signedness in the hash computation. */
4042 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4043 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4046 else if (commutative_tree_code (code))
4048 /* It's a commutative expression. We want to hash it the same
4049 however it appears. We do this by first hashing both operands
4050 and then rehashing based on the order of their independent
4051 hashes. */
4052 inchash::hash one, two;
4053 hash_operand (TREE_OPERAND (t, 0), one, flags);
4054 hash_operand (TREE_OPERAND (t, 1), two, flags);
4055 hstate.add_commutative (one, two);
4057 else
4058 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4059 hash_operand (TREE_OPERAND (t, i), hstate,
4060 i == 0 ? flags : sflags);
4062 return;
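/* Illustrative note (an addition, not part of the original source): the
   commutative handling above keeps hashing consistent with
   operand_equal_p.  For example, with integer trees A and B,

     inchash::hash h1 (0), h2 (0);
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, a, b), h1, 0);
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, b, a), h2, 0);

   must yield h1.end () == h2.end (), because operand_equal_p treats
   A + B and B + A as equal; add_commutative combines the two operand
   hashes order-independently to guarantee this.  */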
4066 bool
4067 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4068 unsigned int flags, bool *ret)
4070 /* When checking and unless comparing DECL names, verify that if
4071 the outermost operand_equal_p call returns non-zero then ARG0
4072 and ARG1 have the same hash value. */
4073 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4075 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4077 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4079 inchash::hash hstate0 (0), hstate1 (0);
4080 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4081 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4082 hashval_t h0 = hstate0.end ();
4083 hashval_t h1 = hstate1.end ();
4084 gcc_assert (h0 == h1);
4086 *ret = true;
4088 else
4089 *ret = false;
4091 return true;
4094 return false;
4098 static operand_compare default_compare_instance;
4100 /* Convenience wrapper around the operand_compare class because usually we do
4101 not need to play with the valueizer. */
4103 bool
4104 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4106 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4109 namespace inchash
4112 /* Generate a hash value for an expression. This can be used iteratively
4113 by passing a previous result as the HSTATE argument.
4115 This function is intended to produce the same hash for expressions which
4116 would compare equal using operand_equal_p. */
4117 void
4118 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4120 default_compare_instance.hash_operand (t, hstate, flags);
4125 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4126 with a different signedness or a narrower precision. */
4128 static bool
4129 operand_equal_for_comparison_p (tree arg0, tree arg1)
4131 if (operand_equal_p (arg0, arg1, 0))
4132 return true;
4134 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4135 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4136 return false;
4138 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4139 and see if the inner values are the same. This removes any
4140 signedness comparison, which doesn't matter here. */
4141 tree op0 = arg0;
4142 tree op1 = arg1;
4143 STRIP_NOPS (op0);
4144 STRIP_NOPS (op1);
4145 if (operand_equal_p (op0, op1, 0))
4146 return true;
4148 /* Discard a single widening conversion from ARG1 and see if the inner
4149 value is the same as ARG0. */
4150 if (CONVERT_EXPR_P (arg1)
4151 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4152 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4153 < TYPE_PRECISION (TREE_TYPE (arg1))
4154 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4155 return true;
4157 return false;
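/* Illustrative example (an addition, not part of the original source):
   with int I, comparing ARG0 = i against ARG1 = (long) i, the operands
   are not equal as trees and STRIP_NOPS keeps the mode-changing widening
   conversion, but the final test above peels the single widening NOP off
   ARG1 and finds the same inner value, so the function returns true.  */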
4160 /* See if ARG is an expression that is either a comparison or is performing
4161 arithmetic on comparisons. The comparisons must only be comparing
4162 two different values, which will be stored in *CVAL1 and *CVAL2; if
4163 they are nonzero it means that some operands have already been found.
4164 No variables may be used anywhere else in the expression except in the
4165 comparisons.
4167 If this is true, return true. Otherwise, return false. */
4169 static bool
4170 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4172 enum tree_code code = TREE_CODE (arg);
4173 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4175 /* We can handle some of the tcc_expression cases here. */
4176 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4177 tclass = tcc_unary;
4178 else if (tclass == tcc_expression
4179 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4180 || code == COMPOUND_EXPR))
4181 tclass = tcc_binary;
4183 switch (tclass)
4185 case tcc_unary:
4186 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4188 case tcc_binary:
4189 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4190 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4192 case tcc_constant:
4193 return true;
4195 case tcc_expression:
4196 if (code == COND_EXPR)
4197 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4198 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4199 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4200 return false;
4202 case tcc_comparison:
4203 /* First see if we can handle the first operand, then the second. For
4204 the second operand, we know *CVAL1 can't be zero. It must be that
4205 one side of the comparison is each of the values; test for the
4206 case where this isn't true by failing if the two operands
4207 are the same. */
4209 if (operand_equal_p (TREE_OPERAND (arg, 0),
4210 TREE_OPERAND (arg, 1), 0))
4211 return false;
4213 if (*cval1 == 0)
4214 *cval1 = TREE_OPERAND (arg, 0);
4215 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4217 else if (*cval2 == 0)
4218 *cval2 = TREE_OPERAND (arg, 0);
4219 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4221 else
4222 return false;
4224 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4226 else if (*cval2 == 0)
4227 *cval2 = TREE_OPERAND (arg, 1);
4228 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4230 else
4231 return false;
4233 return true;
4235 default:
4236 return false;
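/* Illustrative example (an addition, not part of the original source):
   for ARG = (a < b) | (a == b), twoval_comparison_p returns true with
   *CVAL1 = a and *CVAL2 = b: the BIT_IOR_EXPR is walked as tcc_binary
   and both comparisons mention only the values A and B.  By contrast,
   (a < b) | (a < c) fails, since it would need a third value.  */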
4240 /* ARG is a tree that is known to contain just arithmetic operations and
4241 comparisons. Evaluate the operations in the tree substituting NEW0 for
4242 any occurrence of OLD0 as an operand of a comparison and likewise for
4243 NEW1 and OLD1. */
4245 static tree
4246 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4247 tree old1, tree new1)
4249 tree type = TREE_TYPE (arg);
4250 enum tree_code code = TREE_CODE (arg);
4251 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4253 /* We can handle some of the tcc_expression cases here. */
4254 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4255 tclass = tcc_unary;
4256 else if (tclass == tcc_expression
4257 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4258 tclass = tcc_binary;
4260 switch (tclass)
4262 case tcc_unary:
4263 return fold_build1_loc (loc, code, type,
4264 eval_subst (loc, TREE_OPERAND (arg, 0),
4265 old0, new0, old1, new1));
4267 case tcc_binary:
4268 return fold_build2_loc (loc, code, type,
4269 eval_subst (loc, TREE_OPERAND (arg, 0),
4270 old0, new0, old1, new1),
4271 eval_subst (loc, TREE_OPERAND (arg, 1),
4272 old0, new0, old1, new1));
4274 case tcc_expression:
4275 switch (code)
4277 case SAVE_EXPR:
4278 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4279 old1, new1);
4281 case COMPOUND_EXPR:
4282 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4283 old1, new1);
4285 case COND_EXPR:
4286 return fold_build3_loc (loc, code, type,
4287 eval_subst (loc, TREE_OPERAND (arg, 0),
4288 old0, new0, old1, new1),
4289 eval_subst (loc, TREE_OPERAND (arg, 1),
4290 old0, new0, old1, new1),
4291 eval_subst (loc, TREE_OPERAND (arg, 2),
4292 old0, new0, old1, new1));
4293 default:
4294 break;
4296 /* Fall through - ??? */
4298 case tcc_comparison:
4300 tree arg0 = TREE_OPERAND (arg, 0);
4301 tree arg1 = TREE_OPERAND (arg, 1);
4303 /* We need to check both for exact equality and tree equality. The
4304 former will be true if the operand has a side-effect. In that
4305 case, we know the operand occurred exactly once. */
4307 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4308 arg0 = new0;
4309 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4310 arg0 = new1;
4312 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4313 arg1 = new0;
4314 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4315 arg1 = new1;
4317 return fold_build2_loc (loc, code, type, arg0, arg1);
4320 default:
4321 return arg;
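/* Illustrative example (an addition, not part of the original source):
   with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1, applying eval_subst to
   (a < b) && (b <= a) rebuilds it as (0 < 1) && (1 <= 0), which the
   fold_build2 calls reduce to a constant false.  Only operands of
   comparisons are substituted, per the contract above.  */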
4325 /* Return a tree for the case when the result of an expression is RESULT
4326 converted to TYPE and OMITTED was previously an operand of the expression
4327 but is now not needed (e.g., we folded OMITTED * 0).
4329 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4330 the conversion of RESULT to TYPE. */
4332 tree
4333 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4335 tree t = fold_convert_loc (loc, type, result);
4337 /* If the resulting operand is an empty statement, just return the omitted
4338 statement cast to void. */
4339 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4340 return build1_loc (loc, NOP_EXPR, void_type_node,
4341 fold_ignored_result (omitted));
4343 if (TREE_SIDE_EFFECTS (omitted))
4344 return build2_loc (loc, COMPOUND_EXPR, type,
4345 fold_ignored_result (omitted), t);
4347 return non_lvalue_loc (loc, t);
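/* Illustrative example (an addition, not part of the original source):
   when f () * 0 is folded, the result is 0 but the call has side
   effects, so omit_one_operand_loc yields the COMPOUND_EXPR (f (), 0),
   evaluating the omitted operand before producing the constant.  For a
   side-effect-free OMITTED, it simply returns the converted RESULT.  */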
4350 /* Return a tree for the case when the result of an expression is RESULT
4351 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4352 of the expression but are now not needed.
4354 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4355 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4356 evaluated before OMITTED2. Otherwise, if neither has side effects,
4357 just do the conversion of RESULT to TYPE. */
4359 tree
4360 omit_two_operands_loc (location_t loc, tree type, tree result,
4361 tree omitted1, tree omitted2)
4363 tree t = fold_convert_loc (loc, type, result);
4365 if (TREE_SIDE_EFFECTS (omitted2))
4366 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4367 if (TREE_SIDE_EFFECTS (omitted1))
4368 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4370 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4374 /* Return a simplified tree node for the truth-negation of ARG. This
4375 never alters ARG itself. We assume that ARG is an operation that
4376 returns a truth value (0 or 1).
4378 FIXME: one would think we would fold the result, but it causes
4379 problems with the dominator optimizer. */
4381 static tree
4382 fold_truth_not_expr (location_t loc, tree arg)
4384 tree type = TREE_TYPE (arg);
4385 enum tree_code code = TREE_CODE (arg);
4386 location_t loc1, loc2;
4388 /* If this is a comparison, we can simply invert it, except for
4389 floating-point non-equality comparisons, in which case we just
4390 enclose a TRUTH_NOT_EXPR around what we have. */
4392 if (TREE_CODE_CLASS (code) == tcc_comparison)
4394 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4395 if (FLOAT_TYPE_P (op_type)
4396 && flag_trapping_math
4397 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4398 && code != NE_EXPR && code != EQ_EXPR)
4399 return NULL_TREE;
4401 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4402 if (code == ERROR_MARK)
4403 return NULL_TREE;
4405 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4406 TREE_OPERAND (arg, 1));
4407 copy_warning (ret, arg);
4408 return ret;
4411 switch (code)
4413 case INTEGER_CST:
4414 return constant_boolean_node (integer_zerop (arg), type);
4416 case TRUTH_AND_EXPR:
4417 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4418 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4419 return build2_loc (loc, TRUTH_OR_EXPR, type,
4420 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4421 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4423 case TRUTH_OR_EXPR:
4424 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4425 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4426 return build2_loc (loc, TRUTH_AND_EXPR, type,
4427 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4428 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4430 case TRUTH_XOR_EXPR:
4431 /* Here we can invert either operand. We invert the first operand
4432 unless the second operand is a TRUTH_NOT_EXPR in which case our
4433 result is the XOR of the first operand with the inside of the
4434 negation of the second operand. */
4436 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4437 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4438 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4439 else
4440 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4441 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4442 TREE_OPERAND (arg, 1));
4444 case TRUTH_ANDIF_EXPR:
4445 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4446 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4447 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4448 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4449 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4451 case TRUTH_ORIF_EXPR:
4452 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4453 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4454 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4455 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4456 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4458 case TRUTH_NOT_EXPR:
4459 return TREE_OPERAND (arg, 0);
4461 case COND_EXPR:
4463 tree arg1 = TREE_OPERAND (arg, 1);
4464 tree arg2 = TREE_OPERAND (arg, 2);
4466 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4467 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4469 /* A COND_EXPR may have a throw as one operand, which
4470 then has void type. Just leave void operands
4471 as they are. */
4472 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4473 VOID_TYPE_P (TREE_TYPE (arg1))
4474 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4475 VOID_TYPE_P (TREE_TYPE (arg2))
4476 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4479 case COMPOUND_EXPR:
4480 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4481 return build2_loc (loc, COMPOUND_EXPR, type,
4482 TREE_OPERAND (arg, 0),
4483 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4485 case NON_LVALUE_EXPR:
4486 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4487 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4489 CASE_CONVERT:
4490 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4491 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4493 /* fall through */
4495 case FLOAT_EXPR:
4496 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4497 return build1_loc (loc, TREE_CODE (arg), type,
4498 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4500 case BIT_AND_EXPR:
4501 if (!integer_onep (TREE_OPERAND (arg, 1)))
4502 return NULL_TREE;
4503 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4505 case SAVE_EXPR:
4506 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4508 case CLEANUP_POINT_EXPR:
4509 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4510 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4511 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4513 default:
4514 return NULL_TREE;
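/* Illustrative note (an addition, not part of the original source): the
   floating-point guard at the top of the function exists because of
   NaNs.  If X is NaN, both x < y and x >= y are false, so the inverse
   of x < y is UNGE (unordered or greater-equal), not GE; and under
   -ftrapping-math the fold is declined entirely, since the unordered
   comparison would not raise the same invalid-operation exceptions as
   the original.  */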
4518 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4519 assume that ARG is an operation that returns a truth value (0 or 1
4520 for scalars, 0 or -1 for vectors). Return the folded expression if
4521 folding is successful. Otherwise, return NULL_TREE. */
4523 static tree
4524 fold_invert_truthvalue (location_t loc, tree arg)
4526 tree type = TREE_TYPE (arg);
4527 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4528 ? BIT_NOT_EXPR
4529 : TRUTH_NOT_EXPR,
4530 type, arg);
4533 /* Return a simplified tree node for the truth-negation of ARG. This
4534 never alters ARG itself. We assume that ARG is an operation that
4535 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4537 tree
4538 invert_truthvalue_loc (location_t loc, tree arg)
4540 if (TREE_CODE (arg) == ERROR_MARK)
4541 return arg;
4543 tree type = TREE_TYPE (arg);
4544 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4545 ? BIT_NOT_EXPR
4546 : TRUTH_NOT_EXPR,
4547 type, arg);
4550 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4551 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4552 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4553 is the original memory reference used to preserve the alias set of
4554 the access. */
4556 static tree
4557 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4558 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4559 int unsignedp, int reversep)
4561 tree result, bftype;
4563 /* Attempt not to lose the access path if possible. */
4564 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4566 tree ninner = TREE_OPERAND (orig_inner, 0);
4567 machine_mode nmode;
4568 poly_int64 nbitsize, nbitpos;
4569 tree noffset;
4570 int nunsignedp, nreversep, nvolatilep = 0;
4571 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4572 &noffset, &nmode, &nunsignedp,
4573 &nreversep, &nvolatilep);
4574 if (base == inner
4575 && noffset == NULL_TREE
4576 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4577 && !reversep
4578 && !nreversep
4579 && !nvolatilep)
4581 inner = ninner;
4582 bitpos -= nbitpos;
4586 alias_set_type iset = get_alias_set (orig_inner);
4587 if (iset == 0 && get_alias_set (inner) != iset)
4588 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4589 build_fold_addr_expr (inner),
4590 build_int_cst (ptr_type_node, 0));
4592 if (known_eq (bitpos, 0) && !reversep)
4594 tree size = TYPE_SIZE (TREE_TYPE (inner));
4595 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4596 || POINTER_TYPE_P (TREE_TYPE (inner)))
4597 && tree_fits_shwi_p (size)
4598 && tree_to_shwi (size) == bitsize)
4599 return fold_convert_loc (loc, type, inner);
4602 bftype = type;
4603 if (TYPE_PRECISION (bftype) != bitsize
4604 || TYPE_UNSIGNED (bftype) == !unsignedp)
4605 bftype = build_nonstandard_integer_type (bitsize, 0);
4607 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4608 bitsize_int (bitsize), bitsize_int (bitpos));
4609 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4611 if (bftype != type)
4612 result = fold_convert_loc (loc, type, result);
4614 return result;
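/* Illustrative example (an addition, not part of the original source;
   UTYPE stands for a hypothetical 8-bit unsigned type): a call like
   make_bit_field_ref (loc, s, s, utype, 8, 16, 1, 0) builds
   BIT_FIELD_REF <s, 8, 16>, i.e. the eight bits of S starting at bit 16.
   When BITPOS is 0 and the requested width equals the size of an
   integral or pointer S, a plain conversion is returned instead.  */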
4617 /* Optimize a bit-field compare.
4619 There are two cases: First is a compare against a constant and the
4620 second is a comparison of two items where the fields are at the same
4621 bit position relative to the start of a chunk (byte, halfword, word)
4622 large enough to contain it. In these cases we can avoid the shift
4623 implicit in bitfield extractions.
4625 For constants, we emit a compare of the shifted constant with the
4626 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4627 compared. For two fields at the same position, we do the ANDs with the
4628 similar mask and compare the result of the ANDs.
4630 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4631 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4632 are the left and right operands of the comparison, respectively.
4634 If the optimization described above can be done, we return the resulting
4635 tree. Otherwise we return zero. */
4637 static tree
4638 optimize_bit_field_compare (location_t loc, enum tree_code code,
4639 tree compare_type, tree lhs, tree rhs)
4641 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4642 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4643 tree type = TREE_TYPE (lhs);
4644 tree unsigned_type;
4645 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4646 machine_mode lmode, rmode;
4647 scalar_int_mode nmode;
4648 int lunsignedp, runsignedp;
4649 int lreversep, rreversep;
4650 int lvolatilep = 0, rvolatilep = 0;
4651 tree linner, rinner = NULL_TREE;
4652 tree mask;
4653 tree offset;
4655 /* Get all the information about the extractions being done. If the bit size
4656 is the same as the size of the underlying object, we aren't doing an
4657 extraction at all and so can do nothing. We also don't want to
4658 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4659 then will no longer be able to replace it. */
4660 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4661 &lunsignedp, &lreversep, &lvolatilep);
4662 if (linner == lhs
4663 || !known_size_p (plbitsize)
4664 || !plbitsize.is_constant (&lbitsize)
4665 || !plbitpos.is_constant (&lbitpos)
4666 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4667 || offset != 0
4668 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4669 || lvolatilep)
4670 return 0;
4672 if (const_p)
4673 rreversep = lreversep;
4674 else
4676 /* If this is not a constant, we can only do something if bit positions,
4677 sizes, signedness and storage order are the same. */
4678 rinner
4679 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4680 &runsignedp, &rreversep, &rvolatilep);
4682 if (rinner == rhs
4683 || maybe_ne (lbitpos, rbitpos)
4684 || maybe_ne (lbitsize, rbitsize)
4685 || lunsignedp != runsignedp
4686 || lreversep != rreversep
4687 || offset != 0
4688 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4689 || rvolatilep)
4690 return 0;
4693 /* Honor the C++ memory model and mimic what RTL expansion does. */
4694 poly_uint64 bitstart = 0;
4695 poly_uint64 bitend = 0;
4696 if (TREE_CODE (lhs) == COMPONENT_REF)
4698 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4699 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4700 return 0;
4703 /* See if we can find a mode to refer to this field. We should be able to,
4704 but fail if we can't. */
4705 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4706 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4707 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4708 TYPE_ALIGN (TREE_TYPE (rinner))),
4709 BITS_PER_WORD, false, &nmode))
4710 return 0;
4712 /* Set signed and unsigned types of the precision of this mode for the
4713 shifts below. */
4714 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4716 /* Compute the bit position and size for the new reference and our offset
4717 within it. If the new reference is the same size as the original, we
4718 won't optimize anything, so return zero. */
4719 nbitsize = GET_MODE_BITSIZE (nmode);
4720 nbitpos = lbitpos & ~ (nbitsize - 1);
4721 lbitpos -= nbitpos;
4722 if (nbitsize == lbitsize)
4723 return 0;
4725 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4726 lbitpos = nbitsize - lbitsize - lbitpos;
4728 /* Make the mask to be used against the extracted field. */
4729 mask = build_int_cst_type (unsigned_type, -1);
4730 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4731 mask = const_binop (RSHIFT_EXPR, mask,
4732 size_int (nbitsize - lbitsize - lbitpos));
4734 if (! const_p)
4736 if (nbitpos < 0)
4737 return 0;
4739 /* If not comparing with constant, just rework the comparison
4740 and return. */
4741 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4742 nbitsize, nbitpos, 1, lreversep);
4743 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4744 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4745 nbitsize, nbitpos, 1, rreversep);
4746 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4747 return fold_build2_loc (loc, code, compare_type, t1, t2);
4750 /* Otherwise, we are handling the constant case. See if the constant is too
4751 big for the field. Warn and return a tree for 0 (false) if so. We do
4752 this not only for its own sake, but to avoid having to test for this
4753 error case below. If we didn't, we might generate wrong code.
4755 For unsigned fields, the constant shifted right by the field length should
4756 be all zero. For signed fields, the high-order bits should agree with
4757 the sign bit. */
4759 if (lunsignedp)
4761 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4763 warning (0, "comparison is always %d due to width of bit-field",
4764 code == NE_EXPR);
4765 return constant_boolean_node (code == NE_EXPR, compare_type);
4768 else
4770 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4771 if (tem != 0 && tem != -1)
4773 warning (0, "comparison is always %d due to width of bit-field",
4774 code == NE_EXPR);
4775 return constant_boolean_node (code == NE_EXPR, compare_type);
4779 if (nbitpos < 0)
4780 return 0;
4782 /* Single-bit compares should always be against zero. */
4783 if (lbitsize == 1 && ! integer_zerop (rhs))
4785 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4786 rhs = build_int_cst (type, 0);
4789 /* Make a new bitfield reference, shift the constant over the
4790 appropriate number of bits and mask it with the computed mask
4791 (in case this was a signed field). If we changed it, make a new one. */
4792 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4793 nbitsize, nbitpos, 1, lreversep);
4795 rhs = const_binop (BIT_AND_EXPR,
4796 const_binop (LSHIFT_EXPR,
4797 fold_convert_loc (loc, unsigned_type, rhs),
4798 size_int (lbitpos)),
4799 mask);
4801 lhs = build2_loc (loc, code, compare_type,
4802 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4803 return lhs;
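/* Illustrative example (an addition, not part of the original source):
   given

     struct S { unsigned a : 3; unsigned b : 5; } s;

   on a little-endian target a test like s.b == 7 can be rewritten as
   roughly

     (BIT_FIELD_REF <s, 8, 0> & 0xf8) == (7 << 3)

   i.e. load one byte, mask away the bits of A and compare against the
   constant shifted into place, avoiding the shift implicit in extracting
   the bit-field itself.  The exact mode, mask and shift depend on
   endianness and get_best_mode.  */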
4806 /* Subroutine for fold_truth_andor_1: decode a field reference.
4808 If EXP is a comparison reference, we return the innermost reference.
4810 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4811 set to the starting bit number.
4813 If the innermost field can be completely contained in a mode-sized
4814 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4816 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4817 otherwise it is not changed.
4819 *PUNSIGNEDP is set to the signedness of the field.
4821 *PREVERSEP is set to the storage order of the field.
4823 *PMASK is set to the mask used. This is either contained in a
4824 BIT_AND_EXPR or derived from the width of the field.
4826 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4828 Return 0 if this is not a component reference or is one that we can't
4829 do anything with. */
4831 static tree
4832 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4833 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4834 int *punsignedp, int *preversep, int *pvolatilep,
4835 tree *pmask, tree *pand_mask)
4837 tree exp = *exp_;
4838 tree outer_type = 0;
4839 tree and_mask = 0;
4840 tree mask, inner, offset;
4841 tree unsigned_type;
4842 unsigned int precision;
4844 /* All the optimizations using this function assume integer fields.
4845 There are problems with FP fields since the type_for_size call
4846 below can fail for, e.g., XFmode. */
4847 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4848 return NULL_TREE;
4850 /* We are interested in the bare arrangement of bits, so strip everything
4851 that doesn't affect the machine mode. However, record the type of the
4852 outermost expression if it may matter below. */
4853 if (CONVERT_EXPR_P (exp)
4854 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4855 outer_type = TREE_TYPE (exp);
4856 STRIP_NOPS (exp);
4858 if (TREE_CODE (exp) == BIT_AND_EXPR)
4860 and_mask = TREE_OPERAND (exp, 1);
4861 exp = TREE_OPERAND (exp, 0);
4862 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4863 if (TREE_CODE (and_mask) != INTEGER_CST)
4864 return NULL_TREE;
4867 poly_int64 poly_bitsize, poly_bitpos;
4868 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4869 pmode, punsignedp, preversep, pvolatilep);
4870 if ((inner == exp && and_mask == 0)
4871 || !poly_bitsize.is_constant (pbitsize)
4872 || !poly_bitpos.is_constant (pbitpos)
4873 || *pbitsize < 0
4874 || offset != 0
4875 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4876 /* Reject out-of-bound accesses (PR79731). */
4877 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4878 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4879 *pbitpos + *pbitsize) < 0))
4880 return NULL_TREE;
4882 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4883 if (unsigned_type == NULL_TREE)
4884 return NULL_TREE;
4886 *exp_ = exp;
4888 /* If the number of bits in the reference is the same as the bitsize of
4889 the outer type, then the outer type gives the signedness. Otherwise
4890 (in case of a small bitfield) the signedness is unchanged. */
4891 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4892 *punsignedp = TYPE_UNSIGNED (outer_type);
4894 /* Compute the mask to access the bitfield. */
4895 precision = TYPE_PRECISION (unsigned_type);
4897 mask = build_int_cst_type (unsigned_type, -1);
4899 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4900 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4902 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4903 if (and_mask != 0)
4904 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4905 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4907 *pmask = mask;
4908 *pand_mask = and_mask;
4909 return inner;
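/* Illustrative example (an addition, not part of the original source):
   for EXP = (unsigned char) s.b & 0x7, with S as in the example above on
   a little-endian target, decode_field_reference returns the underlying
   object S, sets *PBITSIZE = 5 and *PBITPOS = 3, stores the 0x7 constant
   in *PAND_MASK, and leaves in *PMASK that constant folded together with
   the all-ones mask of the 5-bit field.  */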
4912 /* Return true if MASK represents a mask of SIZE ones in the low-order
4913 bit positions and the type of MASK is signed. */
4915 static bool
4916 all_ones_mask_p (const_tree mask, unsigned int size)
4918 tree type = TREE_TYPE (mask);
4919 unsigned int precision = TYPE_PRECISION (type);
4921 /* If this function returns true when the type of the mask is
4922 UNSIGNED, then there will be errors. In particular see
4923 gcc.c-torture/execute/990326-1.c. There does not appear to be
4924 any documentation paper trail as to why this is so. But the pre
4925 wide-int worked with that restriction and it has been preserved
4926 here. */
4927 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4928 return false;
4930 return wi::mask (size, false, precision) == wi::to_wide (mask);
4933 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4934 represents the sign bit of EXP's type. If EXP represents a sign
4935 or zero extension, also test VAL against the unextended type.
4936 The return value is the (sub)expression whose sign bit is VAL,
4937 or NULL_TREE otherwise. */
4939 tree
4940 sign_bit_p (tree exp, const_tree val)
4942 int width;
4943 tree t;
4945 /* Tree EXP must have an integral type. */
4946 t = TREE_TYPE (exp);
4947 if (! INTEGRAL_TYPE_P (t))
4948 return NULL_TREE;
4950 /* Tree VAL must be an integer constant. */
4951 if (TREE_CODE (val) != INTEGER_CST
4952 || TREE_OVERFLOW (val))
4953 return NULL_TREE;
4955 width = TYPE_PRECISION (t);
4956 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4957 return exp;
4959 /* Handle extension from a narrower type. */
4960 if (TREE_CODE (exp) == NOP_EXPR
4961 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4962 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4964 return NULL_TREE;
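/* Illustrative example (an addition, not part of the original source):
   for a 32-bit int X and VAL = INT_MIN, sign_bit_p (x, val) returns X,
   since 0x80000000 has only the sign bit set at precision 32.  For
   EXP = (int) c with a signed char C, the recursion tests VAL against
   the unextended 8-bit type, so VAL = -128 identifies the sign bit of
   the narrower operand.  */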
4967 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4968 operand is simple enough to be evaluated unconditionally. */
4970 static bool
4971 simple_operand_p (const_tree exp)
4973 /* Strip any conversions that don't change the machine mode. */
4974 STRIP_NOPS (exp);
4976 return (CONSTANT_CLASS_P (exp)
4977 || TREE_CODE (exp) == SSA_NAME
4978 || (DECL_P (exp)
4979 && ! TREE_ADDRESSABLE (exp)
4980 && ! TREE_THIS_VOLATILE (exp)
4981 && ! DECL_NONLOCAL (exp)
4982 /* Don't regard global variables as simple. They may be
4983 allocated in ways unknown to the compiler (shared memory,
4984 #pragma weak, etc). */
4985 && ! TREE_PUBLIC (exp)
4986 && ! DECL_EXTERNAL (exp)
4987 /* Weakrefs are not safe to be read, since they can be NULL.
4988 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4989 have DECL_WEAK flag set. */
4990 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4991 /* Loading a static variable is unduly expensive, but global
4992 registers aren't expensive. */
4993 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4996 /* Determine if an operand is simple enough to be evaluated unconditionally.
4997 In addition to simple_operand_p, we assume that comparisons, conversions,
4998 and logic-not operations are simple, if their operands are simple, too. */
5000 bool
5001 simple_condition_p (tree exp)
5003 enum tree_code code;
5005 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5006 return false;
5008 while (CONVERT_EXPR_P (exp))
5009 exp = TREE_OPERAND (exp, 0);
5011 code = TREE_CODE (exp);
5013 if (TREE_CODE_CLASS (code) == tcc_comparison)
5014 return (simple_operand_p (TREE_OPERAND (exp, 0))
5015 && simple_operand_p (TREE_OPERAND (exp, 1)));
5017 if (code == TRUTH_NOT_EXPR)
5018 return simple_condition_p (TREE_OPERAND (exp, 0));
5020 return simple_operand_p (exp);
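/* Illustrative example (an addition, not part of the original source):
   for local, non-volatile ints A and B, the condition !(a < b) is
   simple: the TRUTH_NOT_EXPR recurses, the comparison is accepted, and
   each operand passes simple_operand_p.  A comparison reading a weak or
   global symbol is rejected, since evaluating it unconditionally could
   be expensive or unsafe.  */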
5024 /* The following functions are subroutines to fold_range_test and allow it to
5025 try to change a logical combination of comparisons into a range test.
5027 For example, both
5028 X == 2 || X == 3 || X == 4 || X == 5
5030 X >= 2 && X <= 5
5031 are converted to
5032 (unsigned) (X - 2) <= 3
5034 We describe each set of comparisons as being either inside or outside
5035 a range, using a variable named like IN_P, and then describe the
5036 range with a lower and upper bound. If one of the bounds is omitted,
5037 it represents either the highest or lowest value of the type.
5039 In the comments below, we represent a range by two numbers in brackets
5040 preceded by a "+" to designate being inside that range, or a "-" to
5041 designate being outside that range, so the condition can be inverted by
5042 flipping the prefix. An omitted bound is represented by a "-". For
5043 example, "- [-, 10]" means being outside the range starting at the lowest
5044 possible value and ending at 10, in other words, being greater than 10.
5045 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5046 always false.
5048 We set up things so that the missing bounds are handled in a consistent
5049 manner so neither a missing bound nor "true" and "false" need to be
5050 handled using a special case. */
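/* Illustrative note (an addition, not part of the original source): the
   single compare relies on unsigned wrap-around.  For X in [2, 5],

     (unsigned) (X - 2) <= 3

   checks both bounds at once: any X < 2 wraps to a value near the top of
   the unsigned range, and any X > 5 exceeds 3.  In the notation below
   this is the range + [2, 5], and its inverse X < 2 || X > 5 is
   - [2, 5].  */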
5052 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5053 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5054 and UPPER1_P are nonzero if the respective argument is an upper bound
5055 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5056 must be specified for a comparison. ARG1 will be converted to ARG0's
5057 type if both are specified. */
5059 static tree
5060 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5061 tree arg1, int upper1_p)
5063 tree tem;
5064 int result;
5065 int sgn0, sgn1;
5067 /* If neither arg represents infinity, do the normal operation.
5068 Else, if not a comparison, return infinity. Else handle the special
5069 comparison rules. Note that most of the cases below won't occur, but
5070 are handled for consistency. */
5072 if (arg0 != 0 && arg1 != 0)
5074 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5075 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5076 STRIP_NOPS (tem);
5077 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5080 if (TREE_CODE_CLASS (code) != tcc_comparison)
5081 return 0;
5083 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 if it is an
5084 omitted upper bound, and 0 if the bound is present. In real maths, open
5085 ended ranges need not compare equal. But this is computer arithmetic,
5086 where numbers are finite, so every omitted bound can be modeled by a
5087 value Z smaller or greater than any representable number. This permits
5088 us to treat omitted bounds of the same kind as equal. */
5089 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5090 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5091 switch (code)
5093 case EQ_EXPR:
5094 result = sgn0 == sgn1;
5095 break;
5096 case NE_EXPR:
5097 result = sgn0 != sgn1;
5098 break;
5099 case LT_EXPR:
5100 result = sgn0 < sgn1;
5101 break;
5102 case LE_EXPR:
5103 result = sgn0 <= sgn1;
5104 break;
5105 case GT_EXPR:
5106 result = sgn0 > sgn1;
5107 break;
5108 case GE_EXPR:
5109 result = sgn0 >= sgn1;
5110 break;
5111 default:
5112 gcc_unreachable ();
5115 return constant_boolean_node (result, type);
5118 /* Helper routine for make_range. Perform one step for it, return
5119 the new expression if the loop should continue or NULL_TREE if it should
5120 stop. */
5122 tree
5123 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5124 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5125 bool *strict_overflow_p)
5127 tree arg0_type = TREE_TYPE (arg0);
5128 tree n_low, n_high, low = *p_low, high = *p_high;
5129 int in_p = *p_in_p, n_in_p;
5131 switch (code)
5133 case TRUTH_NOT_EXPR:
5134 /* We can only do something if the range is testing for zero. */
5135 if (low == NULL_TREE || high == NULL_TREE
5136 || ! integer_zerop (low) || ! integer_zerop (high))
5137 return NULL_TREE;
5138 *p_in_p = ! in_p;
5139 return arg0;
5141 case EQ_EXPR: case NE_EXPR:
5142 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5143 /* We can only do something if the range is testing for zero
5144 and if the second operand is an integer constant. Note that
5145 saying something is "in" the range we make is done by
5146 complementing IN_P, since IN_P is set for the initial case of
5147 being not equal to zero; "out" leaves it alone. */
5148 if (low == NULL_TREE || high == NULL_TREE
5149 || ! integer_zerop (low) || ! integer_zerop (high)
5150 || TREE_CODE (arg1) != INTEGER_CST)
5151 return NULL_TREE;
5153 switch (code)
5155 case NE_EXPR: /* - [c, c] */
5156 low = high = arg1;
5157 break;
5158 case EQ_EXPR: /* + [c, c] */
5159 in_p = ! in_p, low = high = arg1;
5160 break;
5161 case GT_EXPR: /* - [-, c] */
5162 low = 0, high = arg1;
5163 break;
5164 case GE_EXPR: /* + [c, -] */
5165 in_p = ! in_p, low = arg1, high = 0;
5166 break;
5167 case LT_EXPR: /* - [c, -] */
5168 low = arg1, high = 0;
5169 break;
5170 case LE_EXPR: /* + [-, c] */
5171 in_p = ! in_p, low = 0, high = arg1;
5172 break;
5173 default:
5174 gcc_unreachable ();
5177 /* If this is an unsigned comparison, we also know that EXP is
5178 greater than or equal to zero. We base the range tests we make
5179 on that fact, so we record it here so we can parse existing
5180 range tests. We test arg0_type since often the return type
5181 of, e.g. EQ_EXPR, is boolean. */
5182 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5184 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5185 in_p, low, high, 1,
5186 build_int_cst (arg0_type, 0),
5187 NULL_TREE))
5188 return NULL_TREE;
5190 in_p = n_in_p, low = n_low, high = n_high;
5192 /* If the high bound is missing, but we have a nonzero low
5193 bound, reverse the range so it goes from zero to the low bound
5194 minus 1. */
5195 if (high == 0 && low && ! integer_zerop (low))
5197 in_p = ! in_p;
5198 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5199 build_int_cst (TREE_TYPE (low), 1), 0);
5200 low = build_int_cst (arg0_type, 0);
5204 *p_low = low;
5205 *p_high = high;
5206 *p_in_p = in_p;
5207 return arg0;
5209 case NEGATE_EXPR:
5210 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5211 low and high are non-NULL, then normalize will DTRT. */
5212 if (!TYPE_UNSIGNED (arg0_type)
5213 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5215 if (low == NULL_TREE)
5216 low = TYPE_MIN_VALUE (arg0_type);
5217 if (high == NULL_TREE)
5218 high = TYPE_MAX_VALUE (arg0_type);
5221 /* (-x) IN [a,b] -> x in [-b, -a] */
5222 n_low = range_binop (MINUS_EXPR, exp_type,
5223 build_int_cst (exp_type, 0),
5224 0, high, 1);
5225 n_high = range_binop (MINUS_EXPR, exp_type,
5226 build_int_cst (exp_type, 0),
5227 0, low, 0);
5228 if (n_high != 0 && TREE_OVERFLOW (n_high))
5229 return NULL_TREE;
5230 goto normalize;
5232 case BIT_NOT_EXPR:
5233 /* ~ X -> -X - 1 */
5234 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5235 build_int_cst (exp_type, 1));
5237 case PLUS_EXPR:
5238 case MINUS_EXPR:
5239 if (TREE_CODE (arg1) != INTEGER_CST)
5240 return NULL_TREE;
5242 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5243 move a constant to the other side. */
5244 if (!TYPE_UNSIGNED (arg0_type)
5245 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5246 return NULL_TREE;
5248 /* If EXP is signed, any overflow in the computation is undefined,
5249 so we don't worry about it so long as our computations on
5250 the bounds don't overflow. For unsigned, overflow is defined
5251 and this is exactly the right thing. */
5252 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5253 arg0_type, low, 0, arg1, 0);
5254 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5255 arg0_type, high, 1, arg1, 0);
5256 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5257 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5258 return NULL_TREE;
5260 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5261 *strict_overflow_p = true;
5263 normalize:
5264 /* Check for an unsigned range which has wrapped around the maximum
5265 value thus making n_high < n_low, and normalize it. */
5266 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5268 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5269 build_int_cst (TREE_TYPE (n_high), 1), 0);
5270 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5271 build_int_cst (TREE_TYPE (n_low), 1), 0);
5273 /* If the range is of the form +/- [ x+1, x ], we won't
5274 be able to normalize it. But then, it represents the
5275 whole range or the empty set, so make it
5276 +/- [ -, - ]. */
5277 if (tree_int_cst_equal (n_low, low)
5278 && tree_int_cst_equal (n_high, high))
5279 low = high = 0;
5280 else
5281 in_p = ! in_p;
5283 else
5284 low = n_low, high = n_high;
5286 *p_low = low;
5287 *p_high = high;
5288 *p_in_p = in_p;
5289 return arg0;
5291 CASE_CONVERT:
5292 case NON_LVALUE_EXPR:
5293 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5294 return NULL_TREE;
5296 if (! INTEGRAL_TYPE_P (arg0_type)
5297 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5298 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5299 return NULL_TREE;
5301 n_low = low, n_high = high;
5303 if (n_low != 0)
5304 n_low = fold_convert_loc (loc, arg0_type, n_low);
5306 if (n_high != 0)
5307 n_high = fold_convert_loc (loc, arg0_type, n_high);
5309 /* If we're converting arg0 from an unsigned type, to exp,
5310 a signed type, we will be doing the comparison as unsigned.
5311 The tests above have already verified that LOW and HIGH
5312 are both positive.
5314 So we have to ensure that we will handle large unsigned
5315 values the same way that the current signed bounds treat
5316 negative values. */
5318 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5320 tree high_positive;
5321 tree equiv_type;
5322 /* For fixed-point modes, we need to pass the saturating flag
5323 as the 2nd parameter. */
5324 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5325 equiv_type
5326 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5327 TYPE_SATURATING (arg0_type));
5328 else
5329 equiv_type
5330 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5332 /* A range without an upper bound is, naturally, unbounded.
5333 Since convert would have cropped a very large value, use
5334 the max value for the destination type. */
5335 high_positive
5336 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5337 : TYPE_MAX_VALUE (arg0_type);
5339 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5340 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5341 fold_convert_loc (loc, arg0_type,
5342 high_positive),
5343 build_int_cst (arg0_type, 1));
5345 /* If the low bound is specified, "and" the range with the
5346 range for which the original unsigned value will be
5347 positive. */
5348 if (low != 0)
5350 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5351 1, fold_convert_loc (loc, arg0_type,
5352 integer_zero_node),
5353 high_positive))
5354 return NULL_TREE;
5356 in_p = (n_in_p == in_p);
5358 else
5360 /* Otherwise, "or" the range with the range of the input
5361 that will be interpreted as negative. */
5362 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5363 1, fold_convert_loc (loc, arg0_type,
5364 integer_zero_node),
5365 high_positive))
5366 return NULL_TREE;
5368 in_p = (in_p != n_in_p);
5372 /* Otherwise, if we are converting arg0 from signed type, to exp,
5373 an unsigned type, we will do the comparison as signed. If
5374 high is non-NULL, we punt above if it doesn't fit in the signed
5375 type, so if we get through here, +[-, high] or +[low, high] are
5376 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5377 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5378 high is not, the +[low, -] range is equivalent to union of
5379 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5380 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5381 low being 0, which should be treated as [-, -]. */
5382 else if (TYPE_UNSIGNED (exp_type)
5383 && !TYPE_UNSIGNED (arg0_type)
5384 && low
5385 && !high)
5387 if (integer_zerop (low))
5388 n_low = NULL_TREE;
5389 else
5391 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5392 n_low, build_int_cst (arg0_type, -1));
5393 n_low = build_zero_cst (arg0_type);
5394 in_p = !in_p;
5398 *p_low = n_low;
5399 *p_high = n_high;
5400 *p_in_p = in_p;
5401 return arg0;
5403 default:
5404 return NULL_TREE;
5408 /* Given EXP, a logical expression, set the range it is testing into
5409 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5410 actually being tested. *PLOW and *PHIGH will be made of the same
5411 type as the returned expression. If EXP is not a comparison, we
5412 will most likely not be returning a useful value and range. Set
5413 *STRICT_OVERFLOW_P to true if the return value is only valid
5414 because signed overflow is undefined; otherwise, do not change
5415 *STRICT_OVERFLOW_P. */
5417 tree
5418 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5419 bool *strict_overflow_p)
5421 enum tree_code code;
5422 tree arg0, arg1 = NULL_TREE;
5423 tree exp_type, nexp;
5424 int in_p;
5425 tree low, high;
5426 location_t loc = EXPR_LOCATION (exp);
5428 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5429 and see if we can refine the range. Some of the cases below may not
5430 happen, but it doesn't seem worth worrying about this. We "continue"
5431 the outer loop when we've changed something; otherwise we "break"
5432 the switch, which will "break" the while. */
5434 in_p = 0;
5435 low = high = build_int_cst (TREE_TYPE (exp), 0);
5437 while (1)
5439 code = TREE_CODE (exp);
5440 exp_type = TREE_TYPE (exp);
5441 arg0 = NULL_TREE;
5443 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5445 if (TREE_OPERAND_LENGTH (exp) > 0)
5446 arg0 = TREE_OPERAND (exp, 0);
5447 if (TREE_CODE_CLASS (code) == tcc_binary
5448 || TREE_CODE_CLASS (code) == tcc_comparison
5449 || (TREE_CODE_CLASS (code) == tcc_expression
5450 && TREE_OPERAND_LENGTH (exp) > 1))
5451 arg1 = TREE_OPERAND (exp, 1);
5453 if (arg0 == NULL_TREE)
5454 break;
5456 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5457 &high, &in_p, strict_overflow_p);
5458 if (nexp == NULL_TREE)
5459 break;
5460 exp = nexp;
5463 /* If EXP is a constant, we can evaluate whether this is true or false. */
5464 if (TREE_CODE (exp) == INTEGER_CST)
5466 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5467 exp, 0, low, 0))
5468 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5469 exp, 1, high, 1)));
5470 low = high = 0;
5471 exp = 0;
5474 *pin_p = in_p, *plow = low, *phigh = high;
5475 return exp;
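/* Illustrative example (an addition, not part of the original source):
   for EXP = (x + 1) > 5 with unsigned X, make_range first handles
   GT_EXPR, producing the range - [-, 5] of x + 1 (IN_P = 0, i.e. "not
   in"), then PLUS_EXPR moves the constant across, giving - [-, 4] of X.
   It returns X with *PIN_P = 0, *PLOW = NULL and *PHIGH = 4, which
   reads "X not in [minimum, 4]", i.e. X > 4.  */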
5478 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5479 a bitwise check i.e. when
5480 LOW == 0xXX...X00...0
5481 HIGH == 0xXX...X11...1
5482 Return corresponding mask in MASK and stem in VALUE. */
5484 static bool
5485 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5486 tree *value)
5488 if (TREE_CODE (low) != INTEGER_CST
5489 || TREE_CODE (high) != INTEGER_CST)
5490 return false;
5492 unsigned prec = TYPE_PRECISION (type);
5493 wide_int lo = wi::to_wide (low, prec);
5494 wide_int hi = wi::to_wide (high, prec);
5496 wide_int end_mask = lo ^ hi;
5497 if ((end_mask & (end_mask + 1)) != 0
5498 || (lo & end_mask) != 0)
5499 return false;
5501 wide_int stem_mask = ~end_mask;
5502 wide_int stem = lo & stem_mask;
5503 if (stem != (hi & stem_mask))
5504 return false;
5506 *mask = wide_int_to_tree (type, stem_mask);
5507 *value = wide_int_to_tree (type, stem);
5509 return true;
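/* Illustrative example (an addition, not part of the original source):
   for LOW = 0x50 and HIGH = 0x5f the bounds differ only in the low four
   bits: end_mask = 0x0f, stem_mask = 0xf0 and stem = 0x50, so the range
   check X >= 0x50 && X <= 0x5f becomes (X & 0xf0) == 0x50.  LOW = 0x51
   with HIGH = 0x5e passes the contiguity test but is rejected, because
   LOW has bits set inside the varying low part.  */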
5512 /* Helper routine for build_range_check and match.pd. Return the type to
5513 perform the check or NULL if it shouldn't be optimized. */
5515 tree
5516 range_check_type (tree etype)
5518 /* First make sure that arithmetic in this type is valid, then make sure
5519 that it wraps around. */
5520 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5521 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5523 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5525 tree utype, minv, maxv;
5527 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5528 for the type in question, as we rely on this here. */
5529 utype = unsigned_type_for (etype);
5530 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5531 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5532 build_int_cst (TREE_TYPE (maxv), 1), 1);
5533 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5535 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5536 minv, 1, maxv, 1)))
5537 etype = utype;
5538 else
5539 return NULL_TREE;
5541 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5542 etype = unsigned_type_for (etype);
5543 return etype;
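/* Illustrative example (an addition, not part of the original source):
   for a 32-bit signed int, range_check_type returns unsigned int,
   because (unsigned) INT_MAX + 1 == (unsigned) INT_MIN and the
   subtraction emitted by build_range_check may therefore wrap safely.
   An enum or boolean is first mapped to the integer type of the same
   precision; a type whose unsigned counterpart does not wrap this way
   yields NULL_TREE.  */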
5546 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5547 type, TYPE, return an expression to test if EXP is in (or out of, depending
5548 on IN_P) the range. Return 0 if the test couldn't be created. */
5550 tree
5551 build_range_check (location_t loc, tree type, tree exp, int in_p,
5552 tree low, tree high)
5554 tree etype = TREE_TYPE (exp), mask, value;
5556 /* Disable this optimization for function pointer expressions
5557 on targets that require function pointer canonicalization. */
5558 if (targetm.have_canonicalize_funcptr_for_compare ()
5559 && POINTER_TYPE_P (etype)
5560 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5561 return NULL_TREE;
5563 if (! in_p)
5565 value = build_range_check (loc, type, exp, 1, low, high);
5566 if (value != 0)
5567 return invert_truthvalue_loc (loc, value);
5569 return 0;
5572 if (low == 0 && high == 0)
5573 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5575 if (low == 0)
5576 return fold_build2_loc (loc, LE_EXPR, type, exp,
5577 fold_convert_loc (loc, etype, high));
5579 if (high == 0)
5580 return fold_build2_loc (loc, GE_EXPR, type, exp,
5581 fold_convert_loc (loc, etype, low));
5583 if (operand_equal_p (low, high, 0))
5584 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5585 fold_convert_loc (loc, etype, low));
5587 if (TREE_CODE (exp) == BIT_AND_EXPR
5588 && maskable_range_p (low, high, etype, &mask, &value))
5589 return fold_build2_loc (loc, EQ_EXPR, type,
5590 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5591 exp, mask),
5592 value);
5594 if (integer_zerop (low))
5596 if (! TYPE_UNSIGNED (etype))
5598 etype = unsigned_type_for (etype);
5599 high = fold_convert_loc (loc, etype, high);
5600 exp = fold_convert_loc (loc, etype, exp);
5602 return build_range_check (loc, type, exp, 1, 0, high);
5605 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5606 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5608 int prec = TYPE_PRECISION (etype);
5610 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5612 if (TYPE_UNSIGNED (etype))
5614 tree signed_etype = signed_type_for (etype);
5615 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5616 etype
5617 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5618 else
5619 etype = signed_etype;
5620 exp = fold_convert_loc (loc, etype, exp);
5622 return fold_build2_loc (loc, GT_EXPR, type, exp,
5623 build_int_cst (etype, 0));
5627 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5628 This requires wrap-around arithmetic for the type of the expression. */
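/* For illustration: with this, the test 5 <= c && c <= 10 becomes
   (unsigned) c - 5 <= 5, a single unsigned comparison; the recursive
   call below reaches the integer_zerop (low) case with the shifted
   range [0, high - low].  */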
5629 etype = range_check_type (etype);
5630 if (etype == NULL_TREE)
5631 return NULL_TREE;
5633 high = fold_convert_loc (loc, etype, high);
5634 low = fold_convert_loc (loc, etype, low);
5635 exp = fold_convert_loc (loc, etype, exp);
5637 value = const_binop (MINUS_EXPR, high, low);
5639 if (value != 0 && !TREE_OVERFLOW (value))
5640 return build_range_check (loc, type,
5641 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5642 1, build_int_cst (etype, 0), value);
5644 return 0;
5647 /* Return the predecessor of VAL in its type, handling the infinite case. */
5649 static tree
5650 range_predecessor (tree val)
5652 tree type = TREE_TYPE (val);
5654 if (INTEGRAL_TYPE_P (type)
5655 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5656 return 0;
5657 else
5658 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5659 build_int_cst (TREE_TYPE (val), 1), 0);
5662 /* Return the successor of VAL in its type, handling the infinite case. */
5664 static tree
5665 range_successor (tree val)
5667 tree type = TREE_TYPE (val);
5669 if (INTEGRAL_TYPE_P (type)
5670 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5671 return 0;
5672 else
5673 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5674 build_int_cst (TREE_TYPE (val), 1), 0);
5677 /* Given two ranges, see if we can merge them into one. Return 1 if we
5678 can, 0 if we can't. Set the output range into the specified parameters. */
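/* For illustration: merging the inclusive ranges + [2, 10] and
   + [5, 20] yields the intersection + [5, 10], while merging the two
   adjacent exclusions - [2, 10] and - [11, 20] yields - [2, 20].
   Pairs whose combination is not expressible as a single range make
   us return 0.  */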
5680 bool
5681 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5682 tree high0, int in1_p, tree low1, tree high1)
5684 bool no_overlap;
5685 int subset;
5686 int temp;
5687 tree tem;
5688 int in_p;
5689 tree low, high;
5690 int lowequal = ((low0 == 0 && low1 == 0)
5691 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5692 low0, 0, low1, 0)));
5693 int highequal = ((high0 == 0 && high1 == 0)
5694 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5695 high0, 1, high1, 1)));
5697 /* Make range 0 be the range that starts first, or that ends last if they
5698 start at the same value; swap them if necessary. */
5699 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5700 low0, 0, low1, 0))
5701 || (lowequal
5702 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5703 high1, 1, high0, 1))))
5705 temp = in0_p, in0_p = in1_p, in1_p = temp;
5706 tem = low0, low0 = low1, low1 = tem;
5707 tem = high0, high0 = high1, high1 = tem;
5710 /* If the second range is != high1, where high1 is the maximum value of
5711 its type, first try merging with the < high1 range. */
5712 if (low1
5713 && high1
5714 && TREE_CODE (low1) == INTEGER_CST
5715 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5716 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5717 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5718 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5719 && operand_equal_p (low1, high1, 0))
5721 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5722 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5723 !in1_p, NULL_TREE, range_predecessor (low1)))
5724 return true;
5725 /* Similarly, if the second range is != low1, where low1 is the minimum
5726 value of its type, first try merging with the > low1 range. */
5727 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5728 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5729 !in1_p, range_successor (low1), NULL_TREE))
5730 return true;
5733 /* Now flag two cases, whether the ranges are disjoint or whether the
5734 second range is totally subsumed in the first. Note that the tests
5735 below are simplified by the ones above. */
5736 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5737 high0, 1, low1, 0));
5738 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5739 high1, 1, high0, 1));
5741 /* We now have four cases, depending on whether we are including or
5742 excluding the two ranges. */
5743 if (in0_p && in1_p)
5745 /* If they don't overlap, the result is false. If the second range
5746 is a subset it is the result. Otherwise, the range is from the start
5747 of the second to the end of the first. */
5748 if (no_overlap)
5749 in_p = 0, low = high = 0;
5750 else if (subset)
5751 in_p = 1, low = low1, high = high1;
5752 else
5753 in_p = 1, low = low1, high = high0;
5756 else if (in0_p && ! in1_p)
5758 /* If they don't overlap, the result is the first range. If they are
5759 equal, the result is false. If the second range is a subset of the
5760 first, and the ranges begin at the same place, we go from just after
5761 the end of the second range to the end of the first. If the second
5762 range is not a subset of the first, or if it is a subset and both
5763 ranges end at the same place, the range starts at the start of the
5764 first range and ends just before the second range.
5765 Otherwise, we can't describe this as a single range. */
5766 if (no_overlap)
5767 in_p = 1, low = low0, high = high0;
5768 else if (lowequal && highequal)
5769 in_p = 0, low = high = 0;
5770 else if (subset && lowequal)
5772 low = range_successor (high1);
5773 high = high0;
5774 in_p = 1;
5775 if (low == 0)
5777 /* We are in the weird situation where high0 > high1 but
5778 high1 has no successor. Punt. */
5779 return 0;
5782 else if (! subset || highequal)
5784 low = low0;
5785 high = range_predecessor (low1);
5786 in_p = 1;
5787 if (high == 0)
5789 /* low0 < low1 but low1 has no predecessor. Punt. */
5790 return 0;
5793 else
5794 return 0;
5797 else if (! in0_p && in1_p)
5799 /* If they don't overlap, the result is the second range. If the second
5800 is a subset of the first, the result is false. Otherwise,
5801 the range starts just after the first range and ends at the
5802 end of the second. */
5803 if (no_overlap)
5804 in_p = 1, low = low1, high = high1;
5805 else if (subset || highequal)
5806 in_p = 0, low = high = 0;
5807 else
5809 low = range_successor (high0);
5810 high = high1;
5811 in_p = 1;
5812 if (low == 0)
5814 /* high1 > high0 but high0 has no successor. Punt. */
5815 return 0;
5820 else
5822 /* The case where we are excluding both ranges. Here the complex case
5823 is if they don't overlap. In that case, the only time we have a
5824 range is if they are adjacent. If the second is a subset of the
5825 first, the result is the first. Otherwise, the range to exclude
5826 starts at the beginning of the first range and ends at the end of the
5827 second. */
5828 if (no_overlap)
5830 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5831 range_successor (high0),
5832 1, low1, 0)))
5833 in_p = 0, low = low0, high = high1;
5834 else
5836 /* Canonicalize - [min, x] into - [-, x]. */
5837 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5838 switch (TREE_CODE (TREE_TYPE (low0)))
5840 case ENUMERAL_TYPE:
5841 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5842 GET_MODE_BITSIZE
5843 (TYPE_MODE (TREE_TYPE (low0)))))
5844 break;
5845 /* FALLTHROUGH */
5846 case INTEGER_TYPE:
5847 if (tree_int_cst_equal (low0,
5848 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5849 low0 = 0;
5850 break;
5851 case POINTER_TYPE:
5852 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5853 && integer_zerop (low0))
5854 low0 = 0;
5855 break;
5856 default:
5857 break;
5860 /* Canonicalize - [x, max] into - [x, -]. */
5861 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5862 switch (TREE_CODE (TREE_TYPE (high1)))
5864 case ENUMERAL_TYPE:
5865 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5866 GET_MODE_BITSIZE
5867 (TYPE_MODE (TREE_TYPE (high1)))))
5868 break;
5869 /* FALLTHROUGH */
5870 case INTEGER_TYPE:
5871 if (tree_int_cst_equal (high1,
5872 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5873 high1 = 0;
5874 break;
5875 case POINTER_TYPE:
5876 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5877 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5878 high1, 1,
5879 build_int_cst (TREE_TYPE (high1), 1),
5880 1)))
5881 high1 = 0;
5882 break;
5883 default:
5884 break;
5887 /* The ranges might be also adjacent between the maximum and
5888 minimum values of the given type. For
5889 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5890 return + [x + 1, y - 1]. */
5891 if (low0 == 0 && high1 == 0)
5893 low = range_successor (high0);
5894 high = range_predecessor (low1);
5895 if (low == 0 || high == 0)
5896 return 0;
5898 in_p = 1;
5900 else
5901 return 0;
5904 else if (subset)
5905 in_p = 0, low = low0, high = high0;
5906 else
5907 in_p = 0, low = low0, high = high1;
5910 *pin_p = in_p, *plow = low, *phigh = high;
5911 return 1;
5915 /* Subroutine of fold, looking inside expressions of the form
5916 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5917 are the three operands of the COND_EXPR. This function is
5918 also used to optimize A op B ? C : A, by reversing the
5919 comparison first.
5921 Return a folded expression whose code is not a COND_EXPR
5922 anymore, or NULL_TREE if no folding opportunity is found. */
5924 static tree
5925 fold_cond_expr_with_comparison (location_t loc, tree type,
5926 enum tree_code comp_code,
5927 tree arg00, tree arg01, tree arg1, tree arg2)
5929 tree arg1_type = TREE_TYPE (arg1);
5930 tree tem;
5932 STRIP_NOPS (arg1);
5933 STRIP_NOPS (arg2);
5935 /* If we have A op 0 ? A : -A, consider applying the following
5936 transformations:
5938 A == 0? A : -A same as -A
5939 A != 0? A : -A same as A
5940 A >= 0? A : -A same as abs (A)
5941 A > 0? A : -A same as abs (A)
5942 A <= 0? A : -A same as -abs (A)
5943 A < 0? A : -A same as -abs (A)
5945 None of these transformations work for modes with signed
5946 zeros. If A is +/-0, the first two transformations will
5947 change the sign of the result (from +0 to -0, or vice
5948 versa). The last four will fix the sign of the result,
5949 even though the original expressions could be positive or
5950 negative, depending on the sign of A.
5952 Note that all these transformations are correct if A is
5953 NaN, since the two alternatives (A and -A) are also NaNs. */
5954 if (!HONOR_SIGNED_ZEROS (type)
5955 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5956 ? real_zerop (arg01)
5957 : integer_zerop (arg01))
5958 && ((TREE_CODE (arg2) == NEGATE_EXPR
5959 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5960 /* In the case that A is of the form X-Y, '-A' (arg2) may
5961 have already been folded to Y-X, check for that. */
5962 || (TREE_CODE (arg1) == MINUS_EXPR
5963 && TREE_CODE (arg2) == MINUS_EXPR
5964 && operand_equal_p (TREE_OPERAND (arg1, 0),
5965 TREE_OPERAND (arg2, 1), 0)
5966 && operand_equal_p (TREE_OPERAND (arg1, 1),
5967 TREE_OPERAND (arg2, 0), 0))))
5968 switch (comp_code)
5970 case EQ_EXPR:
5971 case UNEQ_EXPR:
5972 tem = fold_convert_loc (loc, arg1_type, arg1);
5973 return fold_convert_loc (loc, type, negate_expr (tem));
5974 case NE_EXPR:
5975 case LTGT_EXPR:
5976 return fold_convert_loc (loc, type, arg1);
5977 case UNGE_EXPR:
5978 case UNGT_EXPR:
5979 if (flag_trapping_math)
5980 break;
5981 /* Fall through. */
5982 case GE_EXPR:
5983 case GT_EXPR:
5984 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5985 break;
5986 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5987 return fold_convert_loc (loc, type, tem);
5988 case UNLE_EXPR:
5989 case UNLT_EXPR:
5990 if (flag_trapping_math)
5991 break;
5992 /* FALLTHRU */
5993 case LE_EXPR:
5994 case LT_EXPR:
5995 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5996 break;
5997 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5998 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6000 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6001 is not: it invokes UB both in abs and in the negation of it.
6002 So, use ABSU_EXPR instead. */
6003 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6004 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6005 tem = negate_expr (tem);
6006 return fold_convert_loc (loc, type, tem);
6008 else
6010 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6011 return negate_expr (fold_convert_loc (loc, type, tem));
6013 default:
6014 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6015 break;
6018 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6019 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6020 both transformations are correct when A is NaN: A != 0
6021 is then true, and A == 0 is false. */
6023 if (!HONOR_SIGNED_ZEROS (type)
6024 && integer_zerop (arg01) && integer_zerop (arg2))
6026 if (comp_code == NE_EXPR)
6027 return fold_convert_loc (loc, type, arg1);
6028 else if (comp_code == EQ_EXPR)
6029 return build_zero_cst (type);
6032 /* Try some transformations of A op B ? A : B.
6034 A == B? A : B same as B
6035 A != B? A : B same as A
6036 A >= B? A : B same as max (A, B)
6037 A > B? A : B same as max (B, A)
6038 A <= B? A : B same as min (A, B)
6039 A < B? A : B same as min (B, A)
6041 As above, these transformations don't work in the presence
6042 of signed zeros. For example, if A and B are zeros of
6043 opposite sign, the first two transformations will change
6044 the sign of the result. In the last four, the original
6045 expressions give different results for (A=+0, B=-0) and
6046 (A=-0, B=+0), but the transformed expressions do not.
6048 The first two transformations are correct if either A or B
6049 is a NaN. In the first transformation, the condition will
6050 be false, and B will indeed be chosen. In the case of the
6051 second transformation, the condition A != B will be true,
6052 and A will be chosen.
6054 The conversions to max() and min() are not correct if B is
6055 a number and A is not. The conditions in the original
6056 expressions will be false, so all four give B. The min()
6057 and max() versions would give a NaN instead. */
6058 if (!HONOR_SIGNED_ZEROS (type)
6059 && operand_equal_for_comparison_p (arg01, arg2)
6060 /* Avoid these transformations if the COND_EXPR may be used
6061 as an lvalue in the C++ front-end. PR c++/19199. */
6062 && (in_gimple_form
6063 || VECTOR_TYPE_P (type)
6064 || (! lang_GNU_CXX ()
6065 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6066 || ! maybe_lvalue_p (arg1)
6067 || ! maybe_lvalue_p (arg2)))
6069 tree comp_op0 = arg00;
6070 tree comp_op1 = arg01;
6071 tree comp_type = TREE_TYPE (comp_op0);
6073 switch (comp_code)
6075 case EQ_EXPR:
6076 return fold_convert_loc (loc, type, arg2);
6077 case NE_EXPR:
6078 return fold_convert_loc (loc, type, arg1);
6079 case LE_EXPR:
6080 case LT_EXPR:
6081 case UNLE_EXPR:
6082 case UNLT_EXPR:
6083 /* In C++ a ?: expression can be an lvalue, so put the
6084 operand which will be used if they are equal first
6085 so that we can convert this back to the
6086 corresponding COND_EXPR. */
6087 if (!HONOR_NANS (arg1))
6089 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6090 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6091 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6092 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6093 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6094 comp_op1, comp_op0);
6095 return fold_convert_loc (loc, type, tem);
6097 break;
6098 case GE_EXPR:
6099 case GT_EXPR:
6100 case UNGE_EXPR:
6101 case UNGT_EXPR:
6102 if (!HONOR_NANS (arg1))
6104 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6105 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6106 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6107 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6108 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6109 comp_op1, comp_op0);
6110 return fold_convert_loc (loc, type, tem);
6112 break;
6113 case UNEQ_EXPR:
6114 if (!HONOR_NANS (arg1))
6115 return fold_convert_loc (loc, type, arg2);
6116 break;
6117 case LTGT_EXPR:
6118 if (!HONOR_NANS (arg1))
6119 return fold_convert_loc (loc, type, arg1);
6120 break;
6121 default:
6122 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6123 break;
6127 return NULL_TREE;
6132 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6133 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6134 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6135 false) >= 2)
6136 #endif
6138 /* EXP is some logical combination of boolean tests. See if we can
6139 merge it into some range test. Return the new tree if so. */
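/* For illustration: for i == 2 || i == 3, make_range produces
   + [2, 2] and + [3, 3]; after inverting both for the OR, merging,
   and inverting the result back, build_range_check emits the single
   test (unsigned) i - 2 <= 1 (modulo conversions).  */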
6141 static tree
6142 fold_range_test (location_t loc, enum tree_code code, tree type,
6143 tree op0, tree op1)
6145 int or_op = (code == TRUTH_ORIF_EXPR
6146 || code == TRUTH_OR_EXPR);
6147 int in0_p, in1_p, in_p;
6148 tree low0, low1, low, high0, high1, high;
6149 bool strict_overflow_p = false;
6150 tree tem, lhs, rhs;
6151 const char * const warnmsg = G_("assuming signed overflow does not occur "
6152 "when simplifying range test");
6154 if (!INTEGRAL_TYPE_P (type))
6155 return 0;
6157 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6158 /* If op0 is known true or false and this is a short-circuiting
6159 operation we must not merge with op1 since that makes side-effects
6160 unconditional. So special-case this. */
6161 if (!lhs
6162 && ((code == TRUTH_ORIF_EXPR && in0_p)
6163 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6164 return op0;
6165 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6167 /* If this is an OR operation, invert both sides; we will invert
6168 again at the end. */
6169 if (or_op)
6170 in0_p = ! in0_p, in1_p = ! in1_p;
6172 /* If both expressions are the same, if we can merge the ranges, and we
6173 can build the range test, return it or it inverted. If one of the
6174 ranges is always true or always false, consider it to be the same
6175 expression as the other. */
6176 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6177 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6178 in1_p, low1, high1)
6179 && (tem = (build_range_check (loc, type,
6180 lhs != 0 ? lhs
6181 : rhs != 0 ? rhs : integer_zero_node,
6182 in_p, low, high))) != 0)
6184 if (strict_overflow_p)
6185 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6186 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6189 /* On machines where the branch cost is expensive, if this is a
6190 short-circuited branch and the underlying object on both sides
6191 is the same, make a non-short-circuit operation. */
6192 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6193 if (param_logical_op_non_short_circuit != -1)
6194 logical_op_non_short_circuit
6195 = param_logical_op_non_short_circuit;
6196 if (logical_op_non_short_circuit
6197 && !sanitize_coverage_p ()
6198 && lhs != 0 && rhs != 0
6199 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6200 && operand_equal_p (lhs, rhs, 0))
6202 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6203 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6204 which cases we can't do this. */
6205 if (simple_operand_p (lhs))
6206 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6207 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6208 type, op0, op1);
6210 else if (!lang_hooks.decls.global_bindings_p ()
6211 && !CONTAINS_PLACEHOLDER_P (lhs))
6213 tree common = save_expr (lhs);
6215 if ((lhs = build_range_check (loc, type, common,
6216 or_op ? ! in0_p : in0_p,
6217 low0, high0)) != 0
6218 && (rhs = build_range_check (loc, type, common,
6219 or_op ? ! in1_p : in1_p,
6220 low1, high1)) != 0)
6222 if (strict_overflow_p)
6223 fold_overflow_warning (warnmsg,
6224 WARN_STRICT_OVERFLOW_COMPARISON);
6225 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6226 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6227 type, lhs, rhs);
6232 return 0;
6235 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6236 bit value. Arrange things so the extra bits will be set to zero if and
6237 only if C is sign-extended to its full width. If MASK is nonzero,
6238 it is an INTEGER_CST that should be AND'ed with the extra bits. */
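/* For illustration, with a 32-bit type, P == 8, UNSIGNEDP == 0 and no
   MASK: C == 0x80 has bit 7 (its sign bit as an 8-bit value) set, so
   the bits above it are turned on and the result is 0xffffff80, i.e.
   C sign-extended from 8 bits; C == 0x7f comes back unchanged.  */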
6240 static tree
6241 unextend (tree c, int p, int unsignedp, tree mask)
6243 tree type = TREE_TYPE (c);
6244 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6245 tree temp;
6247 if (p == modesize || unsignedp)
6248 return c;
6250 /* We work by getting just the sign bit into the low-order bit, then
6251 into the high-order bit, then sign-extend. We then XOR that value
6252 with C. */
6253 temp = build_int_cst (TREE_TYPE (c),
6254 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6256 /* We must use a signed type in order to get an arithmetic right shift.
6257 However, we must also avoid introducing accidental overflows, so that
6258 a subsequent call to integer_zerop will work. Hence we must
6259 do the type conversion here. At this point, the constant is either
6260 zero or one, and the conversion to a signed type can never overflow.
6261 We could get an overflow if this conversion is done anywhere else. */
6262 if (TYPE_UNSIGNED (type))
6263 temp = fold_convert (signed_type_for (type), temp);
6265 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6266 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6267 if (mask != 0)
6268 temp = const_binop (BIT_AND_EXPR, temp,
6269 fold_convert (TREE_TYPE (c), mask));
6270 /* If necessary, convert the type back to match the type of C. */
6271 if (TYPE_UNSIGNED (type))
6272 temp = fold_convert (type, temp);
6274 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6277 /* For an expression that has the form
6278 (A && B) || ~B
6279 or
6280 (A || B) && ~B,
6281 we can drop one of the inner expressions and simplify to
6282 A || ~B
6283 or
6284 A && ~B
6285 LOC is the location of the resulting expression. OP is the inner
6286 logical operation; the left-hand side in the examples above, while CMPOP
6287 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6288 removing a condition that guards another, as in
6289 (A != NULL && A->...) || A == NULL
6290 which we must not transform. If RHS_ONLY is true, only eliminate the
6291 right-most operand of the inner logical operation. */
6293 static tree
6294 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6295 bool rhs_only)
6297 tree type = TREE_TYPE (cmpop);
6298 enum tree_code code = TREE_CODE (cmpop);
6299 enum tree_code truthop_code = TREE_CODE (op);
6300 tree lhs = TREE_OPERAND (op, 0);
6301 tree rhs = TREE_OPERAND (op, 1);
6302 tree orig_lhs = lhs, orig_rhs = rhs;
6303 enum tree_code rhs_code = TREE_CODE (rhs);
6304 enum tree_code lhs_code = TREE_CODE (lhs);
6305 enum tree_code inv_code;
6307 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6308 return NULL_TREE;
6310 if (TREE_CODE_CLASS (code) != tcc_comparison)
6311 return NULL_TREE;
6313 if (rhs_code == truthop_code)
6315 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6316 if (newrhs != NULL_TREE)
6318 rhs = newrhs;
6319 rhs_code = TREE_CODE (rhs);
6322 if (lhs_code == truthop_code && !rhs_only)
6324 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6325 if (newlhs != NULL_TREE)
6327 lhs = newlhs;
6328 lhs_code = TREE_CODE (lhs);
6332 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6333 if (inv_code == rhs_code
6334 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6335 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6336 return lhs;
6337 if (!rhs_only && inv_code == lhs_code
6338 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6339 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6340 return rhs;
6341 if (rhs != orig_rhs || lhs != orig_lhs)
6342 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6343 lhs, rhs);
6344 return NULL_TREE;
6347 /* Find ways of folding logical expressions of LHS and RHS:
6348 Try to merge two comparisons to the same innermost item.
6349 Look for range tests like "ch >= '0' && ch <= '9'".
6350 Look for combinations of simple terms on machines with expensive branches
6351 and evaluate the RHS unconditionally.
6353 For example, if we have p->a == 2 && p->b == 4 and we can make an
6354 object large enough to span both A and B, we can do this with a comparison
6355 against the object ANDed with a mask.
6357 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6358 operations to do this with one comparison.
6360 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6361 function and the one above.
6363 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6364 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6366 TRUTH_TYPE is the type of the logical operation, and LHS and RHS are its
6367 two operands.
6369 We return the simplified tree or 0 if no optimization is possible. */
6371 static tree
6372 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6373 tree lhs, tree rhs)
6375 /* If this is the "or" of two comparisons, we can do something if
6376 the comparisons are NE_EXPR. If this is the "and", we can do something
6377 if the comparisons are EQ_EXPR. I.e.,
6378 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6380 WANTED_CODE is this operation code. For single bit fields, we can
6381 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6382 comparison for one-bit fields. */
6384 enum tree_code wanted_code;
6385 enum tree_code lcode, rcode;
6386 tree ll_arg, lr_arg, rl_arg, rr_arg;
6387 tree ll_inner, lr_inner, rl_inner, rr_inner;
6388 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6389 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6390 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6391 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6392 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6393 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6394 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6395 scalar_int_mode lnmode, rnmode;
6396 tree ll_mask, lr_mask, rl_mask, rr_mask;
6397 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6398 tree l_const, r_const;
6399 tree lntype, rntype, result;
6400 HOST_WIDE_INT first_bit, end_bit;
6401 int volatilep;
6403 /* Start by getting the comparison codes. Fail if anything is volatile.
6404 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6405 it were surrounded with a NE_EXPR. */
6407 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6408 return 0;
6410 lcode = TREE_CODE (lhs);
6411 rcode = TREE_CODE (rhs);
6413 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6415 lhs = build2 (NE_EXPR, truth_type, lhs,
6416 build_int_cst (TREE_TYPE (lhs), 0));
6417 lcode = NE_EXPR;
6420 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6422 rhs = build2 (NE_EXPR, truth_type, rhs,
6423 build_int_cst (TREE_TYPE (rhs), 0));
6424 rcode = NE_EXPR;
6427 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6428 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6429 return 0;
6431 ll_arg = TREE_OPERAND (lhs, 0);
6432 lr_arg = TREE_OPERAND (lhs, 1);
6433 rl_arg = TREE_OPERAND (rhs, 0);
6434 rr_arg = TREE_OPERAND (rhs, 1);
6436 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6437 if (simple_operand_p (ll_arg)
6438 && simple_operand_p (lr_arg))
6440 if (operand_equal_p (ll_arg, rl_arg, 0)
6441 && operand_equal_p (lr_arg, rr_arg, 0))
6443 result = combine_comparisons (loc, code, lcode, rcode,
6444 truth_type, ll_arg, lr_arg);
6445 if (result)
6446 return result;
6448 else if (operand_equal_p (ll_arg, rr_arg, 0)
6449 && operand_equal_p (lr_arg, rl_arg, 0))
6451 result = combine_comparisons (loc, code, lcode,
6452 swap_tree_comparison (rcode),
6453 truth_type, ll_arg, lr_arg);
6454 if (result)
6455 return result;
6459 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6460 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6462 /* If the RHS can be evaluated unconditionally and its operands are
6463 simple, it wins to evaluate the RHS unconditionally on machines
6464 with expensive branches. In this case, this isn't a comparison
6465 that can be merged. */
6467 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6468 false) >= 2
6469 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6470 && simple_operand_p (rl_arg)
6471 && simple_operand_p (rr_arg))
6473 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6474 if (code == TRUTH_OR_EXPR
6475 && lcode == NE_EXPR && integer_zerop (lr_arg)
6476 && rcode == NE_EXPR && integer_zerop (rr_arg)
6477 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6478 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6479 return build2_loc (loc, NE_EXPR, truth_type,
6480 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6481 ll_arg, rl_arg),
6482 build_int_cst (TREE_TYPE (ll_arg), 0));
6484 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6485 if (code == TRUTH_AND_EXPR
6486 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6487 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6488 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6489 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6490 return build2_loc (loc, EQ_EXPR, truth_type,
6491 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6492 ll_arg, rl_arg),
6493 build_int_cst (TREE_TYPE (ll_arg), 0));
6496 /* See if the comparisons can be merged. Then get all the parameters for
6497 each side. */
6499 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6500 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6501 return 0;
6503 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6504 volatilep = 0;
6505 ll_inner = decode_field_reference (loc, &ll_arg,
6506 &ll_bitsize, &ll_bitpos, &ll_mode,
6507 &ll_unsignedp, &ll_reversep, &volatilep,
6508 &ll_mask, &ll_and_mask);
6509 lr_inner = decode_field_reference (loc, &lr_arg,
6510 &lr_bitsize, &lr_bitpos, &lr_mode,
6511 &lr_unsignedp, &lr_reversep, &volatilep,
6512 &lr_mask, &lr_and_mask);
6513 rl_inner = decode_field_reference (loc, &rl_arg,
6514 &rl_bitsize, &rl_bitpos, &rl_mode,
6515 &rl_unsignedp, &rl_reversep, &volatilep,
6516 &rl_mask, &rl_and_mask);
6517 rr_inner = decode_field_reference (loc, &rr_arg,
6518 &rr_bitsize, &rr_bitpos, &rr_mode,
6519 &rr_unsignedp, &rr_reversep, &volatilep,
6520 &rr_mask, &rr_and_mask);
6522 /* The inner operation on the lhs of each comparison must be the
6523 same if we are to be able to do anything.
6524 Then see if we have constants. If not, the same must be true for
6525 the rhs's. */
6526 if (volatilep
6527 || ll_reversep != rl_reversep
6528 || ll_inner == 0 || rl_inner == 0
6529 || ! operand_equal_p (ll_inner, rl_inner, 0))
6530 return 0;
6532 if (TREE_CODE (lr_arg) == INTEGER_CST
6533 && TREE_CODE (rr_arg) == INTEGER_CST)
6535 l_const = lr_arg, r_const = rr_arg;
6536 lr_reversep = ll_reversep;
6538 else if (lr_reversep != rr_reversep
6539 || lr_inner == 0 || rr_inner == 0
6540 || ! operand_equal_p (lr_inner, rr_inner, 0))
6541 return 0;
6542 else
6543 l_const = r_const = 0;
6545 /* If either comparison code is not correct for our logical operation,
6546 fail. However, we can convert a one-bit comparison against zero into
6547 the opposite comparison against that bit being set in the field. */
6549 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6550 if (lcode != wanted_code)
6552 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6554 /* Make the left operand unsigned, since we are only interested
6555 in the value of one bit. Otherwise we are doing the wrong
6556 thing below. */
6557 ll_unsignedp = 1;
6558 l_const = ll_mask;
6560 else
6561 return 0;
6564 /* This is analogous to the code for l_const above. */
6565 if (rcode != wanted_code)
6567 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6569 rl_unsignedp = 1;
6570 r_const = rl_mask;
6572 else
6573 return 0;
6576 /* See if we can find a mode that contains both fields being compared on
6577 the left. If we can't, fail. Otherwise, update all constants and masks
6578 to be relative to a field of that size. */
6579 first_bit = MIN (ll_bitpos, rl_bitpos);
6580 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6581 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6582 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6583 volatilep, &lnmode))
6584 return 0;
6586 lnbitsize = GET_MODE_BITSIZE (lnmode);
6587 lnbitpos = first_bit & ~ (lnbitsize - 1);
6588 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6589 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6591 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6593 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6594 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6597 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6598 size_int (xll_bitpos));
6599 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6600 size_int (xrl_bitpos));
6601 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6602 return 0;
6604 if (l_const)
6606 l_const = fold_convert_loc (loc, lntype, l_const);
6607 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6608 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6609 if (l_const == NULL_TREE)
6610 return 0;
6611 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6612 fold_build1_loc (loc, BIT_NOT_EXPR,
6613 lntype, ll_mask))))
6615 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6617 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6620 if (r_const)
6622 r_const = fold_convert_loc (loc, lntype, r_const);
6623 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6624 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6625 if (r_const == NULL_TREE)
6626 return 0;
6627 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6628 fold_build1_loc (loc, BIT_NOT_EXPR,
6629 lntype, rl_mask))))
6631 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6633 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6637 /* If the right sides are not constant, do the same for them. Also,
6638 disallow this optimization if a size, signedness or storage order
6639 mismatch occurs between the left and right sides. */
6640 if (l_const == 0)
6642 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6643 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6644 || ll_reversep != lr_reversep
6645 /* Make sure the two fields on the right
6646 correspond to the left without being swapped. */
6647 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6648 return 0;
6650 first_bit = MIN (lr_bitpos, rr_bitpos);
6651 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6652 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6653 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6654 volatilep, &rnmode))
6655 return 0;
6657 rnbitsize = GET_MODE_BITSIZE (rnmode);
6658 rnbitpos = first_bit & ~ (rnbitsize - 1);
6659 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6660 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6662 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6664 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6665 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6668 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6669 rntype, lr_mask),
6670 size_int (xlr_bitpos));
6671 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6672 rntype, rr_mask),
6673 size_int (xrr_bitpos));
6674 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6675 return 0;
6677 /* Make a mask that corresponds to both fields being compared.
6678 Do this for both items being compared. If the operands are the
6679 same size and the bits being compared are in the same position
6680 then we can do this by masking both and comparing the masked
6681 results. */
6682 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6683 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6684 if (lnbitsize == rnbitsize
6685 && xll_bitpos == xlr_bitpos
6686 && lnbitpos >= 0
6687 && rnbitpos >= 0)
6689 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6690 lntype, lnbitsize, lnbitpos,
6691 ll_unsignedp || rl_unsignedp, ll_reversep);
6692 if (! all_ones_mask_p (ll_mask, lnbitsize))
6693 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6695 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6696 rntype, rnbitsize, rnbitpos,
6697 lr_unsignedp || rr_unsignedp, lr_reversep);
6698 if (! all_ones_mask_p (lr_mask, rnbitsize))
6699 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6701 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6704 /* There is still another way we can do something: If both pairs of
6705 fields being compared are adjacent, we may be able to make a wider
6706 field containing them both.
6708 Note that we still must mask the lhs/rhs expressions. Furthermore,
6709 the mask must be shifted to account for the shift done by
6710 make_bit_field_ref. */
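/* For illustration: if A and B are adjacent 4-bit fields of one word
   compared against adjacent 4-bit fields of another word, the two
   comparisons can become one comparison of the containing 8-bit
   chunks, masked when the chunks hold bits outside the fields.  */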
6711 if (((ll_bitsize + ll_bitpos == rl_bitpos
6712 && lr_bitsize + lr_bitpos == rr_bitpos)
6713 || (ll_bitpos == rl_bitpos + rl_bitsize
6714 && lr_bitpos == rr_bitpos + rr_bitsize))
6715 && ll_bitpos >= 0
6716 && rl_bitpos >= 0
6717 && lr_bitpos >= 0
6718 && rr_bitpos >= 0)
6720 tree type;
6722 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6723 ll_bitsize + rl_bitsize,
6724 MIN (ll_bitpos, rl_bitpos),
6725 ll_unsignedp, ll_reversep);
6726 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6727 lr_bitsize + rr_bitsize,
6728 MIN (lr_bitpos, rr_bitpos),
6729 lr_unsignedp, lr_reversep);
6731 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6732 size_int (MIN (xll_bitpos, xrl_bitpos)));
6733 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6734 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6735 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6736 return 0;
6738 /* Convert to the smaller type before masking out unwanted bits. */
6739 type = lntype;
6740 if (lntype != rntype)
6742 if (lnbitsize > rnbitsize)
6744 lhs = fold_convert_loc (loc, rntype, lhs);
6745 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6746 type = rntype;
6748 else if (lnbitsize < rnbitsize)
6750 rhs = fold_convert_loc (loc, lntype, rhs);
6751 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6752 type = lntype;
6756 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6757 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6759 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6760 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6762 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6765 return 0;
6768 /* Handle the case of comparisons with constants. If there is something in
6769 common between the masks, those bits of the constants must be the same.
6770 If not, the condition is always false. Test for this to avoid generating
6771 incorrect code below. */
6772 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6773 if (! integer_zerop (result)
6774 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6775 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6777 if (wanted_code == NE_EXPR)
6779 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6780 return constant_boolean_node (true, truth_type);
6782 else
6784 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6785 return constant_boolean_node (false, truth_type);
6789 if (lnbitpos < 0)
6790 return 0;
6792 /* Construct the expression we will return. First get the component
6793 reference we will make. Unless the mask is all ones for the width of
6794 that field, perform the mask operation. Then compare with the
6795 merged constant. */
6796 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6797 lntype, lnbitsize, lnbitpos,
6798 ll_unsignedp || rl_unsignedp, ll_reversep);
6800 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6801 if (! all_ones_mask_p (ll_mask, lnbitsize))
6802 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6804 return build2_loc (loc, wanted_code, truth_type, result,
6805 const_binop (BIT_IOR_EXPR, l_const, r_const));
6808 /* T is an integer expression that is being multiplied, divided, or reduced
6809 modulo a constant C (CODE says which operation, and what kind of
6810 division or modulus). See if we can eliminate that operation by folding it with
6811 other operations already in T. WIDE_TYPE, if non-null, is a type that
6812 should be used for the computation if wider than our type.
6814 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6815 (X * 2) + (Y * 4). We must, however, be assured that either the original
6816 expression would not overflow or that overflow is undefined for the type
6817 in the language in question.
6819 If we return a non-null expression, it is an equivalent form of the
6820 original computation, but need not be in the original type.
6822 We set *STRICT_OVERFLOW_P to true if the return value depends on
6823 signed overflow being undefined. Otherwise we do not change
6824 *STRICT_OVERFLOW_P. */
6826 static tree
6827 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6828 bool *strict_overflow_p)
6830 /* To avoid exponential search depth, refuse to allow recursion past
6831 three levels. Beyond that (1) it's highly unlikely that we'll find
6832 something interesting and (2) we've probably processed it before
6833 when we built the inner expression. */
6835 static int depth;
6836 tree ret;
6838 if (depth > 3)
6839 return NULL;
6841 depth++;
6842 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6843 depth--;
6845 return ret;
6848 static tree
6849 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6850 bool *strict_overflow_p)
6852 tree type = TREE_TYPE (t);
6853 enum tree_code tcode = TREE_CODE (t);
6854 tree ctype = (wide_type != 0
6855 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6856 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6857 ? wide_type : type);
6858 tree t1, t2;
6859 bool same_p = tcode == code;
6860 tree op0 = NULL_TREE, op1 = NULL_TREE;
6861 bool sub_strict_overflow_p;
6863 /* Don't deal with constants of zero here; they confuse the code below. */
6864 if (integer_zerop (c))
6865 return NULL_TREE;
6867 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6868 op0 = TREE_OPERAND (t, 0);
6870 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6871 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6873 /* Note that we need not handle conditional operations here since fold
6874 already handles those cases. So just do arithmetic here. */
6875 switch (tcode)
6877 case INTEGER_CST:
6878 /* For a constant, we can always simplify if we are a multiply
6879 or (for divide and modulus) if it is a multiple of our constant. */
6880 if (code == MULT_EXPR
6881 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6882 TYPE_SIGN (type)))
6884 tree tem = const_binop (code, fold_convert (ctype, t),
6885 fold_convert (ctype, c));
6886 /* If the multiplication overflowed, we lost information on it.
6887 See PR68142 and PR69845. */
6888 if (TREE_OVERFLOW (tem))
6889 return NULL_TREE;
6890 return tem;
6892 break;
6894 CASE_CONVERT: case NON_LVALUE_EXPR:
6895 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6896 break;
6897 /* If op0 is an expression ... */
6898 if ((COMPARISON_CLASS_P (op0)
6899 || UNARY_CLASS_P (op0)
6900 || BINARY_CLASS_P (op0)
6901 || VL_EXP_CLASS_P (op0)
6902 || EXPRESSION_CLASS_P (op0))
6903 /* ... and has wrapping overflow, and its type is smaller
6904 than ctype, then we cannot pass through as widening. */
6905 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6906 && (TYPE_PRECISION (ctype)
6907 > TYPE_PRECISION (TREE_TYPE (op0))))
6908 /* ... or this is a truncation (t is narrower than op0),
6909 then we cannot pass through this narrowing. */
6910 || (TYPE_PRECISION (type)
6911 < TYPE_PRECISION (TREE_TYPE (op0)))
6912 /* ... or signedness changes for division or modulus,
6913 then we cannot pass through this conversion. */
6914 || (code != MULT_EXPR
6915 && (TYPE_UNSIGNED (ctype)
6916 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6917 /* ... or has undefined overflow while the converted to
6918 type has not, we cannot do the operation in the inner type
6919 as that would introduce undefined overflow. */
6920 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6921 && !TYPE_OVERFLOW_UNDEFINED (type))))
6922 break;
6924 /* Pass the constant down and see if we can make a simplification. If
6925 we can, replace this expression with the inner simplification for
6926 possible later conversion to our or some other type. */
6927 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6928 && TREE_CODE (t2) == INTEGER_CST
6929 && !TREE_OVERFLOW (t2)
6930 && (t1 = extract_muldiv (op0, t2, code,
6931 code == MULT_EXPR ? ctype : NULL_TREE,
6932 strict_overflow_p)) != 0)
6933 return t1;
6934 break;
6936 case ABS_EXPR:
6937 /* If widening the type changes it from signed to unsigned, then we
6938 must avoid building ABS_EXPR itself as unsigned. */
6939 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6941 tree cstype = (*signed_type_for) (ctype);
6942 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6943 != 0)
6945 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6946 return fold_convert (ctype, t1);
6948 break;
6950 /* If the constant is negative, we cannot simplify this. */
6951 if (tree_int_cst_sgn (c) == -1)
6952 break;
6953 /* FALLTHROUGH */
6954 case NEGATE_EXPR:
6955 /* For division and modulus, type can't be unsigned, as e.g.
6956 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6957 For signed types, even with wrapping overflow, this is fine. */
6958 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6959 break;
6960 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6961 != 0)
6962 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6963 break;
6965 case MIN_EXPR: case MAX_EXPR:
6966 /* If widening the type changes the signedness, then we can't perform
6967 this optimization as that changes the result. */
6968 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6969 break;
6971 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6972 sub_strict_overflow_p = false;
6973 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6974 &sub_strict_overflow_p)) != 0
6975 && (t2 = extract_muldiv (op1, c, code, wide_type,
6976 &sub_strict_overflow_p)) != 0)
6978 if (tree_int_cst_sgn (c) < 0)
6979 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6980 if (sub_strict_overflow_p)
6981 *strict_overflow_p = true;
6982 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6983 fold_convert (ctype, t2));
6985 break;
6987 case LSHIFT_EXPR: case RSHIFT_EXPR:
6988 /* If the second operand is constant, this is a multiplication
6989 or floor division by a power of two, so we can treat it that
6990 way unless the multiplier or divisor overflows. Signed
6991 left-shift overflow is implementation-defined rather than
6992 undefined in C90, so do not convert signed left shift into
6993 multiplication. */
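/* For illustration: for unsigned X, eliminating C == 4 from X << 3
   rewrites the shift as X * 8 below and recurses; the MULT_EXPR case
   then folds the constants together, yielding X * 32 when CODE is
   MULT_EXPR.  */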
6994 if (TREE_CODE (op1) == INTEGER_CST
6995 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6996 /* const_binop may not detect overflow correctly,
6997 so check for it explicitly here. */
6998 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6999 wi::to_wide (op1))
7000 && (t1 = fold_convert (ctype,
7001 const_binop (LSHIFT_EXPR, size_one_node,
7002 op1))) != 0
7003 && !TREE_OVERFLOW (t1))
7004 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7005 ? MULT_EXPR : FLOOR_DIV_EXPR,
7006 ctype,
7007 fold_convert (ctype, op0),
7008 t1),
7009 c, code, wide_type, strict_overflow_p);
7010 break;
7012 case PLUS_EXPR: case MINUS_EXPR:
7013 /* See if we can eliminate the operation on both sides. If we can, we
7014 can return a new PLUS or MINUS. If we can't, the only remaining
7015 cases where we can do anything are if the second operand is a
7016 constant. */
7017 sub_strict_overflow_p = false;
7018 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7019 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7020 if (t1 != 0 && t2 != 0
7021 && TYPE_OVERFLOW_WRAPS (ctype)
7022 && (code == MULT_EXPR
7023 /* If not multiplication, we can only do this if both operands
7024 are divisible by c. */
7025 || (multiple_of_p (ctype, op0, c)
7026 && multiple_of_p (ctype, op1, c))))
7028 if (sub_strict_overflow_p)
7029 *strict_overflow_p = true;
7030 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7031 fold_convert (ctype, t2));
7034 /* If this was a subtraction, negate OP1 and set it to be an addition.
7035 This simplifies the logic below. */
7036 if (tcode == MINUS_EXPR)
7038 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7039 /* If OP1 was not easily negatable, the constant may be OP0. */
7040 if (TREE_CODE (op0) == INTEGER_CST)
7042 std::swap (op0, op1);
7043 std::swap (t1, t2);
7047 if (TREE_CODE (op1) != INTEGER_CST)
7048 break;
7050 /* If either OP1 or C is negative, this optimization is not safe for
7051 some of the division and remainder types, while for others we need
7052 to change the code. */
7053 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7055 if (code == CEIL_DIV_EXPR)
7056 code = FLOOR_DIV_EXPR;
7057 else if (code == FLOOR_DIV_EXPR)
7058 code = CEIL_DIV_EXPR;
7059 else if (code != MULT_EXPR
7060 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7061 break;
7064 /* If it's a multiply or a division/modulus operation of a multiple
7065 of our constant, do the operation and verify it doesn't overflow. */
7066 if (code == MULT_EXPR
7067 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7068 TYPE_SIGN (type)))
7070 op1 = const_binop (code, fold_convert (ctype, op1),
7071 fold_convert (ctype, c));
7072 /* We allow the constant to overflow with wrapping semantics. */
7073 if (op1 == 0
7074 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7075 break;
7077 else
7078 break;
7080 /* If we have an unsigned type, we cannot widen the operation since it
7081 will change the result if the original computation overflowed. */
7082 if (TYPE_UNSIGNED (ctype) && ctype != type)
7083 break;
7085 /* The last case is if we are a multiply. In that case, we can
7086 apply the distributive law to commute the multiply and addition
7087 if the multiplication of the constants doesn't overflow
7088 and overflow is defined. With undefined overflow
7089 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7090 But fold_plusminus_mult_expr would factor back any power-of-two
7091 value so do not distribute in the first place in this case. */
7092 if (code == MULT_EXPR
7093 && TYPE_OVERFLOW_WRAPS (ctype)
7094 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7095 return fold_build2 (tcode, ctype,
7096 fold_build2 (code, ctype,
7097 fold_convert (ctype, op0),
7098 fold_convert (ctype, c)),
7099 op1);
7101 break;
7103 case MULT_EXPR:
7104 /* We have a special case here if we are doing something like
7105 (C * 8) % 4 since we know that's zero. */
7106 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7107 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7108 /* If the multiplication can overflow we cannot optimize this. */
7109 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7110 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7111 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7112 TYPE_SIGN (type)))
7114 *strict_overflow_p = true;
7115 return omit_one_operand (type, integer_zero_node, op0);
7118 /* ... fall through ... */
7120 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7121 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7122 /* If we can extract our operation from the LHS, do so and return a
7123 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7124 do something only if the second operand is a constant. */
7125 if (same_p
7126 && TYPE_OVERFLOW_WRAPS (ctype)
7127 && (t1 = extract_muldiv (op0, c, code, wide_type,
7128 strict_overflow_p)) != 0)
7129 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7130 fold_convert (ctype, op1));
7131 else if (tcode == MULT_EXPR && code == MULT_EXPR
7132 && TYPE_OVERFLOW_WRAPS (ctype)
7133 && (t1 = extract_muldiv (op1, c, code, wide_type,
7134 strict_overflow_p)) != 0)
7135 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7136 fold_convert (ctype, t1));
7137 else if (TREE_CODE (op1) != INTEGER_CST)
7138 return 0;
7140 /* If these are the same operation types, we can associate them
7141 assuming no overflow. */
7142 if (tcode == code)
7144 bool overflow_p = false;
7145 wi::overflow_type overflow_mul;
7146 signop sign = TYPE_SIGN (ctype);
7147 unsigned prec = TYPE_PRECISION (ctype);
7148 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7149 wi::to_wide (c, prec),
7150 sign, &overflow_mul);
7151 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7152 if (overflow_mul
7153 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7154 overflow_p = true;
7155 if (!overflow_p)
7156 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7157 wide_int_to_tree (ctype, mul));
7160 /* If these operations "cancel" each other, we have the main
7161 optimizations of this pass, which occur when either constant is a
7162 multiple of the other, in which case we replace this with either an
7163 operation of CODE or TCODE.
7165 If we have an unsigned type, we cannot do this since it will change
7166 the result if the original computation overflowed. */
7167 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7168 && !TYPE_OVERFLOW_SANITIZED (ctype)
7169 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7170 || (tcode == MULT_EXPR
7171 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7172 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7173 && code != MULT_EXPR)))
7175 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7176 TYPE_SIGN (type)))
7178 *strict_overflow_p = true;
7179 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7180 fold_convert (ctype,
7181 const_binop (TRUNC_DIV_EXPR,
7182 op1, c)));
7184 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7185 TYPE_SIGN (type)))
7187 *strict_overflow_p = true;
7188 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7189 fold_convert (ctype,
7190 const_binop (TRUNC_DIV_EXPR,
7191 c, op1)));
7194 break;
7196 default:
7197 break;
7200 return 0;
7203 /* Return a node which has the indicated constant VALUE (either 0 or
7204 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7205 and is of the indicated TYPE. */
7207 tree
7208 constant_boolean_node (bool value, tree type)
7210 if (type == integer_type_node)
7211 return value ? integer_one_node : integer_zero_node;
7212 else if (type == boolean_type_node)
7213 return value ? boolean_true_node : boolean_false_node;
7214 else if (VECTOR_TYPE_P (type))
7215 return build_vector_from_val (type,
7216 build_int_cst (TREE_TYPE (type),
7217 value ? -1 : 0));
7218 else
7219 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7223 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7224 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7225 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7226 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7227 COND is the first argument to CODE; otherwise (as in the example
7228 given here), it is the second argument. TYPE is the type of the
7229 original expression. Return NULL_TREE if no simplification is
7230 possible. */
7232 static tree
7233 fold_binary_op_with_conditional_arg (location_t loc,
7234 enum tree_code code,
7235 tree type, tree op0, tree op1,
7236 tree cond, tree arg, int cond_first_p)
7238 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7239 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7240 tree test, true_value, false_value;
7241 tree lhs = NULL_TREE;
7242 tree rhs = NULL_TREE;
7243 enum tree_code cond_code = COND_EXPR;
7245 /* Do not move possibly trapping operations into the conditional as this
7246 pessimizes code and causes gimplification issues when applied late. */
7247 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7248 ANY_INTEGRAL_TYPE_P (type)
7249 && TYPE_OVERFLOW_TRAPS (type), op1))
7250 return NULL_TREE;
7252 if (TREE_CODE (cond) == COND_EXPR
7253 || TREE_CODE (cond) == VEC_COND_EXPR)
7255 test = TREE_OPERAND (cond, 0);
7256 true_value = TREE_OPERAND (cond, 1);
7257 false_value = TREE_OPERAND (cond, 2);
7258 /* If this operand is an expression that throws, it does not make
7259 sense to try to perform a logical or arithmetic operation
7260 involving it. */
7261 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7262 lhs = true_value;
7263 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7264 rhs = false_value;
7266 else if (!(TREE_CODE (type) != VECTOR_TYPE
7267 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7269 tree testtype = TREE_TYPE (cond);
7270 test = cond;
7271 true_value = constant_boolean_node (true, testtype);
7272 false_value = constant_boolean_node (false, testtype);
7274 else
7275 /* Detect the case of mixing vector and scalar types - bail out. */
7276 return NULL_TREE;
7278 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7279 cond_code = VEC_COND_EXPR;
7281 /* This transformation is only worthwhile if we don't have to wrap ARG
7282 in a SAVE_EXPR and the operation can be simplified without recursing
7283 on at least one of the branches once it's pushed inside the COND_EXPR. */
7284 if (!TREE_CONSTANT (arg)
7285 && (TREE_SIDE_EFFECTS (arg)
7286 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7287 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7288 return NULL_TREE;
7290 arg = fold_convert_loc (loc, arg_type, arg);
7291 if (lhs == 0)
7293 true_value = fold_convert_loc (loc, cond_type, true_value);
7294 if (cond_first_p)
7295 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7296 else
7297 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7299 if (rhs == 0)
7301 false_value = fold_convert_loc (loc, cond_type, false_value);
7302 if (cond_first_p)
7303 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7304 else
7305 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7308 /* Check that we have simplified at least one of the branches. */
7309 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7310 return NULL_TREE;
7312 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
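/* A hypothetical example: for `1 + (b ? 0 : 2)', CODE is PLUS_EXPR,
   ARG is `1' and COND is `b ? 0 : 2'.  Both branches fold to
   constants, so the checks above are satisfied and the result is
   `b ? 1 : 3'.  */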
7316 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7318 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7319 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7320 if ARG - ZERO_ARG is the same as ARG.
7322 If ARG is NULL, check for any value of type TYPE.
7324 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7325 and finite. The problematic cases are when X is zero, and its mode
7326 has signed zeros. In the case of rounding towards -infinity,
7327 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7328 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7330 bool
7331 fold_real_zero_addition_p (const_tree type, const_tree arg,
7332 const_tree zero_arg, int negate)
7334 if (!real_zerop (zero_arg))
7335 return false;
7337 /* Don't allow the fold with -fsignaling-nans. */
7338 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7339 return false;
7341 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7342 if (!HONOR_SIGNED_ZEROS (type))
7343 return true;
7345 /* There is no case that is safe for all rounding modes. */
7346 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7347 return false;
7349 /* In a vector or complex, we would need to check the sign of all zeros. */
7350 if (TREE_CODE (zero_arg) == VECTOR_CST)
7351 zero_arg = uniform_vector_p (zero_arg);
7352 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7353 return false;
7355 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7356 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7357 negate = !negate;
7359 /* The mode has signed zeros, and we have to honor their sign.
7360 In this situation, there are only two cases we can return true for.
7361 (i) X - 0 is the same as X with default rounding.
7362 (ii) X + 0 is X when X can't possibly be -0.0. */
7363 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
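/* For instance, with signed zeros honored and default rounding,
   X - 0.0 still folds to X (the NEGATE case), but X + 0.0 folds only
   if X is known not to be -0.0, because -0.0 + 0.0 is +0.0.  */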
7366 /* Subroutine of match.pd that optimizes comparisons of a division by
7367 a nonzero integer constant against an integer constant, i.e.
7368 X/C1 op C2.
7370 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7371 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7373 enum tree_code
7374 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7375 tree *hi, bool *neg_overflow)
7377 tree prod, tmp, type = TREE_TYPE (c1);
7378 signop sign = TYPE_SIGN (type);
7379 wi::overflow_type overflow;
7381 /* We have to do this the hard way to detect unsigned overflow.
7382 prod = int_const_binop (MULT_EXPR, c1, c2); */
7383 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7384 prod = force_fit_type (type, val, -1, overflow);
7385 *neg_overflow = false;
7387 if (sign == UNSIGNED)
7389 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7390 *lo = prod;
7392 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7393 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7394 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7396 else if (tree_int_cst_sgn (c1) >= 0)
7398 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7399 switch (tree_int_cst_sgn (c2))
7401 case -1:
7402 *neg_overflow = true;
7403 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7404 *hi = prod;
7405 break;
7407 case 0:
7408 *lo = fold_negate_const (tmp, type);
7409 *hi = tmp;
7410 break;
7412 case 1:
7413 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7414 *lo = prod;
7415 break;
7417 default:
7418 gcc_unreachable ();
7421 else
7423 /* A negative divisor reverses the relational operators. */
7424 code = swap_tree_comparison (code);
7426 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7427 switch (tree_int_cst_sgn (c2))
7429 case -1:
7430 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7431 *lo = prod;
7432 break;
7434 case 0:
7435 *hi = fold_negate_const (tmp, type);
7436 *lo = tmp;
7437 break;
7439 case 1:
7440 *neg_overflow = true;
7441 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7442 *hi = prod;
7443 break;
7445 default:
7446 gcc_unreachable ();
7450 if (code != EQ_EXPR && code != NE_EXPR)
7451 return code;
7453 if (TREE_OVERFLOW (*lo)
7454 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7455 *lo = NULL_TREE;
7456 if (TREE_OVERFLOW (*hi)
7457 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7458 *hi = NULL_TREE;
7460 return code;
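/* An illustrative example (values assumed, not from a testcase): for
   unsigned X, the comparison X / 3 == 2 has C1 = 3 and C2 = 2, so
   PROD is 6 and TMP is 2; *LO becomes 6 and *HI becomes 8, and the
   caller can rewrite the comparison as 6 <= X && X <= 8.  */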
7463 /* Test whether it is preferable to swap two operands, ARG0 and
7464 ARG1, for example because ARG0 is an integer constant and ARG1
7465 isn't. */
7467 bool
7468 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7470 if (CONSTANT_CLASS_P (arg1))
7471 return false;
7472 if (CONSTANT_CLASS_P (arg0))
7473 return true;
7475 STRIP_NOPS (arg0);
7476 STRIP_NOPS (arg1);
7478 if (TREE_CONSTANT (arg1))
7479 return false;
7480 if (TREE_CONSTANT (arg0))
7481 return true;
7483 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7484 for commutative and comparison operators. Ensuring a canonical
7485 form allows the optimizers to find additional redundancies without
7486 having to explicitly check for both orderings. */
7487 if (TREE_CODE (arg0) == SSA_NAME
7488 && TREE_CODE (arg1) == SSA_NAME
7489 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7490 return true;
7492 /* Put SSA_NAMEs last. */
7493 if (TREE_CODE (arg1) == SSA_NAME)
7494 return false;
7495 if (TREE_CODE (arg0) == SSA_NAME)
7496 return true;
7498 /* Put variables last. */
7499 if (DECL_P (arg1))
7500 return false;
7501 if (DECL_P (arg0))
7502 return true;
7504 return false;
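/* For example, this returns true for `5 + x' because ARG0 is a
   constant and ARG1 isn't, so fold canonicalizes the expression as
   `x + 5'; keeping one canonical operand order lets later passes spot
   redundancies without checking both orderings.  */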
7508 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7509 means A >= Y && A != MAX, but in this case we know that
7510 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7512 static tree
7513 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7515 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7517 if (TREE_CODE (bound) == LT_EXPR)
7518 a = TREE_OPERAND (bound, 0);
7519 else if (TREE_CODE (bound) == GT_EXPR)
7520 a = TREE_OPERAND (bound, 1);
7521 else
7522 return NULL_TREE;
7524 typea = TREE_TYPE (a);
7525 if (!INTEGRAL_TYPE_P (typea)
7526 && !POINTER_TYPE_P (typea))
7527 return NULL_TREE;
7529 if (TREE_CODE (ineq) == LT_EXPR)
7531 a1 = TREE_OPERAND (ineq, 1);
7532 y = TREE_OPERAND (ineq, 0);
7534 else if (TREE_CODE (ineq) == GT_EXPR)
7536 a1 = TREE_OPERAND (ineq, 0);
7537 y = TREE_OPERAND (ineq, 1);
7539 else
7540 return NULL_TREE;
7542 if (TREE_TYPE (a1) != typea)
7543 return NULL_TREE;
7545 if (POINTER_TYPE_P (typea))
7547 /* Convert the pointers to integers before taking the difference. */
7548 tree ta = fold_convert_loc (loc, ssizetype, a);
7549 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7550 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7552 else
7553 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7555 if (!diff || !integer_onep (diff))
7556 return NULL_TREE;
7558 return fold_build2_loc (loc, GE_EXPR, type, a, y);
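/* A sketch with hypothetical operands: for BOUND `a < x' and INEQ
   `y < a + 1', the difference (a + 1) - a folds to 1, so the result
   is `a >= y'; this is safe because `a < x' already rules out
   a == TYPE_MAX.  */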
7561 /* Fold a sum or difference of at least one multiplication.
7562 Returns the folded tree or NULL if no simplification could be made. */
7564 static tree
7565 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7566 tree arg0, tree arg1)
7568 tree arg00, arg01, arg10, arg11;
7569 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7571 /* (A * C) +- (B * C) -> (A+-B) * C.
7572 (A * C) +- A -> A * (C+-1).
7573 We are most concerned about the case where C is a constant,
7574 but other combinations show up during loop reduction. Since
7575 it is not difficult, try all four possibilities. */
7577 if (TREE_CODE (arg0) == MULT_EXPR)
7579 arg00 = TREE_OPERAND (arg0, 0);
7580 arg01 = TREE_OPERAND (arg0, 1);
7582 else if (TREE_CODE (arg0) == INTEGER_CST)
7584 arg00 = build_one_cst (type);
7585 arg01 = arg0;
7587 else
7589 /* We cannot generate constant 1 for fract. */
7590 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7591 return NULL_TREE;
7592 arg00 = arg0;
7593 arg01 = build_one_cst (type);
7595 if (TREE_CODE (arg1) == MULT_EXPR)
7597 arg10 = TREE_OPERAND (arg1, 0);
7598 arg11 = TREE_OPERAND (arg1, 1);
7600 else if (TREE_CODE (arg1) == INTEGER_CST)
7602 arg10 = build_one_cst (type);
7603 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7604 the purpose of this canonicalization. */
7605 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7606 && negate_expr_p (arg1)
7607 && code == PLUS_EXPR)
7609 arg11 = negate_expr (arg1);
7610 code = MINUS_EXPR;
7612 else
7613 arg11 = arg1;
7615 else
7617 /* We cannot generate constant 1 for fract. */
7618 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7619 return NULL_TREE;
7620 arg10 = arg1;
7621 arg11 = build_one_cst (type);
7623 same = NULL_TREE;
7625 /* Prefer factoring a common non-constant. */
7626 if (operand_equal_p (arg00, arg10, 0))
7627 same = arg00, alt0 = arg01, alt1 = arg11;
7628 else if (operand_equal_p (arg01, arg11, 0))
7629 same = arg01, alt0 = arg00, alt1 = arg10;
7630 else if (operand_equal_p (arg00, arg11, 0))
7631 same = arg00, alt0 = arg01, alt1 = arg10;
7632 else if (operand_equal_p (arg01, arg10, 0))
7633 same = arg01, alt0 = arg00, alt1 = arg11;
7635 /* No identical multiplicands; see if we can find a common
7636 power-of-two factor in non-power-of-two multiplies. This
7637 can help in multi-dimensional array access. */
7638 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7640 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7641 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7642 HOST_WIDE_INT tmp;
7643 bool swap = false;
7644 tree maybe_same;
7646 /* Move min of absolute values to int11. */
7647 if (absu_hwi (int01) < absu_hwi (int11))
7649 tmp = int01, int01 = int11, int11 = tmp;
7650 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7651 maybe_same = arg01;
7652 swap = true;
7654 else
7655 maybe_same = arg11;
7657 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7658 if (factor > 1
7659 && pow2p_hwi (factor)
7660 && (int01 & (factor - 1)) == 0
7661 /* The remainder should not be a constant, otherwise we
7662 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7663 increases the number of multiplications needed. */
7664 && TREE_CODE (arg10) != INTEGER_CST)
7666 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7667 build_int_cst (TREE_TYPE (arg00),
7668 int01 / int11));
7669 alt1 = arg10;
7670 same = maybe_same;
7671 if (swap)
7672 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7676 if (!same)
7677 return NULL_TREE;
7679 if (! ANY_INTEGRAL_TYPE_P (type)
7680 || TYPE_OVERFLOW_WRAPS (type)
7681 /* We are neither factoring zero nor minus one. */
7682 || TREE_CODE (same) == INTEGER_CST)
7683 return fold_build2_loc (loc, MULT_EXPR, type,
7684 fold_build2_loc (loc, code, type,
7685 fold_convert_loc (loc, type, alt0),
7686 fold_convert_loc (loc, type, alt1)),
7687 fold_convert_loc (loc, type, same));
7689 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7690 same may be minus one and thus the multiplication may overflow. Perform
7691 the sum operation in an unsigned type. */
7692 tree utype = unsigned_type_for (type);
7693 tree tem = fold_build2_loc (loc, code, utype,
7694 fold_convert_loc (loc, utype, alt0),
7695 fold_convert_loc (loc, utype, alt1));
7696 /* If the sum evaluated to a constant other than the minimum (signed)
7697 value, the multiplication cannot overflow. */
7698 if (TREE_CODE (tem) == INTEGER_CST
7699 && (wi::to_wide (tem)
7700 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7701 return fold_build2_loc (loc, MULT_EXPR, type,
7702 fold_convert (type, tem), same);
7704 /* Do not resort to unsigned multiplication because
7705 we lose the no-overflow property of the expression. */
7706 return NULL_TREE;
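/* An example of the power-of-two path above (operands hypothetical):
   i * 8 + j * 4 has no identical multiplicand, but 4 divides 8, so
   the sum is refactored as (i * 2 + j) * 4.  Since the common factor
   4 is an INTEGER_CST, the overflow check above also passes without
   the unsigned fallback.  */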
7709 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7710 specified by EXPR into the buffer PTR of length LEN bytes.
7711 Return the number of bytes placed in the buffer, or zero
7712 upon failure. */
7714 static int
7715 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7717 tree type = TREE_TYPE (expr);
7718 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7719 int byte, offset, word, words;
7720 unsigned char value;
7722 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7723 return 0;
7724 if (off == -1)
7725 off = 0;
7727 if (ptr == NULL)
7728 /* Dry run. */
7729 return MIN (len, total_bytes - off);
7731 words = total_bytes / UNITS_PER_WORD;
7733 for (byte = 0; byte < total_bytes; byte++)
7735 int bitpos = byte * BITS_PER_UNIT;
7736 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7737 number of bytes. */
7738 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7740 if (total_bytes > UNITS_PER_WORD)
7742 word = byte / UNITS_PER_WORD;
7743 if (WORDS_BIG_ENDIAN)
7744 word = (words - 1) - word;
7745 offset = word * UNITS_PER_WORD;
7746 if (BYTES_BIG_ENDIAN)
7747 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7748 else
7749 offset += byte % UNITS_PER_WORD;
7751 else
7752 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7753 if (offset >= off && offset - off < len)
7754 ptr[offset - off] = value;
7756 return MIN (len, total_bytes - off);
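/* As an illustration (assuming 8-bit bytes): the 32-bit constant
   0x01020304 encodes as the bytes { 0x04, 0x03, 0x02, 0x01 } on a
   little-endian target and { 0x01, 0x02, 0x03, 0x04 } on a
   big-endian one.  */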
7760 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7761 specified by EXPR into the buffer PTR of length LEN bytes.
7762 Return the number of bytes placed in the buffer, or zero
7763 upon failure. */
7765 static int
7766 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7768 tree type = TREE_TYPE (expr);
7769 scalar_mode mode = SCALAR_TYPE_MODE (type);
7770 int total_bytes = GET_MODE_SIZE (mode);
7771 FIXED_VALUE_TYPE value;
7772 tree i_value, i_type;
7774 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7775 return 0;
7777 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7779 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7780 return 0;
7782 value = TREE_FIXED_CST (expr);
7783 i_value = double_int_to_tree (i_type, value.data);
7785 return native_encode_int (i_value, ptr, len, off);
7789 /* Subroutine of native_encode_expr. Encode the REAL_CST
7790 specified by EXPR into the buffer PTR of length LEN bytes.
7791 Return the number of bytes placed in the buffer, or zero
7792 upon failure. */
7794 static int
7795 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7797 tree type = TREE_TYPE (expr);
7798 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7799 int byte, offset, word, words, bitpos;
7800 unsigned char value;
7802 /* There are always 32 bits in each long, no matter the size of
7803 the host's long. We handle floating point representations with
7804 up to 192 bits. */
7805 long tmp[6];
7807 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7808 return 0;
7809 if (off == -1)
7810 off = 0;
7812 if (ptr == NULL)
7813 /* Dry run. */
7814 return MIN (len, total_bytes - off);
7816 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7818 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7820 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7821 bitpos += BITS_PER_UNIT)
7823 byte = (bitpos / BITS_PER_UNIT) & 3;
7824 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7826 if (UNITS_PER_WORD < 4)
7828 word = byte / UNITS_PER_WORD;
7829 if (WORDS_BIG_ENDIAN)
7830 word = (words - 1) - word;
7831 offset = word * UNITS_PER_WORD;
7832 if (BYTES_BIG_ENDIAN)
7833 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7834 else
7835 offset += byte % UNITS_PER_WORD;
7837 else
7839 offset = byte;
7840 if (BYTES_BIG_ENDIAN)
7842 /* Reverse bytes within each long, or within the entire float
7843 if it's smaller than a long (for HFmode). */
7844 offset = MIN (3, total_bytes - 1) - offset;
7845 gcc_assert (offset >= 0);
7848 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7849 if (offset >= off
7850 && offset - off < len)
7851 ptr[offset - off] = value;
7853 return MIN (len, total_bytes - off);
7856 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7857 specified by EXPR into the buffer PTR of length LEN bytes.
7858 Return the number of bytes placed in the buffer, or zero
7859 upon failure. */
7861 static int
7862 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7864 int rsize, isize;
7865 tree part;
7867 part = TREE_REALPART (expr);
7868 rsize = native_encode_expr (part, ptr, len, off);
7869 if (off == -1 && rsize == 0)
7870 return 0;
7871 part = TREE_IMAGPART (expr);
7872 if (off != -1)
7873 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7874 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7875 len - rsize, off);
7876 if (off == -1 && isize != rsize)
7877 return 0;
7878 return rsize + isize;
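/* For example, a _Complex double constant is laid out as the real
   part at offset 0 followed by the imaginary part at offset 8
   (assuming 8-byte doubles); the OFF adjustment above lets a read
   start inside the imaginary part.  */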
7881 /* Like native_encode_vector, but only encode the first COUNT elements.
7882 The other arguments are as for native_encode_vector. */
7884 static int
7885 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7886 int off, unsigned HOST_WIDE_INT count)
7888 tree itype = TREE_TYPE (TREE_TYPE (expr));
7889 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7890 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7892 /* This is the only case in which elements can be smaller than a byte.
7893 Element 0 is always in the lsb of the containing byte. */
7894 unsigned int elt_bits = TYPE_PRECISION (itype);
7895 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7896 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7897 return 0;
7899 if (off == -1)
7900 off = 0;
7902 /* Zero the buffer and then set bits later where necessary. */
7903 int extract_bytes = MIN (len, total_bytes - off);
7904 if (ptr)
7905 memset (ptr, 0, extract_bytes);
7907 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7908 unsigned int first_elt = off * elts_per_byte;
7909 unsigned int extract_elts = extract_bytes * elts_per_byte;
7910 for (unsigned int i = 0; i < extract_elts; ++i)
7912 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7913 if (TREE_CODE (elt) != INTEGER_CST)
7914 return 0;
7916 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7918 unsigned int bit = i * elt_bits;
7919 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7922 return extract_bytes;
7925 int offset = 0;
7926 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7927 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7929 if (off >= size)
7931 off -= size;
7932 continue;
7934 tree elem = VECTOR_CST_ELT (expr, i);
7935 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7936 len - offset, off);
7937 if ((off == -1 && res != size) || res == 0)
7938 return 0;
7939 offset += res;
7940 if (offset >= len)
7941 return (off == -1 && i < count - 1) ? 0 : offset;
7942 if (off != -1)
7943 off = 0;
7945 return offset;
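/* An example of the sub-byte boolean case above: the four 1-bit
   elements { 1, 0, 1, 1 } pack into the single byte 0x0d, with
   element 0 in the least significant bit.  */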
7948 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7949 specified by EXPR into the buffer PTR of length LEN bytes.
7950 Return the number of bytes placed in the buffer, or zero
7951 upon failure. */
7953 static int
7954 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7956 unsigned HOST_WIDE_INT count;
7957 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7958 return 0;
7959 return native_encode_vector_part (expr, ptr, len, off, count);
7963 /* Subroutine of native_encode_expr. Encode the STRING_CST
7964 specified by EXPR into the buffer PTR of length LEN bytes.
7965 Return the number of bytes placed in the buffer, or zero
7966 upon failure. */
7968 static int
7969 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7971 tree type = TREE_TYPE (expr);
7973 /* Wide-char strings are encoded in target byte-order so natively
7974 encoding them is trivial. */
7975 if (BITS_PER_UNIT != CHAR_BIT
7976 || TREE_CODE (type) != ARRAY_TYPE
7977 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7978 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7979 return 0;
7981 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7982 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7983 return 0;
7984 if (off == -1)
7985 off = 0;
7986 len = MIN (total_bytes - off, len);
7987 if (ptr == NULL)
7988 /* Dry run. */;
7989 else
7991 int written = 0;
7992 if (off < TREE_STRING_LENGTH (expr))
7994 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7995 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7997 memset (ptr + written, 0, len - written);
7999 return len;
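/* For instance, the STRING_CST "ab" initializing a char[4] encodes
   as { 0x61, 0x62, 0x00, 0x00 } on an ASCII target; bytes beyond
   TREE_STRING_LENGTH are zero-filled.  */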
8003 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8004 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8005 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8006 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8007 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8008 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8009 Return the number of bytes placed in the buffer, or zero upon failure. */
8011 int
8012 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8014 /* We don't support starting at negative offset and -1 is special. */
8015 if (off < -1)
8016 return 0;
8018 switch (TREE_CODE (expr))
8020 case INTEGER_CST:
8021 return native_encode_int (expr, ptr, len, off);
8023 case REAL_CST:
8024 return native_encode_real (expr, ptr, len, off);
8026 case FIXED_CST:
8027 return native_encode_fixed (expr, ptr, len, off);
8029 case COMPLEX_CST:
8030 return native_encode_complex (expr, ptr, len, off);
8032 case VECTOR_CST:
8033 return native_encode_vector (expr, ptr, len, off);
8035 case STRING_CST:
8036 return native_encode_string (expr, ptr, len, off);
8038 default:
8039 return 0;
8043 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8044 and larger than or equal to FIELDSIZE bytes, with underlying mode
8045 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8046 works in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8048 tree
8049 find_bitfield_repr_type (int fieldsize, int len)
8051 machine_mode mode;
8052 for (int pass = 0; pass < 2; pass++)
8054 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8055 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8056 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8057 && known_eq (GET_MODE_PRECISION (mode),
8058 GET_MODE_BITSIZE (mode))
8059 && known_le (GET_MODE_SIZE (mode), len))
8061 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8062 if (ret && TYPE_MODE (ret) == mode)
8063 return ret;
8067 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8068 if (int_n_enabled_p[i]
8069 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8070 && int_n_trees[i].unsigned_type)
8072 tree ret = int_n_trees[i].unsigned_type;
8073 mode = TYPE_MODE (ret);
8074 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8075 && known_eq (GET_MODE_PRECISION (mode),
8076 GET_MODE_BITSIZE (mode))
8077 && known_le (GET_MODE_SIZE (mode), len))
8078 return ret;
8081 return NULL_TREE;
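/* For example, FIELDSIZE 3 and LEN 8 would typically yield the
   unsigned type for SImode: the smallest integer mode of at least
   three bytes whose precision equals its bitsize.  Which mode is
   chosen is of course target-dependent.  */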
8084 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8085 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8086 to be non-NULL and OFF zero), then in addition to filling the
8087 bytes pointed to by PTR with the value, also clear any bits pointed
8088 to by MASK that are known to be initialized, keeping them as-is for
8089 e.g. uninitialized padding bits or uninitialized fields. */
8091 int
8092 native_encode_initializer (tree init, unsigned char *ptr, int len,
8093 int off, unsigned char *mask)
8095 int r;
8097 /* We don't support starting at negative offset and -1 is special. */
8098 if (off < -1 || init == NULL_TREE)
8099 return 0;
8101 gcc_assert (mask == NULL || (off == 0 && ptr));
8103 STRIP_NOPS (init);
8104 switch (TREE_CODE (init))
8106 case VIEW_CONVERT_EXPR:
8107 case NON_LVALUE_EXPR:
8108 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8109 mask);
8110 default:
8111 r = native_encode_expr (init, ptr, len, off);
8112 if (mask)
8113 memset (mask, 0, r);
8114 return r;
8115 case CONSTRUCTOR:
8116 tree type = TREE_TYPE (init);
8117 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8118 if (total_bytes < 0)
8119 return 0;
8120 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8121 return 0;
8122 int o = off == -1 ? 0 : off;
8123 if (TREE_CODE (type) == ARRAY_TYPE)
8125 tree min_index;
8126 unsigned HOST_WIDE_INT cnt;
8127 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8128 constructor_elt *ce;
8130 if (!TYPE_DOMAIN (type)
8131 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8132 return 0;
8134 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8135 if (fieldsize <= 0)
8136 return 0;
8138 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8139 if (ptr)
8140 memset (ptr, '\0', MIN (total_bytes - off, len));
8142 for (cnt = 0; ; cnt++)
8144 tree val = NULL_TREE, index = NULL_TREE;
8145 HOST_WIDE_INT pos = curpos, count = 0;
8146 bool full = false;
8147 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8149 val = ce->value;
8150 index = ce->index;
8152 else if (mask == NULL
8153 || CONSTRUCTOR_NO_CLEARING (init)
8154 || curpos >= total_bytes)
8155 break;
8156 else
8157 pos = total_bytes;
8159 if (index && TREE_CODE (index) == RANGE_EXPR)
8161 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8162 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8163 return 0;
8164 offset_int wpos
8165 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8166 - wi::to_offset (min_index),
8167 TYPE_PRECISION (sizetype));
8168 wpos *= fieldsize;
8169 if (!wi::fits_shwi_p (wpos))
8170 return 0;
8171 pos = wpos.to_shwi ();
8172 offset_int wcount
8173 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8174 - wi::to_offset (TREE_OPERAND (index, 0)),
8175 TYPE_PRECISION (sizetype));
8176 if (!wi::fits_shwi_p (wcount))
8177 return 0;
8178 count = wcount.to_shwi ();
8180 else if (index)
8182 if (TREE_CODE (index) != INTEGER_CST)
8183 return 0;
8184 offset_int wpos
8185 = wi::sext (wi::to_offset (index)
8186 - wi::to_offset (min_index),
8187 TYPE_PRECISION (sizetype));
8188 wpos *= fieldsize;
8189 if (!wi::fits_shwi_p (wpos))
8190 return 0;
8191 pos = wpos.to_shwi ();
8194 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8196 if (valueinit == -1)
8198 tree zero = build_zero_cst (TREE_TYPE (type));
8199 r = native_encode_initializer (zero, ptr + curpos,
8200 fieldsize, 0,
8201 mask + curpos);
8202 if (TREE_CODE (zero) == CONSTRUCTOR)
8203 ggc_free (zero);
8204 if (!r)
8205 return 0;
8206 valueinit = curpos;
8207 curpos += fieldsize;
8209 while (curpos != pos)
8211 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8212 memcpy (mask + curpos, mask + valueinit, fieldsize);
8213 curpos += fieldsize;
8217 curpos = pos;
8218 if (val)
8221 if (off == -1
8222 || (curpos >= off
8223 && (curpos + fieldsize
8224 <= (HOST_WIDE_INT) off + len)))
8226 if (full)
8228 if (ptr)
8229 memcpy (ptr + (curpos - o), ptr + (pos - o),
8230 fieldsize);
8231 if (mask)
8232 memcpy (mask + curpos, mask + pos, fieldsize);
8234 else if (!native_encode_initializer (val,
8236 ? ptr + curpos - o
8237 : NULL,
8238 fieldsize,
8239 off == -1 ? -1
8240 : 0,
8241 mask
8242 ? mask + curpos
8243 : NULL))
8244 return 0;
8245 else
8247 full = true;
8248 pos = curpos;
8251 else if (curpos + fieldsize > off
8252 && curpos < (HOST_WIDE_INT) off + len)
8254 /* Partial overlap. */
8255 unsigned char *p = NULL;
8256 int no = 0;
8257 int l;
8258 gcc_assert (mask == NULL);
8259 if (curpos >= off)
8261 if (ptr)
8262 p = ptr + curpos - off;
8263 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8264 fieldsize);
8266 else
8268 p = ptr;
8269 no = off - curpos;
8270 l = len;
8272 if (!native_encode_initializer (val, p, l, no, NULL))
8273 return 0;
8275 curpos += fieldsize;
8277 while (count-- != 0);
8279 return MIN (total_bytes - off, len);
8281 else if (TREE_CODE (type) == RECORD_TYPE
8282 || TREE_CODE (type) == UNION_TYPE)
8284 unsigned HOST_WIDE_INT cnt;
8285 constructor_elt *ce;
8286 tree fld_base = TYPE_FIELDS (type);
8287 tree to_free = NULL_TREE;
8289 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8290 if (ptr != NULL)
8291 memset (ptr, '\0', MIN (total_bytes - o, len));
8292 for (cnt = 0; ; cnt++)
8294 tree val = NULL_TREE, field = NULL_TREE;
8295 HOST_WIDE_INT pos = 0, fieldsize;
8296 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8298 if (to_free)
8300 ggc_free (to_free);
8301 to_free = NULL_TREE;
8304 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8306 val = ce->value;
8307 field = ce->index;
8308 if (field == NULL_TREE)
8309 return 0;
8311 pos = int_byte_position (field);
8312 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8313 continue;
8315 else if (mask == NULL
8316 || CONSTRUCTOR_NO_CLEARING (init))
8317 break;
8318 else
8319 pos = total_bytes;
8321 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8323 tree fld;
8324 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8326 if (TREE_CODE (fld) != FIELD_DECL)
8327 continue;
8328 if (fld == field)
8329 break;
8330 if (DECL_PADDING_P (fld))
8331 continue;
8332 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8333 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8334 return 0;
8335 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8336 continue;
8337 break;
8339 if (fld == NULL_TREE)
8341 if (ce == NULL)
8342 break;
8343 return 0;
8345 fld_base = DECL_CHAIN (fld);
8346 if (fld != field)
8348 cnt--;
8349 field = fld;
8350 pos = int_byte_position (field);
8351 val = build_zero_cst (TREE_TYPE (fld));
8352 if (TREE_CODE (val) == CONSTRUCTOR)
8353 to_free = val;
8357 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8358 && TYPE_DOMAIN (TREE_TYPE (field))
8359 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8361 if (mask || off != -1)
8362 return 0;
8363 if (val == NULL_TREE)
8364 continue;
8365 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8366 return 0;
8367 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8368 if (fieldsize < 0
8369 || (int) fieldsize != fieldsize
8370 || (pos + fieldsize) > INT_MAX)
8371 return 0;
8372 if (pos + fieldsize > total_bytes)
8374 if (ptr != NULL && total_bytes < len)
8375 memset (ptr + total_bytes, '\0',
8376 MIN (pos + fieldsize, len) - total_bytes);
8377 total_bytes = pos + fieldsize;
8380 else
8382 if (DECL_SIZE_UNIT (field) == NULL_TREE
8383 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8384 return 0;
8385 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8387 if (fieldsize == 0)
8388 continue;
8390 /* Prepare to deal with integral bit-fields and filter out other
8391 bit-fields that do not start and end on a byte boundary. */
8392 if (DECL_BIT_FIELD (field))
8394 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8395 return 0;
8396 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8397 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8399 bpos %= BITS_PER_UNIT;
8400 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8401 epos = fieldsize % BITS_PER_UNIT;
8402 fieldsize += BITS_PER_UNIT - 1;
8403 fieldsize /= BITS_PER_UNIT;
8405 else if (bpos % BITS_PER_UNIT
8406 || DECL_SIZE (field) == NULL_TREE
8407 || !tree_fits_shwi_p (DECL_SIZE (field))
8408 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8409 return 0;
8412 if (off != -1 && pos + fieldsize <= off)
8413 continue;
8415 if (val == NULL_TREE)
8416 continue;
8418 if (DECL_BIT_FIELD (field)
8419 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8421 /* FIXME: Handle PDP endian. */
8422 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8423 return 0;
8425 if (TREE_CODE (val) != INTEGER_CST)
8426 return 0;
8428 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8429 tree repr_type = NULL_TREE;
8430 HOST_WIDE_INT rpos = 0;
8431 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8433 rpos = int_byte_position (repr);
8434 repr_type = TREE_TYPE (repr);
8436 else
8438 repr_type = find_bitfield_repr_type (fieldsize, len);
8439 if (repr_type == NULL_TREE)
8440 return 0;
8441 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8442 gcc_assert (repr_size > 0 && repr_size <= len);
8443 if (pos + repr_size <= o + len)
8444 rpos = pos;
8445 else
8447 rpos = o + len - repr_size;
8448 gcc_assert (rpos <= pos);
8452 if (rpos > pos)
8453 return 0;
8454 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8455 int diff = (TYPE_PRECISION (repr_type)
8456 - TYPE_PRECISION (TREE_TYPE (field)));
8457 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8458 if (!BYTES_BIG_ENDIAN)
8459 w = wi::lshift (w, bitoff);
8460 else
8461 w = wi::lshift (w, diff - bitoff);
8462 val = wide_int_to_tree (repr_type, w);
8464 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8465 / BITS_PER_UNIT + 1];
8466 int l = native_encode_int (val, buf, sizeof buf, 0);
8467 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8468 return 0;
8470 if (ptr == NULL)
8471 continue;
8473 /* If the bitfield does not start at byte boundary, handle
8474 the partial byte at the start. */
8475 if (bpos
8476 && (off == -1 || (pos >= off && len >= 1)))
8478 if (!BYTES_BIG_ENDIAN)
8480 int msk = (1 << bpos) - 1;
8481 buf[pos - rpos] &= ~msk;
8482 buf[pos - rpos] |= ptr[pos - o] & msk;
8483 if (mask)
8485 if (fieldsize > 1 || epos == 0)
8486 mask[pos] &= msk;
8487 else
8488 mask[pos] &= (msk | ~((1 << epos) - 1));
8491 else
8493 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8494 buf[pos - rpos] &= msk;
8495 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8496 if (mask)
8498 if (fieldsize > 1 || epos == 0)
8499 mask[pos] &= ~msk;
8500 else
8501 mask[pos] &= (~msk
8502 | ((1 << (BITS_PER_UNIT - epos))
8503 - 1));
8507 /* If the bitfield does not end at byte boundary, handle
8508 the partial byte at the end. */
8509 if (epos
8510 && (off == -1
8511 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8513 if (!BYTES_BIG_ENDIAN)
8515 int msk = (1 << epos) - 1;
8516 buf[pos - rpos + fieldsize - 1] &= msk;
8517 buf[pos - rpos + fieldsize - 1]
8518 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8519 if (mask && (fieldsize > 1 || bpos == 0))
8520 mask[pos + fieldsize - 1] &= ~msk;
8522 else
8524 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8525 buf[pos - rpos + fieldsize - 1] &= ~msk;
8526 buf[pos - rpos + fieldsize - 1]
8527 |= ptr[pos + fieldsize - 1 - o] & msk;
8528 if (mask && (fieldsize > 1 || bpos == 0))
8529 mask[pos + fieldsize - 1] &= msk;
8532 if (off == -1
8533 || (pos >= off
8534 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8536 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8537 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8538 memset (mask + pos + (bpos != 0), 0,
8539 fieldsize - (bpos != 0) - (epos != 0));
8541 else
8543 /* Partial overlap. */
8544 HOST_WIDE_INT fsz = fieldsize;
8545 gcc_assert (mask == NULL);
8546 if (pos < off)
8548 fsz -= (off - pos);
8549 pos = off;
8551 if (pos + fsz > (HOST_WIDE_INT) off + len)
8552 fsz = (HOST_WIDE_INT) off + len - pos;
8553 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8555 continue;
8558 if (off == -1
8559 || (pos >= off
8560 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8562 int fldsize = fieldsize;
8563 if (off == -1)
8565 tree fld = DECL_CHAIN (field);
8566 while (fld)
8568 if (TREE_CODE (fld) == FIELD_DECL)
8569 break;
8570 fld = DECL_CHAIN (fld);
8572 if (fld == NULL_TREE)
8573 fldsize = len - pos;
8575 r = native_encode_initializer (val, ptr ? ptr + pos - o
8576 : NULL,
8577 fldsize,
8578 off == -1 ? -1 : 0,
8579 mask ? mask + pos : NULL);
8580 if (!r)
8581 return 0;
8582 if (off == -1
8583 && fldsize != fieldsize
8584 && r > fieldsize
8585 && pos + r > total_bytes)
8586 total_bytes = pos + r;
8588 else
8590 /* Partial overlap. */
8591 unsigned char *p = NULL;
8592 int no = 0;
8593 int l;
8594 gcc_assert (mask == NULL);
8595 if (pos >= off)
8597 if (ptr)
8598 p = ptr + pos - off;
8599 l = MIN ((HOST_WIDE_INT) off + len - pos,
8600 fieldsize);
8602 else
8604 p = ptr;
8605 no = off - pos;
8606 l = len;
8608 if (!native_encode_initializer (val, p, l, no, NULL))
8609 return 0;
8612 return MIN (total_bytes - off, len);
8614 return 0;
8619 /* Subroutine of native_interpret_expr. Interpret the contents of
8620 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8621 If the buffer cannot be interpreted, return NULL_TREE. */
8623 static tree
8624 native_interpret_int (tree type, const unsigned char *ptr, int len)
8626 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8628 if (total_bytes > len
8629 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8630 return NULL_TREE;
8632 wide_int result = wi::from_buffer (ptr, total_bytes);
8634 return wide_int_to_tree (type, result);
8638 /* Subroutine of native_interpret_expr. Interpret the contents of
8639 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8640 If the buffer cannot be interpreted, return NULL_TREE. */
8642 static tree
8643 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8645 scalar_mode mode = SCALAR_TYPE_MODE (type);
8646 int total_bytes = GET_MODE_SIZE (mode);
8647 double_int result;
8648 FIXED_VALUE_TYPE fixed_value;
8650 if (total_bytes > len
8651 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8652 return NULL_TREE;
8654 result = double_int::from_buffer (ptr, total_bytes);
8655 fixed_value = fixed_from_double_int (result, mode);
8657 return build_fixed (type, fixed_value);
8661 /* Subroutine of native_interpret_expr. Interpret the contents of
8662 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8663 If the buffer cannot be interpreted, return NULL_TREE. */
8665 tree
8666 native_interpret_real (tree type, const unsigned char *ptr, int len)
8668 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8669 int total_bytes = GET_MODE_SIZE (mode);
8670 unsigned char value;
8671 /* There are always 32 bits in each long, no matter the size of
8672 the host's long. We handle floating point representations with
8673 up to 192 bits. */
8674 REAL_VALUE_TYPE r;
8675 long tmp[6];
8677 if (total_bytes > len || total_bytes > 24)
8678 return NULL_TREE;
8679 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8681 memset (tmp, 0, sizeof (tmp));
8682 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8683 bitpos += BITS_PER_UNIT)
8685 /* Both OFFSET and BYTE index within a long;
8686 bitpos indexes the whole float. */
8687 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8688 if (UNITS_PER_WORD < 4)
8690 int word = byte / UNITS_PER_WORD;
8691 if (WORDS_BIG_ENDIAN)
8692 word = (words - 1) - word;
8693 offset = word * UNITS_PER_WORD;
8694 if (BYTES_BIG_ENDIAN)
8695 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8696 else
8697 offset += byte % UNITS_PER_WORD;
8699 else
8701 offset = byte;
8702 if (BYTES_BIG_ENDIAN)
8704 /* Reverse bytes within each long, or within the entire float
8705 if it's smaller than a long (for HFmode). */
8706 offset = MIN (3, total_bytes - 1) - offset;
8707 gcc_assert (offset >= 0);
8710 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8712 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8715 real_from_target (&r, tmp, mode);
8716 return build_real (type, r);
8720 /* Subroutine of native_interpret_expr. Interpret the contents of
8721 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8722 If the buffer cannot be interpreted, return NULL_TREE. */
8724 static tree
8725 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8727 tree etype, rpart, ipart;
8728 int size;
8730 etype = TREE_TYPE (type);
8731 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8732 if (size * 2 > len)
8733 return NULL_TREE;
8734 rpart = native_interpret_expr (etype, ptr, size);
8735 if (!rpart)
8736 return NULL_TREE;
8737 ipart = native_interpret_expr (etype, ptr+size, size);
8738 if (!ipart)
8739 return NULL_TREE;
8740 return build_complex (type, rpart, ipart);
8743 /* Read a vector of type TYPE from the target memory image given by BYTES,
8744 which contains LEN bytes. The vector is known to be encodable using
8745 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8747 Return the vector on success, otherwise return null. */
8749 static tree
8750 native_interpret_vector_part (tree type, const unsigned char *bytes,
8751 unsigned int len, unsigned int npatterns,
8752 unsigned int nelts_per_pattern)
8754 tree elt_type = TREE_TYPE (type);
8755 if (VECTOR_BOOLEAN_TYPE_P (type)
8756 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8758 /* This is the only case in which elements can be smaller than a byte.
8759 Element 0 is always in the lsb of the containing byte. */
8760 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8761 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8762 return NULL_TREE;
8764 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8765 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8767 unsigned int bit_index = i * elt_bits;
8768 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8769 unsigned int lsb = bit_index % BITS_PER_UNIT;
8770 builder.quick_push (bytes[byte_index] & (1 << lsb)
8771 ? build_all_ones_cst (elt_type)
8772 : build_zero_cst (elt_type));
8774 return builder.build ();
8777 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8778 if (elt_bytes * npatterns * nelts_per_pattern > len)
8779 return NULL_TREE;
8781 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8782 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8784 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8785 if (!elt)
8786 return NULL_TREE;
8787 builder.quick_push (elt);
8788 bytes += elt_bytes;
8790 return builder.build ();
8793 /* Subroutine of native_interpret_expr. Interpret the contents of
8794 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8795 If the buffer cannot be interpreted, return NULL_TREE. */
8797 static tree
8798 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8800 unsigned HOST_WIDE_INT size;
8802 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8803 || size > len)
8804 return NULL_TREE;
8806 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8807 return native_interpret_vector_part (type, ptr, len, count, 1);
8811 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8812 the buffer PTR of length LEN as a constant of type TYPE. For
8813 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8814 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8815 return NULL_TREE. */
8817 tree
8818 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8820 switch (TREE_CODE (type))
8822 case INTEGER_TYPE:
8823 case ENUMERAL_TYPE:
8824 case BOOLEAN_TYPE:
8825 case POINTER_TYPE:
8826 case REFERENCE_TYPE:
8827 case OFFSET_TYPE:
8828 return native_interpret_int (type, ptr, len);
8830 case REAL_TYPE:
8831 if (tree ret = native_interpret_real (type, ptr, len))
8833 /* For floating point values in composite modes, punt if this
8834 folding doesn't preserve bit representation. As the mode doesn't
8835 have fixed precision while GCC pretends it does, there could be
8836 valid values that GCC can't really represent accurately.
8837 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8838 bit combinations which GCC doesn't preserve. */
8839 unsigned char buf[24 * 2];
8840 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8841 int total_bytes = GET_MODE_SIZE (mode);
8842 memcpy (buf + 24, ptr, total_bytes);
8843 clear_type_padding_in_mask (type, buf + 24);
8844 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8845 || memcmp (buf + 24, buf, total_bytes) != 0)
8846 return NULL_TREE;
8847 return ret;
8849 return NULL_TREE;
8851 case FIXED_POINT_TYPE:
8852 return native_interpret_fixed (type, ptr, len);
8854 case COMPLEX_TYPE:
8855 return native_interpret_complex (type, ptr, len);
8857 case VECTOR_TYPE:
8858 return native_interpret_vector (type, ptr, len);
8860 default:
8861 return NULL_TREE;
8865 /* Returns true if we can interpret the contents of a native encoding
8866 as TYPE. */
8868 bool
8869 can_native_interpret_type_p (tree type)
8871 switch (TREE_CODE (type))
8873 case INTEGER_TYPE:
8874 case ENUMERAL_TYPE:
8875 case BOOLEAN_TYPE:
8876 case POINTER_TYPE:
8877 case REFERENCE_TYPE:
8878 case FIXED_POINT_TYPE:
8879 case REAL_TYPE:
8880 case COMPLEX_TYPE:
8881 case VECTOR_TYPE:
8882 case OFFSET_TYPE:
8883 return true;
8884 default:
8885 return false;
8889 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8890 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8892 tree
8893 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8894 int len)
8896 vec<constructor_elt, va_gc> *elts = NULL;
8897 if (TREE_CODE (type) == ARRAY_TYPE)
8899 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8900 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8901 return NULL_TREE;
8903 HOST_WIDE_INT cnt = 0;
8904 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8906 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8907 return NULL_TREE;
8908 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8910 if (eltsz == 0)
8911 cnt = 0;
8912 HOST_WIDE_INT pos = 0;
8913 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8915 tree v = NULL_TREE;
8916 if (pos >= len || pos + eltsz > len)
8917 return NULL_TREE;
8918 if (can_native_interpret_type_p (TREE_TYPE (type)))
8920 v = native_interpret_expr (TREE_TYPE (type),
8921 ptr + off + pos, eltsz);
8922 if (v == NULL_TREE)
8923 return NULL_TREE;
8925 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8926 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8927 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8928 eltsz);
8929 if (v == NULL_TREE)
8930 return NULL_TREE;
8931 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8933 return build_constructor (type, elts);
8935 if (TREE_CODE (type) != RECORD_TYPE)
8936 return NULL_TREE;
8937 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8939 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
8940 || is_empty_type (TREE_TYPE (field)))
8941 continue;
8942 tree fld = field;
8943 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8944 int diff = 0;
8945 tree v = NULL_TREE;
8946 if (DECL_BIT_FIELD (field))
8948 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8949 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8951 poly_int64 bitoffset;
8952 poly_uint64 field_offset, fld_offset;
8953 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8954 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8955 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8956 else
8957 bitoffset = 0;
8958 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8959 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8960 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8961 - TYPE_PRECISION (TREE_TYPE (field)));
8962 if (!bitoffset.is_constant (&bitoff)
8963 || bitoff < 0
8964 || bitoff > diff)
8965 return NULL_TREE;
8967 else
8969 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8970 return NULL_TREE;
8971 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8972 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8973 bpos %= BITS_PER_UNIT;
8974 fieldsize += bpos;
8975 fieldsize += BITS_PER_UNIT - 1;
8976 fieldsize /= BITS_PER_UNIT;
8977 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8978 if (repr_type == NULL_TREE)
8979 return NULL_TREE;
8980 sz = int_size_in_bytes (repr_type);
8981 if (sz < 0 || sz > len)
8982 return NULL_TREE;
8983 pos = int_byte_position (field);
8984 if (pos < 0 || pos > len || pos + fieldsize > len)
8985 return NULL_TREE;
8986 HOST_WIDE_INT rpos;
8987 if (pos + sz <= len)
8988 rpos = pos;
8989 else
8991 rpos = len - sz;
8992 gcc_assert (rpos <= pos);
8994 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8995 pos = rpos;
8996 diff = (TYPE_PRECISION (repr_type)
8997 - TYPE_PRECISION (TREE_TYPE (field)));
8998 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8999 if (v == NULL_TREE)
9000 return NULL_TREE;
9001 fld = NULL_TREE;
9005 if (fld)
9007 sz = int_size_in_bytes (TREE_TYPE (fld));
9008 if (sz < 0 || sz > len)
9009 return NULL_TREE;
9010 tree byte_pos = byte_position (fld);
9011 if (!tree_fits_shwi_p (byte_pos))
9012 return NULL_TREE;
9013 pos = tree_to_shwi (byte_pos);
9014 if (pos < 0 || pos > len || pos + sz > len)
9015 return NULL_TREE;
9017 if (fld == NULL_TREE)
9018 /* Already handled above. */;
9019 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9021 v = native_interpret_expr (TREE_TYPE (fld),
9022 ptr + off + pos, sz);
9023 if (v == NULL_TREE)
9024 return NULL_TREE;
9026 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9027 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9028 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9029 if (v == NULL_TREE)
9030 return NULL_TREE;
9031 if (fld != field)
9033 if (TREE_CODE (v) != INTEGER_CST)
9034 return NULL_TREE;
9036 /* FIXME: Figure out how to handle PDP endian bitfields. */
9037 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9038 return NULL_TREE;
9039 if (!BYTES_BIG_ENDIAN)
9040 v = wide_int_to_tree (TREE_TYPE (field),
9041 wi::lrshift (wi::to_wide (v), bitoff));
9042 else
9043 v = wide_int_to_tree (TREE_TYPE (field),
9044 wi::lrshift (wi::to_wide (v),
9045 diff - bitoff));
9047 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9049 return build_constructor (type, elts);
9052 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9053 or extracted constant positions and/or sizes aren't byte aligned. */
9055 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9056 bits between adjacent elements. AMNT should be within
9057 [0, BITS_PER_UNIT).
9058 Example, AMNT = 2:
9059 00011111|11100000 << 2 = 01111111|10000000
9060 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9062 void
9063 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9064 unsigned int amnt)
9066 if (amnt == 0)
9067 return;
9069 unsigned char carry_over = 0U;
9070 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9071 unsigned char clear_mask = (~0U) << amnt;
9073 for (unsigned int i = 0; i < sz; i++)
9075 unsigned prev_carry_over = carry_over;
9076 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9078 ptr[i] <<= amnt;
9079 if (i != 0)
9081 ptr[i] &= clear_mask;
9082 ptr[i] |= prev_carry_over;
9087 /* Like shift_bytes_in_array_left but for big-endian.
9088 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9089 bits between adjacent elements. AMNT should be within
9090 [0, BITS_PER_UNIT).
9091 Example, AMNT = 2:
9092 00011111|11100000 >> 2 = 00000111|11111000
9093 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9095 void
9096 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9097 unsigned int amnt)
9099 if (amnt == 0)
9100 return;
9102 unsigned char carry_over = 0U;
9103 unsigned char carry_mask = ~(~0U << amnt);
9105 for (unsigned int i = 0; i < sz; i++)
9107 unsigned prev_carry_over = carry_over;
9108 carry_over = ptr[i] & carry_mask;
9110 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9111 ptr[i] >>= amnt;
9112 ptr[i] |= prev_carry_over;
9116 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9117 directly on the VECTOR_CST encoding, in a way that works for variable-
9118 length vectors. Return the resulting VECTOR_CST on success or null
9119 on failure. */
9121 static tree
9122 fold_view_convert_vector_encoding (tree type, tree expr)
9124 tree expr_type = TREE_TYPE (expr);
9125 poly_uint64 type_bits, expr_bits;
9126 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9127 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9128 return NULL_TREE;
9130 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9131 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9132 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9133 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9135 /* We can only preserve the semantics of a stepped pattern if the new
9136 vector element is an integer of the same size. */
9137 if (VECTOR_CST_STEPPED_P (expr)
9138 && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits))
9139 return NULL_TREE;
9141 /* The number of bits needed to encode one element from every pattern
9142 of the original vector. */
9143 unsigned int expr_sequence_bits
9144 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9146 /* The number of bits needed to encode one element from every pattern
9147 of the result. */
9148 unsigned int type_sequence_bits
9149 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9151 /* Don't try to read more bytes than are available, which can happen
9152 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9153 The general VIEW_CONVERT handling can cope with that case, so there's
9154 no point complicating things here. */
9155 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9156 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9157 BITS_PER_UNIT);
9158 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9159 if (known_gt (buffer_bits, expr_bits))
9160 return NULL_TREE;
9162 /* Get enough bytes of EXPR to form the new encoding. */
9163 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9164 buffer.quick_grow (buffer_bytes);
9165 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9166 buffer_bits / expr_elt_bits)
9167 != (int) buffer_bytes)
9168 return NULL_TREE;
9170 /* Reencode the bytes as TYPE. */
9171 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9172 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9173 type_npatterns, nelts_per_pattern);
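/* A worked example (assuming a little-endian target): a VLA vector
   duplicating the 32-bit value 0x01020304 is encoded with one pattern
   of one element.  Viewed as 16-bit elements, those same bytes re-read
   as the two-pattern encoding { 0x0304, 0x0102, 0x0304, 0x0102, ... },
   so TYPE_NPATTERNS becomes 2 with one element per pattern.  */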
9176 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9177 TYPE at compile-time. If we're unable to perform the conversion
9178 return NULL_TREE. */
9180 static tree
9181 fold_view_convert_expr (tree type, tree expr)
9183 /* We support up to 512-bit values (for V8DFmode). */
9184 unsigned char buffer[64];
9185 int len;
9187 /* Check that the host and target are sane. */
9188 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9189 return NULL_TREE;
9191 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9192 if (tree res = fold_view_convert_vector_encoding (type, expr))
9193 return res;
9195 len = native_encode_expr (expr, buffer, sizeof (buffer));
9196 if (len == 0)
9197 return NULL_TREE;
9199 return native_interpret_expr (type, buffer, len);
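
/* The encode/interpret round trip above is the compile-time analogue of
   reinterpreting an object's bytes. A standalone C++ sketch of the same
   semantics for a scalar, assuming IEEE single precision:  */

#include <cstdint>
#include <cstring>

static uint32_t
view_convert_bits (float f)
{
  unsigned char buffer[sizeof (float)];
  std::memcpy (buffer, &f, sizeof buffer);  /* Like native_encode_expr.  */
  uint32_t u;
  std::memcpy (&u, buffer, sizeof u);       /* Like native_interpret_expr.  */
  return u;                                 /* 1.0f yields 0x3f800000.  */
}
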
9202 /* Build an expression for the address of T. Folds away INDIRECT_REF
9203 to avoid confusing the gimplify process. */
9205 tree
9206 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9208 /* The size of the object is not relevant when talking about its address. */
9209 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9210 t = TREE_OPERAND (t, 0);
9212 if (INDIRECT_REF_P (t))
9214 t = TREE_OPERAND (t, 0);
9216 if (TREE_TYPE (t) != ptrtype)
9217 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9219 else if (TREE_CODE (t) == MEM_REF
9220 && integer_zerop (TREE_OPERAND (t, 1)))
9222 t = TREE_OPERAND (t, 0);
9224 if (TREE_TYPE (t) != ptrtype)
9225 t = fold_convert_loc (loc, ptrtype, t);
9227 else if (TREE_CODE (t) == MEM_REF
9228 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9229 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9230 TREE_OPERAND (t, 0),
9231 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9232 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9234 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9236 if (TREE_TYPE (t) != ptrtype)
9237 t = fold_convert_loc (loc, ptrtype, t);
9239 else
9240 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9242 return t;
9245 /* Build an expression for the address of T. */
9247 tree
9248 build_fold_addr_expr_loc (location_t loc, tree t)
9250 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9252 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9255 /* Fold a unary expression of code CODE and type TYPE with operand
9256 OP0. Return the folded expression if folding is successful.
9257 Otherwise, return NULL_TREE. */
9259 tree
9260 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9262 tree tem;
9263 tree arg0;
9264 enum tree_code_class kind = TREE_CODE_CLASS (code);
9266 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9267 && TREE_CODE_LENGTH (code) == 1);
9269 arg0 = op0;
9270 if (arg0)
9272 if (CONVERT_EXPR_CODE_P (code)
9273 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9275 /* Don't use STRIP_NOPS, because signedness of argument type
9276 matters. */
9277 STRIP_SIGN_NOPS (arg0);
9279 else
9281 /* Strip any conversions that don't change the mode. This
9282 is safe for every expression, except for a comparison
9283 expression because its signedness is derived from its
9284 operands.
9286 Note that this is done as an internal manipulation within
9287 the constant folder, in order to find the simplest
9288 representation of the arguments so that their form can be
9289 studied. In any case, the appropriate type conversions
9290 should be put back in the tree that will get out of the
9291 constant folder. */
9292 STRIP_NOPS (arg0);
9295 if (CONSTANT_CLASS_P (arg0))
9297 tree tem = const_unop (code, type, arg0);
9298 if (tem)
9300 if (TREE_TYPE (tem) != type)
9301 tem = fold_convert_loc (loc, type, tem);
9302 return tem;
9307 tem = generic_simplify (loc, code, type, op0);
9308 if (tem)
9309 return tem;
9311 if (TREE_CODE_CLASS (code) == tcc_unary)
9313 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9314 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9315 fold_build1_loc (loc, code, type,
9316 fold_convert_loc (loc, TREE_TYPE (op0),
9317 TREE_OPERAND (arg0, 1))));
9318 else if (TREE_CODE (arg0) == COND_EXPR)
9320 tree arg01 = TREE_OPERAND (arg0, 1);
9321 tree arg02 = TREE_OPERAND (arg0, 2);
9322 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9323 arg01 = fold_build1_loc (loc, code, type,
9324 fold_convert_loc (loc,
9325 TREE_TYPE (op0), arg01));
9326 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9327 arg02 = fold_build1_loc (loc, code, type,
9328 fold_convert_loc (loc,
9329 TREE_TYPE (op0), arg02));
9330 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9331 arg01, arg02);
9333 /* If this was a conversion, and all we did was to move it
9334 inside the COND_EXPR, bring it back out. But leave it if
9335 it is a conversion from integer to integer and the
9336 result precision is no wider than a word since such a
9337 conversion is cheap and may be optimized away by combine,
9338 while it couldn't if it were outside the COND_EXPR. Then return
9339 so we don't get into an infinite recursion loop taking the
9340 conversion out and then back in. */
9342 if ((CONVERT_EXPR_CODE_P (code)
9343 || code == NON_LVALUE_EXPR)
9344 && TREE_CODE (tem) == COND_EXPR
9345 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9346 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9347 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9348 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9349 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9350 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9351 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9352 && (INTEGRAL_TYPE_P
9353 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9354 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9355 || flag_syntax_only))
9356 tem = build1_loc (loc, code, type,
9357 build3 (COND_EXPR,
9358 TREE_TYPE (TREE_OPERAND
9359 (TREE_OPERAND (tem, 1), 0)),
9360 TREE_OPERAND (tem, 0),
9361 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9362 TREE_OPERAND (TREE_OPERAND (tem, 2),
9363 0)));
9364 return tem;
9368 switch (code)
9370 case NON_LVALUE_EXPR:
9371 if (!maybe_lvalue_p (op0))
9372 return fold_convert_loc (loc, type, op0);
9373 return NULL_TREE;
9375 CASE_CONVERT:
9376 case FLOAT_EXPR:
9377 case FIX_TRUNC_EXPR:
9378 if (COMPARISON_CLASS_P (op0))
9380 /* If we have (type) (a CMP b) and type is an integral type, return
9381 new expression involving the new type. Canonicalize
9382 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9383 non-integral type.
9384 Do not fold the result as that would not simplify further;
9385 folding again would also result in infinite recursion. */
9386 if (TREE_CODE (type) == BOOLEAN_TYPE)
9387 return build2_loc (loc, TREE_CODE (op0), type,
9388 TREE_OPERAND (op0, 0),
9389 TREE_OPERAND (op0, 1));
9390 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9391 && TREE_CODE (type) != VECTOR_TYPE)
9392 return build3_loc (loc, COND_EXPR, type, op0,
9393 constant_boolean_node (true, type),
9394 constant_boolean_node (false, type));
9397 /* Handle (T *)&A.B.C for A being of type T and B and C
9398 living at offset zero. This occurs frequently in
9399 C++ upcasting and then accessing the base. */
9400 if (TREE_CODE (op0) == ADDR_EXPR
9401 && POINTER_TYPE_P (type)
9402 && handled_component_p (TREE_OPERAND (op0, 0)))
9404 poly_int64 bitsize, bitpos;
9405 tree offset;
9406 machine_mode mode;
9407 int unsignedp, reversep, volatilep;
9408 tree base
9409 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9410 &offset, &mode, &unsignedp, &reversep,
9411 &volatilep);
9412 /* If the reference was to a (constant) zero offset, we can use
9413 the address of the base if it has the same base type
9414 as the result type and the pointer type is unqualified. */
9415 if (!offset
9416 && known_eq (bitpos, 0)
9417 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9418 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9419 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9420 return fold_convert_loc (loc, type,
9421 build_fold_addr_expr_loc (loc, base));
9424 if (TREE_CODE (op0) == MODIFY_EXPR
9425 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9426 /* Detect assigning a bitfield. */
9427 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9428 && DECL_BIT_FIELD
9429 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9431 /* Don't leave an assignment inside a conversion
9432 unless assigning a bitfield. */
9433 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9434 /* First do the assignment, then return converted constant. */
9435 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9436 suppress_warning (tem /* What warning? */);
9437 TREE_USED (tem) = 1;
9438 return tem;
9441 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9442 constant (if x has signed type, the sign bit cannot be set
9443 in c). This folds extension into the BIT_AND_EXPR.
9444 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9445 very likely don't have maximal range for their precision and this
9446 transformation effectively doesn't preserve non-maximal ranges. */
9447 if (TREE_CODE (type) == INTEGER_TYPE
9448 && TREE_CODE (op0) == BIT_AND_EXPR
9449 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9451 tree and_expr = op0;
9452 tree and0 = TREE_OPERAND (and_expr, 0);
9453 tree and1 = TREE_OPERAND (and_expr, 1);
9454 int change = 0;
9456 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9457 || (TYPE_PRECISION (type)
9458 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9459 change = 1;
9460 else if (TYPE_PRECISION (TREE_TYPE (and1))
9461 <= HOST_BITS_PER_WIDE_INT
9462 && tree_fits_uhwi_p (and1))
9464 unsigned HOST_WIDE_INT cst;
9466 cst = tree_to_uhwi (and1);
9467 cst &= HOST_WIDE_INT_M1U
9468 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9469 change = (cst == 0);
9470 if (change
9471 && !flag_syntax_only
9472 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9473 == ZERO_EXTEND))
9475 tree uns = unsigned_type_for (TREE_TYPE (and0));
9476 and0 = fold_convert_loc (loc, uns, and0);
9477 and1 = fold_convert_loc (loc, uns, and1);
9480 if (change)
9482 tem = force_fit_type (type, wi::to_widest (and1), 0,
9483 TREE_OVERFLOW (and1));
9484 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9485 fold_convert_loc (loc, type, and0), tem);
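
/* A standalone check (plain C++) of the sign-bit condition above: widening
   (x & c) equals masking the widened x whenever c's sign bit is clear,
   even for negative x, because x & c is then nonnegative.  */

#include <cassert>

int
main ()
{
  int x = -5;
  int c = 0x7f;                   /* Sign bit clear.  */
  long lhs = (long) (x & c);      /* (T)(x & c)...  */
  long rhs = (long) x & (long) c; /* ...equals (T)x & (T)c.  */
  assert (lhs == rhs);
  return 0;
}
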
9489 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9490 cast (T1)X will fold away. We assume that this happens when X itself
9491 is a cast. */
9492 if (POINTER_TYPE_P (type)
9493 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9494 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9496 tree arg00 = TREE_OPERAND (arg0, 0);
9497 tree arg01 = TREE_OPERAND (arg0, 1);
9499 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9500 when the pointed type needs higher alignment than
9501 the p+ first operand's pointed type. */
9502 if (!in_gimple_form
9503 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9504 && (min_align_of_type (TREE_TYPE (type))
9505 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9506 return NULL_TREE;
9508 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9509 when type is a reference type and arg00's type is not,
9510 because arg00 could be validly nullptr and if arg01 doesn't return,
9511 we don't want false positive binding of reference to nullptr. */
9512 if (TREE_CODE (type) == REFERENCE_TYPE
9513 && !in_gimple_form
9514 && sanitize_flags_p (SANITIZE_NULL)
9515 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9516 return NULL_TREE;
9518 arg00 = fold_convert_loc (loc, type, arg00);
9519 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9522 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9523 of the same precision, and X is an integer type not narrower than
9524 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9525 if (INTEGRAL_TYPE_P (type)
9526 && TREE_CODE (op0) == BIT_NOT_EXPR
9527 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9528 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9529 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9531 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9532 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9533 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9534 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9535 fold_convert_loc (loc, type, tem));
9538 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9539 type of X and Y (integer types only). */
9540 if (INTEGRAL_TYPE_P (type)
9541 && TREE_CODE (op0) == MULT_EXPR
9542 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9543 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9544 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9545 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9547 /* Be careful not to introduce new overflows. */
9548 tree mult_type;
9549 if (TYPE_OVERFLOW_WRAPS (type))
9550 mult_type = type;
9551 else
9552 mult_type = unsigned_type_for (type);
9554 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9556 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9557 fold_convert_loc (loc, mult_type,
9558 TREE_OPERAND (op0, 0)),
9559 fold_convert_loc (loc, mult_type,
9560 TREE_OPERAND (op0, 1)));
9561 return fold_convert_loc (loc, type, tem);
9565 return NULL_TREE;
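
/* A standalone sketch (plain C++; the narrowing conversion is assumed
   two's-complement) of the narrowing-multiplication transform above: the
   low bits of a product depend only on the low bits of its operands, and
   doing the arithmetic in the unsigned narrow type avoids introducing new
   signed overflow.  */

#include <cassert>
#include <cstdint>

int
main ()
{
  int x = 100000, y = 37;
  short outside = (short) (x * y);                       /* (T1)(X * Y)...  */
  short inside = (short) ((uint16_t) x * (uint16_t) y);  /* (T1)X * (T1)Y.  */
  assert (outside == inside);
  return 0;
}
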
9567 case VIEW_CONVERT_EXPR:
9568 if (TREE_CODE (op0) == MEM_REF)
9570 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9571 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9572 tem = fold_build2_loc (loc, MEM_REF, type,
9573 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9574 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9575 return tem;
9578 return NULL_TREE;
9580 case NEGATE_EXPR:
9581 tem = fold_negate_expr (loc, arg0);
9582 if (tem)
9583 return fold_convert_loc (loc, type, tem);
9584 return NULL_TREE;
9586 case ABS_EXPR:
9587 /* Convert fabs((double)float) into (double)fabsf(float). */
9588 if (TREE_CODE (arg0) == NOP_EXPR
9589 && TREE_CODE (type) == REAL_TYPE)
9591 tree targ0 = strip_float_extensions (arg0);
9592 if (targ0 != arg0)
9593 return fold_convert_loc (loc, type,
9594 fold_build1_loc (loc, ABS_EXPR,
9595 TREE_TYPE (targ0),
9596 targ0));
9598 return NULL_TREE;
9600 case BIT_NOT_EXPR:
9601 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9602 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9603 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9604 fold_convert_loc (loc, type,
9605 TREE_OPERAND (arg0, 0)))))
9606 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9607 fold_convert_loc (loc, type,
9608 TREE_OPERAND (arg0, 1)));
9609 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9610 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9611 fold_convert_loc (loc, type,
9612 TREE_OPERAND (arg0, 1)))))
9613 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9614 fold_convert_loc (loc, type,
9615 TREE_OPERAND (arg0, 0)), tem);
9617 return NULL_TREE;
9619 case TRUTH_NOT_EXPR:
9620 /* Note that the operand of this must be an int
9621 and its values must be 0 or 1.
9622 ("true" is a fixed value perhaps depending on the language,
9623 but we don't handle values other than 1 correctly yet.) */
9624 tem = fold_truth_not_expr (loc, arg0);
9625 if (!tem)
9626 return NULL_TREE;
9627 return fold_convert_loc (loc, type, tem);
9629 case INDIRECT_REF:
9630 /* Fold *&X to X if X is an lvalue. */
9631 if (TREE_CODE (op0) == ADDR_EXPR)
9633 tree op00 = TREE_OPERAND (op0, 0);
9634 if ((VAR_P (op00)
9635 || TREE_CODE (op00) == PARM_DECL
9636 || TREE_CODE (op00) == RESULT_DECL)
9637 && !TREE_READONLY (op00))
9638 return op00;
9640 return NULL_TREE;
9642 default:
9643 return NULL_TREE;
9644 } /* switch (code) */
9648 /* If the operation was a conversion do _not_ mark a resulting constant
9649 with TREE_OVERFLOW if the original constant was not. These conversions
9650 have implementation defined behavior and retaining the TREE_OVERFLOW
9651 flag here would confuse later passes such as VRP. */
9652 tree
9653 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9654 tree type, tree op0)
9656 tree res = fold_unary_loc (loc, code, type, op0);
9657 if (res
9658 && TREE_CODE (res) == INTEGER_CST
9659 && TREE_CODE (op0) == INTEGER_CST
9660 && CONVERT_EXPR_CODE_P (code))
9661 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9663 return res;
9666 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9667 operands OP0 and OP1. LOC is the location of the resulting expression.
9668 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9669 Return the folded expression if folding is successful. Otherwise,
9670 return NULL_TREE. */
9671 static tree
9672 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9673 tree arg0, tree arg1, tree op0, tree op1)
9675 tree tem;
9677 /* We only do these simplifications if we are optimizing. */
9678 if (!optimize)
9679 return NULL_TREE;
9681 /* Check for things like (A || B) && (A || C). We can convert this
9682 to A || (B && C). Note that either operator can be any of the four
9683 truth and/or operations and the transformation will still be
9684 valid. Also note that we only care about order for the
9685 ANDIF and ORIF operators. If B contains side effects, this
9686 might change the truth-value of A. */
9687 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9688 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9689 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9690 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9691 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9692 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9694 tree a00 = TREE_OPERAND (arg0, 0);
9695 tree a01 = TREE_OPERAND (arg0, 1);
9696 tree a10 = TREE_OPERAND (arg1, 0);
9697 tree a11 = TREE_OPERAND (arg1, 1);
9698 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9699 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9700 && (code == TRUTH_AND_EXPR
9701 || code == TRUTH_OR_EXPR));
9703 if (operand_equal_p (a00, a10, 0))
9704 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9705 fold_build2_loc (loc, code, type, a01, a11));
9706 else if (commutative && operand_equal_p (a00, a11, 0))
9707 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9708 fold_build2_loc (loc, code, type, a01, a10));
9709 else if (commutative && operand_equal_p (a01, a10, 0))
9710 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9711 fold_build2_loc (loc, code, type, a00, a11));
9713 /* This case is tricky because we must either have commutative
9714 operators or else A10 must not have side-effects. */
9716 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9717 && operand_equal_p (a01, a11, 0))
9718 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9719 fold_build2_loc (loc, code, type, a00, a10),
9720 a01);
9723 /* See if we can build a range comparison. */
9724 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9725 return tem;
9727 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9728 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9730 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9731 if (tem)
9732 return fold_build2_loc (loc, code, type, tem, arg1);
9735 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9736 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9738 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9739 if (tem)
9740 return fold_build2_loc (loc, code, type, arg0, tem);
9743 /* Check for the possibility of merging component references. If our
9744 lhs is another similar operation, try to merge its rhs with our
9745 rhs. Then try to merge our lhs and rhs. */
9746 if (TREE_CODE (arg0) == code
9747 && (tem = fold_truth_andor_1 (loc, code, type,
9748 TREE_OPERAND (arg0, 1), arg1)) != 0)
9749 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9751 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9752 return tem;
9754 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9755 if (param_logical_op_non_short_circuit != -1)
9756 logical_op_non_short_circuit
9757 = param_logical_op_non_short_circuit;
9758 if (logical_op_non_short_circuit
9759 && !sanitize_coverage_p ()
9760 && (code == TRUTH_AND_EXPR
9761 || code == TRUTH_ANDIF_EXPR
9762 || code == TRUTH_OR_EXPR
9763 || code == TRUTH_ORIF_EXPR))
9765 enum tree_code ncode, icode;
9767 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9768 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9769 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9771 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9772 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9773 We don't want to pack more than two leaves into a non-IF AND/OR
9774 expression.
9775 If the tree code of the left-hand operand isn't an AND/OR-IF code
9776 and isn't equal to IF-CODE, then we don't want to add the
9777 right-hand operand. If the inner right-hand side of the left-hand
9778 operand has side-effects, or isn't simple, then we can't add to it,
9779 as otherwise we might destroy the if-sequence.
9780 if (TREE_CODE (arg0) == icode
9781 && simple_condition_p (arg1)
9782 /* Needed for sequence points to handle traps and
9783 side-effects. */
9784 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9786 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9787 arg1);
9788 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9789 tem);
9791 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9792 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9793 else if (TREE_CODE (arg1) == icode
9794 && simple_condition_p (arg0)
9795 /* Needed for sequence points to handle traps and
9796 side-effects. */
9797 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9799 tem = fold_build2_loc (loc, ncode, type,
9800 arg0, TREE_OPERAND (arg1, 0));
9801 return fold_build2_loc (loc, icode, type, tem,
9802 TREE_OPERAND (arg1, 1));
9804 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9805 into (A OR B).
9806 For sequence point consistency, we need to check for trapping
9807 and side-effects. */
9808 else if (code == icode && simple_condition_p (arg0)
9809 && simple_condition_p (arg1))
9810 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9813 return NULL_TREE;
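
/* A standalone exhaustive check (plain C++) of the factoring used above:
   with side-effect-free operands, (A || B) && (A || C) and A || (B && C)
   agree on all eight truth assignments.  */

#include <cassert>

int
main ()
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
	assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
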
9816 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9817 by changing CODE to reduce the magnitude of constants involved in
9818 ARG0 of the comparison.
9819 Returns a canonicalized comparison tree if a simplification was
9820 possible, otherwise returns NULL_TREE.
9821 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9822 valid if signed overflow is undefined. */
9824 static tree
9825 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9826 tree arg0, tree arg1,
9827 bool *strict_overflow_p)
9829 enum tree_code code0 = TREE_CODE (arg0);
9830 tree t, cst0 = NULL_TREE;
9831 int sgn0;
9833 /* Match A +- CST code arg1. We can change this only if overflow
9834 is undefined. */
9835 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9836 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9837 /* In principle pointers also have undefined overflow behavior,
9838 but that causes problems elsewhere. */
9839 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9840 && (code0 == MINUS_EXPR
9841 || code0 == PLUS_EXPR)
9842 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9843 return NULL_TREE;
9845 /* Identify the constant in arg0 and its sign. */
9846 cst0 = TREE_OPERAND (arg0, 1);
9847 sgn0 = tree_int_cst_sgn (cst0);
9849 /* Overflowed constants and zero will cause problems. */
9850 if (integer_zerop (cst0)
9851 || TREE_OVERFLOW (cst0))
9852 return NULL_TREE;
9854 /* See if we can reduce the magnitude of the constant in
9855 arg0 by changing the comparison code. */
9856 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9857 if (code == LT_EXPR
9858 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9859 code = LE_EXPR;
9860 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9861 else if (code == GT_EXPR
9862 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9863 code = GE_EXPR;
9864 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9865 else if (code == LE_EXPR
9866 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9867 code = LT_EXPR;
9868 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9869 else if (code == GE_EXPR
9870 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9871 code = GT_EXPR;
9872 else
9873 return NULL_TREE;
9874 *strict_overflow_p = true;
9876 /* Now build the constant reduced in magnitude. But not if that
9877 would produce one outside of its type's range. */
9878 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9879 && ((sgn0 == 1
9880 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9881 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9882 || (sgn0 == -1
9883 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9884 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9885 return NULL_TREE;
9887 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9888 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9889 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9890 t = fold_convert (TREE_TYPE (arg1), t);
9892 return fold_build2_loc (loc, code, type, t, arg1);
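
/* A standalone check (plain C++, ranges chosen so nothing wraps) of the
   magnitude reduction above: over the integers, A - 10 < X is equivalent
   to A - 9 <= X, which is why the transform is only valid when the
   arithmetic on A cannot overflow.  */

#include <cassert>

int
main ()
{
  for (long a = -3; a <= 3; a++)
    for (long x = -20; x <= 20; x++)
      assert (((a - 10) < x) == ((a - 9) <= x));
  return 0;
}
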
9895 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9896 overflow further. Try to decrease the magnitude of constants involved
9897 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9898 and put sole constants at the second argument position.
9899 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9901 static tree
9902 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9903 tree arg0, tree arg1)
9905 tree t;
9906 bool strict_overflow_p;
9907 const char * const warnmsg = G_("assuming signed overflow does not occur "
9908 "when reducing constant in comparison");
9910 /* Try canonicalization by simplifying arg0. */
9911 strict_overflow_p = false;
9912 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9913 &strict_overflow_p);
9914 if (t)
9916 if (strict_overflow_p)
9917 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9918 return t;
9921 /* Try canonicalization by simplifying arg1 using the swapped
9922 comparison. */
9923 code = swap_tree_comparison (code);
9924 strict_overflow_p = false;
9925 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9926 &strict_overflow_p);
9927 if (t && strict_overflow_p)
9928 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9929 return t;
9932 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9933 space. This is used to avoid issuing overflow warnings for
9934 expressions like &p->x which cannot wrap. */
9936 static bool
9937 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9939 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9940 return true;
9942 if (maybe_lt (bitpos, 0))
9943 return true;
9945 poly_wide_int wi_offset;
9946 int precision = TYPE_PRECISION (TREE_TYPE (base));
9947 if (offset == NULL_TREE)
9948 wi_offset = wi::zero (precision);
9949 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9950 return true;
9951 else
9952 wi_offset = wi::to_poly_wide (offset);
9954 wi::overflow_type overflow;
9955 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9956 precision);
9957 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9958 if (overflow)
9959 return true;
9961 poly_uint64 total_hwi, size;
9962 if (!total.to_uhwi (&total_hwi)
9963 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9964 &size)
9965 || known_eq (size, 0U))
9966 return true;
9968 if (known_le (total_hwi, size))
9969 return false;
9971 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9972 array. */
9973 if (TREE_CODE (base) == ADDR_EXPR
9974 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9975 &size)
9976 && maybe_ne (size, 0U)
9977 && known_le (total_hwi, size))
9978 return false;
9980 return true;
9983 /* Return a positive integer when the symbol DECL is known to have
9984 a nonzero address, zero when it's known not to (e.g., it's a weak
9985 symbol), and a negative integer when the symbol is not yet in the
9986 symbol table and so whether or not its address is zero is unknown.
9987 For function-local objects, always return a positive integer. */
9988 static int
9989 maybe_nonzero_address (tree decl)
9991 /* Normally, don't do anything for variables and functions before symtab is
9992 built; it is quite possible that DECL will be declared weak later.
9993 But if folding_initializer, we need a constant answer now, so create
9994 the symtab entry and prevent later weak declaration. */
9995 if (DECL_P (decl) && decl_in_symtab_p (decl))
9996 if (struct symtab_node *symbol
9997 = (folding_initializer
9998 ? symtab_node::get_create (decl)
9999 : symtab_node::get (decl)))
10000 return symbol->nonzero_address ();
10002 /* Function local objects are never NULL. */
10003 if (DECL_P (decl)
10004 && (DECL_CONTEXT (decl)
10005 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10006 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10007 return 1;
10009 return -1;
10012 /* Subroutine of fold_binary. This routine performs all of the
10013 transformations that are common to the equality/inequality
10014 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10015 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10016 fold_binary should call fold_binary. Fold a comparison with
10017 tree code CODE and type TYPE with operands OP0 and OP1. Return
10018 the folded comparison or NULL_TREE. */
10020 static tree
10021 fold_comparison (location_t loc, enum tree_code code, tree type,
10022 tree op0, tree op1)
10024 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10025 tree arg0, arg1, tem;
10027 arg0 = op0;
10028 arg1 = op1;
10030 STRIP_SIGN_NOPS (arg0);
10031 STRIP_SIGN_NOPS (arg1);
10033 /* For comparisons of pointers we can decompose it to a compile time
10034 comparison of the base objects and the offsets into the object.
10035 This requires at least one operand being an ADDR_EXPR or a
10036 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10037 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10038 && (TREE_CODE (arg0) == ADDR_EXPR
10039 || TREE_CODE (arg1) == ADDR_EXPR
10040 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10041 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10043 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10044 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10045 machine_mode mode;
10046 int volatilep, reversep, unsignedp;
10047 bool indirect_base0 = false, indirect_base1 = false;
10049 /* Get base and offset for the access. Strip ADDR_EXPR for
10050 get_inner_reference, but put it back by stripping INDIRECT_REF
10051 off the base object if possible. indirect_baseN will be true
10052 if baseN is not an address but refers to the object itself. */
10053 base0 = arg0;
10054 if (TREE_CODE (arg0) == ADDR_EXPR)
10056 base0
10057 = get_inner_reference (TREE_OPERAND (arg0, 0),
10058 &bitsize, &bitpos0, &offset0, &mode,
10059 &unsignedp, &reversep, &volatilep);
10060 if (INDIRECT_REF_P (base0))
10061 base0 = TREE_OPERAND (base0, 0);
10062 else
10063 indirect_base0 = true;
10065 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10067 base0 = TREE_OPERAND (arg0, 0);
10068 STRIP_SIGN_NOPS (base0);
10069 if (TREE_CODE (base0) == ADDR_EXPR)
10071 base0
10072 = get_inner_reference (TREE_OPERAND (base0, 0),
10073 &bitsize, &bitpos0, &offset0, &mode,
10074 &unsignedp, &reversep, &volatilep);
10075 if (INDIRECT_REF_P (base0))
10076 base0 = TREE_OPERAND (base0, 0);
10077 else
10078 indirect_base0 = true;
10080 if (offset0 == NULL_TREE || integer_zerop (offset0))
10081 offset0 = TREE_OPERAND (arg0, 1);
10082 else
10083 offset0 = size_binop (PLUS_EXPR, offset0,
10084 TREE_OPERAND (arg0, 1));
10085 if (poly_int_tree_p (offset0))
10087 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10088 TYPE_PRECISION (sizetype));
10089 tem <<= LOG2_BITS_PER_UNIT;
10090 tem += bitpos0;
10091 if (tem.to_shwi (&bitpos0))
10092 offset0 = NULL_TREE;
10096 base1 = arg1;
10097 if (TREE_CODE (arg1) == ADDR_EXPR)
10099 base1
10100 = get_inner_reference (TREE_OPERAND (arg1, 0),
10101 &bitsize, &bitpos1, &offset1, &mode,
10102 &unsignedp, &reversep, &volatilep);
10103 if (INDIRECT_REF_P (base1))
10104 base1 = TREE_OPERAND (base1, 0);
10105 else
10106 indirect_base1 = true;
10108 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10110 base1 = TREE_OPERAND (arg1, 0);
10111 STRIP_SIGN_NOPS (base1);
10112 if (TREE_CODE (base1) == ADDR_EXPR)
10114 base1
10115 = get_inner_reference (TREE_OPERAND (base1, 0),
10116 &bitsize, &bitpos1, &offset1, &mode,
10117 &unsignedp, &reversep, &volatilep);
10118 if (INDIRECT_REF_P (base1))
10119 base1 = TREE_OPERAND (base1, 0);
10120 else
10121 indirect_base1 = true;
10123 if (offset1 == NULL_TREE || integer_zerop (offset1))
10124 offset1 = TREE_OPERAND (arg1, 1);
10125 else
10126 offset1 = size_binop (PLUS_EXPR, offset1,
10127 TREE_OPERAND (arg1, 1));
10128 if (poly_int_tree_p (offset1))
10130 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10131 TYPE_PRECISION (sizetype));
10132 tem <<= LOG2_BITS_PER_UNIT;
10133 tem += bitpos1;
10134 if (tem.to_shwi (&bitpos1))
10135 offset1 = NULL_TREE;
10139 /* If we have equivalent bases we might be able to simplify. */
10140 if (indirect_base0 == indirect_base1
10141 && operand_equal_p (base0, base1,
10142 indirect_base0 ? OEP_ADDRESS_OF : 0))
10144 /* We can fold this expression to a constant if the non-constant
10145 offset parts are equal. */
10146 if ((offset0 == offset1
10147 || (offset0 && offset1
10148 && operand_equal_p (offset0, offset1, 0)))
10149 && (equality_code
10150 || (indirect_base0
10151 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10152 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10154 if (!equality_code
10155 && maybe_ne (bitpos0, bitpos1)
10156 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10157 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10158 fold_overflow_warning (("assuming pointer wraparound does not "
10159 "occur when comparing P +- C1 with "
10160 "P +- C2"),
10161 WARN_STRICT_OVERFLOW_CONDITIONAL);
10163 switch (code)
10165 case EQ_EXPR:
10166 if (known_eq (bitpos0, bitpos1))
10167 return constant_boolean_node (true, type);
10168 if (known_ne (bitpos0, bitpos1))
10169 return constant_boolean_node (false, type);
10170 break;
10171 case NE_EXPR:
10172 if (known_ne (bitpos0, bitpos1))
10173 return constant_boolean_node (true, type);
10174 if (known_eq (bitpos0, bitpos1))
10175 return constant_boolean_node (false, type);
10176 break;
10177 case LT_EXPR:
10178 if (known_lt (bitpos0, bitpos1))
10179 return constant_boolean_node (true, type);
10180 if (known_ge (bitpos0, bitpos1))
10181 return constant_boolean_node (false, type);
10182 break;
10183 case LE_EXPR:
10184 if (known_le (bitpos0, bitpos1))
10185 return constant_boolean_node (true, type);
10186 if (known_gt (bitpos0, bitpos1))
10187 return constant_boolean_node (false, type);
10188 break;
10189 case GE_EXPR:
10190 if (known_ge (bitpos0, bitpos1))
10191 return constant_boolean_node (true, type);
10192 if (known_lt (bitpos0, bitpos1))
10193 return constant_boolean_node (false, type);
10194 break;
10195 case GT_EXPR:
10196 if (known_gt (bitpos0, bitpos1))
10197 return constant_boolean_node (true, type);
10198 if (known_le (bitpos0, bitpos1))
10199 return constant_boolean_node (false, type);
10200 break;
10201 default:;
10204 /* We can simplify the comparison to a comparison of the variable
10205 offset parts if the constant offset parts are equal.
10206 Be careful to use signed sizetype here because otherwise we
10207 mess with array offsets in the wrong way. This is possible
10208 because pointer arithmetic is restricted to remain within an
10209 object and overflow on pointer differences is undefined as of
10210 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10211 else if (known_eq (bitpos0, bitpos1)
10212 && (equality_code
10213 || (indirect_base0
10214 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10215 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10217 /* By converting to signed sizetype we cover middle-end pointer
10218 arithmetic which operates on unsigned pointer types of size
10219 type size and ARRAY_REF offsets which are properly sign or
10220 zero extended from their type in case it is narrower than
10221 sizetype. */
10222 if (offset0 == NULL_TREE)
10223 offset0 = build_int_cst (ssizetype, 0);
10224 else
10225 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10226 if (offset1 == NULL_TREE)
10227 offset1 = build_int_cst (ssizetype, 0);
10228 else
10229 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10231 if (!equality_code
10232 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10233 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10234 fold_overflow_warning (("assuming pointer wraparound does not "
10235 "occur when comparing P +- C1 with "
10236 "P +- C2"),
10237 WARN_STRICT_OVERFLOW_COMPARISON);
10239 return fold_build2_loc (loc, code, type, offset0, offset1);
10242 /* For equal offsets we can simplify to a comparison of the
10243 base addresses. */
10244 else if (known_eq (bitpos0, bitpos1)
10245 && (indirect_base0
10246 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10247 && (indirect_base1
10248 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10249 && ((offset0 == offset1)
10250 || (offset0 && offset1
10251 && operand_equal_p (offset0, offset1, 0))))
10253 if (indirect_base0)
10254 base0 = build_fold_addr_expr_loc (loc, base0);
10255 if (indirect_base1)
10256 base1 = build_fold_addr_expr_loc (loc, base1);
10257 return fold_build2_loc (loc, code, type, base0, base1);
10259 /* Comparison between an ordinary (non-weak) symbol and a null
10260 pointer can be eliminated since such symbols must have a non
10261 null address. In C, relational expressions between pointers
10262 to objects and null pointers are undefined. The results
10263 below follow the C++ rules with the additional property that
10264 every object pointer compares greater than a null pointer. */
10266 else if (((DECL_P (base0)
10267 && maybe_nonzero_address (base0) > 0
10268 /* Avoid folding references to struct members at offset 0 to
10269 prevent tests like '&ptr->firstmember == 0' from getting
10270 eliminated. When ptr is null, although the -> expression
10271 is strictly speaking invalid, GCC retains it as a matter
10272 of QoI. See PR c/44555. */
10273 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10274 || CONSTANT_CLASS_P (base0))
10275 && indirect_base0
10276 /* The caller guarantees that when one of the arguments is
10277 constant (i.e., null in this case) it is second. */
10278 && integer_zerop (arg1))
10280 switch (code)
10282 case EQ_EXPR:
10283 case LE_EXPR:
10284 case LT_EXPR:
10285 return constant_boolean_node (false, type);
10286 case GE_EXPR:
10287 case GT_EXPR:
10288 case NE_EXPR:
10289 return constant_boolean_node (true, type);
10290 default:
10291 gcc_unreachable ();
10296 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10297 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10298 the resulting offset is smaller in absolute value than the
10299 original one and has the same sign. */
10300 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10301 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10302 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10303 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10304 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10305 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10306 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10307 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10309 tree const1 = TREE_OPERAND (arg0, 1);
10310 tree const2 = TREE_OPERAND (arg1, 1);
10311 tree variable1 = TREE_OPERAND (arg0, 0);
10312 tree variable2 = TREE_OPERAND (arg1, 0);
10313 tree cst;
10314 const char * const warnmsg = G_("assuming signed overflow does not "
10315 "occur when combining constants around "
10316 "a comparison");
10318 /* Put the constant on the side where it doesn't overflow and is
10319 of lower absolute value and of the same sign as before. */
10320 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10321 ? MINUS_EXPR : PLUS_EXPR,
10322 const2, const1);
10323 if (!TREE_OVERFLOW (cst)
10324 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10325 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10327 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10328 return fold_build2_loc (loc, code, type,
10329 variable1,
10330 fold_build2_loc (loc, TREE_CODE (arg1),
10331 TREE_TYPE (arg1),
10332 variable2, cst));
10335 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10336 ? MINUS_EXPR : PLUS_EXPR,
10337 const1, const2);
10338 if (!TREE_OVERFLOW (cst)
10339 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10340 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10342 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10343 return fold_build2_loc (loc, code, type,
10344 fold_build2_loc (loc, TREE_CODE (arg0),
10345 TREE_TYPE (arg0),
10346 variable1, cst),
10347 variable2);
10351 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10352 if (tem)
10353 return tem;
10355 /* If we are comparing an expression that just has comparisons
10356 of two integer values, arithmetic expressions of those comparisons,
10357 and constants, we can simplify it. There are only three cases
10358 to check: the two values can either be equal, the first can be
10359 greater, or the second can be greater. Fold the expression for
10360 those three values. Since each value must be 0 or 1, we have
10361 eight possibilities, each of which corresponds to the constant 0
10362 or 1 or one of the six possible comparisons.
10364 This handles common cases like (a > b) == 0 but also handles
10365 expressions like ((x > y) - (y > x)) > 0, which supposedly
10366 occur in macroized code. */
10368 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10370 tree cval1 = 0, cval2 = 0;
10372 if (twoval_comparison_p (arg0, &cval1, &cval2)
10373 /* Don't handle degenerate cases here; they should already
10374 have been handled anyway. */
10375 && cval1 != 0 && cval2 != 0
10376 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10377 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10378 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10379 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10380 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10381 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10382 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10384 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10385 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10387 /* We can't just pass T to eval_subst in case cval1 or cval2
10388 was the same as ARG1. */
10390 tree high_result
10391 = fold_build2_loc (loc, code, type,
10392 eval_subst (loc, arg0, cval1, maxval,
10393 cval2, minval),
10394 arg1);
10395 tree equal_result
10396 = fold_build2_loc (loc, code, type,
10397 eval_subst (loc, arg0, cval1, maxval,
10398 cval2, maxval),
10399 arg1);
10400 tree low_result
10401 = fold_build2_loc (loc, code, type,
10402 eval_subst (loc, arg0, cval1, minval,
10403 cval2, maxval),
10404 arg1);
10406 /* All three of these results should be 0 or 1. Confirm they are.
10407 Then use those values to select the proper code to use. */
10409 if (TREE_CODE (high_result) == INTEGER_CST
10410 && TREE_CODE (equal_result) == INTEGER_CST
10411 && TREE_CODE (low_result) == INTEGER_CST)
10413 /* Make a 3-bit mask with the high-order bit being the
10414 value for `>', the next for '=', and the low for '<'. */
10415 switch ((integer_onep (high_result) * 4)
10416 + (integer_onep (equal_result) * 2)
10417 + integer_onep (low_result))
10419 case 0:
10420 /* Always false. */
10421 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10422 case 1:
10423 code = LT_EXPR;
10424 break;
10425 case 2:
10426 code = EQ_EXPR;
10427 break;
10428 case 3:
10429 code = LE_EXPR;
10430 break;
10431 case 4:
10432 code = GT_EXPR;
10433 break;
10434 case 5:
10435 code = NE_EXPR;
10436 break;
10437 case 6:
10438 code = GE_EXPR;
10439 break;
10440 case 7:
10441 /* Always true. */
10442 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10445 return fold_build2_loc (loc, code, type, cval1, cval2);
10450 return NULL_TREE;
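
/* A standalone sketch (plain C++) of the 3-bit dispatch above, for the
   expression (a > b) == 0: it evaluates to 0 when a > b and to 1 when
   a == b or a < b, so the mask is 0*4 + 1*2 + 1 == 3, which selects
   LE_EXPR and folds the whole expression to a <= b.  */

#include <cassert>

int
main ()
{
  int high_result = 0;	/* (a > b) == 0 when a > b.  */
  int equal_result = 1;	/* (a > b) == 0 when a == b.  */
  int low_result = 1;	/* (a > b) == 0 when a < b.  */
  int mask = high_result * 4 + equal_result * 2 + low_result;
  assert (mask == 3);	/* Case 3 selects LE_EXPR.  */
  return 0;
}
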
10454 /* Subroutine of fold_binary. Optimize complex multiplications of the
10455 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10456 argument EXPR represents the expression "z" of type TYPE. */
10458 static tree
10459 fold_mult_zconjz (location_t loc, tree type, tree expr)
10461 tree itype = TREE_TYPE (type);
10462 tree rpart, ipart, tem;
10464 if (TREE_CODE (expr) == COMPLEX_EXPR)
10466 rpart = TREE_OPERAND (expr, 0);
10467 ipart = TREE_OPERAND (expr, 1);
10469 else if (TREE_CODE (expr) == COMPLEX_CST)
10471 rpart = TREE_REALPART (expr);
10472 ipart = TREE_IMAGPART (expr);
10474 else
10476 expr = save_expr (expr);
10477 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10478 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10481 rpart = save_expr (rpart);
10482 ipart = save_expr (ipart);
10483 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10484 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10485 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10486 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10487 build_zero_cst (itype));
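
/* A standalone check (plain C++, exact for small integer components) of
   the identity used above: z * conj(z) == realpart(z)^2 + imagpart(z)^2
   with a zero imaginary part.  */

#include <cassert>
#include <complex>

int
main ()
{
  std::complex<double> z (3.0, 4.0);
  std::complex<double> prod = z * std::conj (z);
  assert (prod == std::complex<double> (3.0 * 3.0 + 4.0 * 4.0, 0.0));
  return 0;
}
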
10491 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10492 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10493 true if successful. */
10495 static bool
10496 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10498 unsigned HOST_WIDE_INT i, nunits;
10500 if (TREE_CODE (arg) == VECTOR_CST
10501 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10503 for (i = 0; i < nunits; ++i)
10504 elts[i] = VECTOR_CST_ELT (arg, i);
10506 else if (TREE_CODE (arg) == CONSTRUCTOR)
10508 constructor_elt *elt;
10510 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10511 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10512 return false;
10513 else
10514 elts[i] = elt->value;
10516 else
10517 return false;
10518 for (; i < nelts; i++)
10519 elts[i]
10520 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10521 return true;
10524 /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10525 mask for VLA vec_perm folding.
10526 REASON, if specified, will contain the reason why SEL is not suitable.
10527 Used only for debugging and unit-testing. */
10529 static bool
10530 valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10531 const vec_perm_indices &sel,
10532 const char **reason = NULL)
10534 unsigned sel_npatterns = sel.encoding ().npatterns ();
10535 unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10537 if (!(pow2p_hwi (sel_npatterns)
10538 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10539 && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10541 if (reason)
10542 *reason = "npatterns is not power of 2";
10543 return false;
10546 /* We want to avoid cases where sel.length is not a multiple of npatterns.
10547 For example: sel.length = 2 + 2x and sel_npatterns = 4. */
10548 poly_uint64 esel;
10549 if (!multiple_p (sel.length (), sel_npatterns, &esel))
10551 if (reason)
10552 *reason = "sel.length is not multiple of sel_npatterns";
10553 return false;
10556 if (sel_nelts_per_pattern < 3)
10557 return true;
10559 for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10561 poly_uint64 a1 = sel[pattern + sel_npatterns];
10562 poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10563 HOST_WIDE_INT step;
10564 if (!poly_int64 (a2 - a1).is_constant (&step))
10566 if (reason)
10567 *reason = "step is not constant";
10568 return false;
10570 // FIXME: Punt on step < 0 for now, revisit later.
10571 if (step < 0)
10572 return false;
10573 if (step == 0)
10574 continue;
10576 if (!pow2p_hwi (step))
10578 if (reason)
10579 *reason = "step is not power of 2";
10580 return false;
10583 /* Ensure that the stepped sequence of the pattern selects elements
10584 only from the same input vector. */
10585 uint64_t q1, qe;
10586 poly_uint64 r1, re;
10587 poly_uint64 ae = a1 + (esel - 2) * step;
10588 poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10590 if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10591 && can_div_trunc_p (ae, arg_len, &qe, &re)
10592 && q1 == qe))
10594 if (reason)
10595 *reason = "crossed input vectors";
10596 return false;
10599 /* Ensure that the stepped sequence always selects from the same
10600 input pattern. */
10601 unsigned arg_npatterns
10602 = ((q1 & 1) == 0) ? VECTOR_CST_NPATTERNS (arg0)
10603 : VECTOR_CST_NPATTERNS (arg1);
10605 if (!multiple_p (step, arg_npatterns))
10607 if (reason)
10608 *reason = "step is not multiple of npatterns";
10609 return false;
10613 return true;
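
/* A standalone sketch (plain C++, a fixed length standing in for the poly
   length) of the "crossed input vectors" rejection above: the second and
   last elements of a stepped pattern must select from the same input.  */

#include <cassert>

int
main ()
{
  unsigned arg_len = 4;                  /* TYPE_VECTOR_SUBPARTS (arg0).  */
  unsigned a1 = 1, step = 2, esel = 4;
  unsigned ae = a1 + (esel - 2) * step;  /* Last selector element: 5.  */
  unsigned q1 = a1 / arg_len;            /* 0: element from arg0.  */
  unsigned qe = ae / arg_len;            /* 1: element from arg1.  */
  assert (q1 != qe);                     /* So this mask is rejected.  */
  return 0;
}
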
10616 /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10617 the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10618 REASON has same purpose as described in
10619 valid_mask_for_fold_vec_perm_cst_p. */
10621 static tree
10622 fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10623 const char **reason = NULL)
10625 unsigned res_npatterns, res_nelts_per_pattern;
10626 unsigned HOST_WIDE_INT res_nelts;
10628 /* (1) If SEL is a suitable mask as determined by
10629 valid_mask_for_fold_vec_perm_cst_p, then:
10630 res_npatterns = max of npatterns between ARG0, ARG1, and SEL
10631 res_nelts_per_pattern = max of nelts_per_pattern between
10632 ARG0, ARG1 and SEL.
10633 (2) If SEL is not a suitable mask, and TYPE is VLS then:
10634 res_npatterns = nelts in result vector.
10635 res_nelts_per_pattern = 1.
10636 This exception is made so that VLS ARG0, ARG1 and SEL work as before. */
10637 if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10639 res_npatterns
10640 = std::max (VECTOR_CST_NPATTERNS (arg0),
10641 std::max (VECTOR_CST_NPATTERNS (arg1),
10642 sel.encoding ().npatterns ()));
10644 res_nelts_per_pattern
10645 = std::max (VECTOR_CST_NELTS_PER_PATTERN (arg0),
10646 std::max (VECTOR_CST_NELTS_PER_PATTERN (arg1),
10647 sel.encoding ().nelts_per_pattern ()));
10649 res_nelts = res_npatterns * res_nelts_per_pattern;
10651 else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10653 res_npatterns = res_nelts;
10654 res_nelts_per_pattern = 1;
10656 else
10657 return NULL_TREE;
10659 tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10660 for (unsigned i = 0; i < res_nelts; i++)
10662 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10663 uint64_t q;
10664 poly_uint64 r;
10665 unsigned HOST_WIDE_INT index;
10667 /* Punt if sel[i] / len (trunc_div) cannot be determined,
10668 because the input vector to be chosen would depend on the
10669 runtime vector length.
10670 For example, if len == 4 + 4x and sel[i] == 4: if len at
10671 runtime equals 4, we choose arg1[0]; for any other runtime
10672 value of len > 4, we choose arg0[4]. This makes the element
10673 choice dependent on the runtime vector length. */
10674 if (!can_div_trunc_p (sel[i], len, &q, &r))
10676 if (reason)
10677 *reason = "cannot divide selector element by arg len";
10678 return NULL_TREE;
10681 /* sel[i] % len will give the index of element in the chosen input
10682 vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10683 we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10684 if (!r.is_constant (&index))
10686 if (reason)
10687 *reason = "remainder is not constant";
10688 return NULL_TREE;
10691 tree arg = ((q & 1) == 0) ? arg0 : arg1;
10692 tree elem = vector_cst_elt (arg, index);
10693 out_elts.quick_push (elem);
10696 return out_elts.build ();
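
/* A standalone sketch (plain C++, fixed lengths) of the element selection
   above: selector value S with input length LEN picks input (S / LEN) & 1
   and element S % LEN within it.  */

#include <cassert>

int
main ()
{
  const unsigned len = 4;
  int arg0[len] = { 10, 11, 12, 13 };
  int arg1[len] = { 20, 21, 22, 23 };
  unsigned sel = 5;                         /* 5 / 4 == 1, 5 % 4 == 1.  */
  const int *arg = ((sel / len) & 1) == 0 ? arg0 : arg1;
  assert (arg[sel % len] == 21);            /* arg1[1].  */
  return 0;
}
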
10699 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10700 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10701 NULL_TREE otherwise. */
10703 tree
10704 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10706 unsigned int i;
10707 unsigned HOST_WIDE_INT nelts;
10709 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10710 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10711 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10713 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10714 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10715 return NULL_TREE;
10717 if (TREE_CODE (arg0) == VECTOR_CST
10718 && TREE_CODE (arg1) == VECTOR_CST)
10719 return fold_vec_perm_cst (type, arg0, arg1, sel);
10721 /* For the fallback case, we want to ensure we have VLS vectors
10722 of equal length. */
10723 if (!sel.length ().is_constant (&nelts))
10724 return NULL_TREE;
10726 gcc_assert (known_eq (sel.length (),
10727 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10728 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10729 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10730 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10731 return NULL_TREE;
10733 vec<constructor_elt, va_gc> *v;
10734 vec_alloc (v, nelts);
10735 for (i = 0; i < nelts; i++)
10737 HOST_WIDE_INT index;
10738 if (!sel[i].is_constant (&index))
10739 return NULL_TREE;
10740 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10742 return build_constructor (type, v);
10745 /* Try to fold a pointer difference of type TYPE between two address
10746 expressions of array references AREF0 and AREF1 using location LOC.
10747 Return a simplified expression for the difference or NULL_TREE. */
10749 static tree
10750 fold_addr_of_array_ref_difference (location_t loc, tree type,
10751 tree aref0, tree aref1,
10752 bool use_pointer_diff)
10754 tree base0 = TREE_OPERAND (aref0, 0);
10755 tree base1 = TREE_OPERAND (aref1, 0);
10756 tree base_offset = build_int_cst (type, 0);
10758 /* If the bases are array references as well, recurse. If the bases
10759 are pointer indirections compute the difference of the pointers.
10760 If the bases are equal, we are set. */
10761 if ((TREE_CODE (base0) == ARRAY_REF
10762 && TREE_CODE (base1) == ARRAY_REF
10763 && (base_offset
10764 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10765 use_pointer_diff)))
10766 || (INDIRECT_REF_P (base0)
10767 && INDIRECT_REF_P (base1)
10768 && (base_offset
10769 = use_pointer_diff
10770 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10771 TREE_OPERAND (base0, 0),
10772 TREE_OPERAND (base1, 0))
10773 : fold_binary_loc (loc, MINUS_EXPR, type,
10774 fold_convert (type,
10775 TREE_OPERAND (base0, 0)),
10776 fold_convert (type,
10777 TREE_OPERAND (base1, 0)))))
10778 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10780 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10781 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10782 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10783 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10784 return fold_build2_loc (loc, PLUS_EXPR, type,
10785 base_offset,
10786 fold_build2_loc (loc, MULT_EXPR, type,
10787 diff, esz));
10789 return NULL_TREE;
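
/* A standalone check (plain C++) of the difference above for a common
   base: &a[i] - &a[j] reduces to i - j, or to (i - j) * sizeof (element)
   at the byte level.  */

#include <cassert>
#include <cstddef>

int
main ()
{
  double a[16];
  std::ptrdiff_t i = 11, j = 4;
  assert (&a[i] - &a[j] == i - j);
  assert ((char *) &a[i] - (char *) &a[j]
	  == (std::ptrdiff_t) sizeof (double) * (i - j));
  return 0;
}
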
10792 /* If the real or vector real constant CST of type TYPE has an exact
10793 inverse, return it, else return NULL. */
10795 tree
10796 exact_inverse (tree type, tree cst)
10798 REAL_VALUE_TYPE r;
10799 tree unit_type;
10800 machine_mode mode;
10802 switch (TREE_CODE (cst))
10804 case REAL_CST:
10805 r = TREE_REAL_CST (cst);
10807 if (exact_real_inverse (TYPE_MODE (type), &r))
10808 return build_real (type, r);
10810 return NULL_TREE;
10812 case VECTOR_CST:
10814 unit_type = TREE_TYPE (type);
10815 mode = TYPE_MODE (unit_type);
10817 tree_vector_builder elts;
10818 if (!elts.new_unary_operation (type, cst, false))
10819 return NULL_TREE;
10820 unsigned int count = elts.encoded_nelts ();
10821 for (unsigned int i = 0; i < count; ++i)
10823 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10824 if (!exact_real_inverse (mode, &r))
10825 return NULL_TREE;
10826 elts.quick_push (build_real (unit_type, r));
10829 return elts.build ();
10832 default:
10833 return NULL_TREE;
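
/* A standalone sketch (plain C++) of what "exact inverse" means above:
   1/c is representable without rounding only when c is a power of two,
   i.e. its frexp mantissa is exactly 0.5. Note that exact_real_inverse
   also guards against out-of-range and denormal results, which this
   simplified test ignores.  */

#include <cassert>
#include <cmath>

static bool
has_exact_inverse (double c)
{
  int e;
  return std::frexp (std::fabs (c), &e) == 0.5;
}

int
main ()
{
  assert (has_exact_inverse (4.0));   /* Inverse is exactly 0.25.  */
  assert (!has_exact_inverse (3.0));  /* 1/3 rounds, so no fold.  */
  return 0;
}
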
10837 /* Mask out the tz least significant bits of X of type TYPE where
10838 tz is the number of trailing zeroes in Y. */
10839 static wide_int
10840 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10842 int tz = wi::ctz (y);
10843 if (tz > 0)
10844 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10845 return x;
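
/* A standalone check (plain C++) of the helper above: if Y has tz
   trailing zero bits, the low tz bits of X cannot affect X & Y, so they
   can be masked away first.  */

#include <cassert>

int
main ()
{
  unsigned y = 24;                         /* 0b11000, so tz == 3.  */
  unsigned x = 23;                         /* 0b10111.  */
  unsigned masked = x & ~((1u << 3) - 1);  /* mask_with_tz: 0b10000.  */
  assert ((x & y) == (masked & y));
  return 0;
}
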
10848 /* Return true when T is an address and is known to be nonzero.
10849 For floating point we further ensure that T is not denormal.
10850 Similar logic is present in nonzero_address in rtlanal.h.
10852 If the return value is based on the assumption that signed overflow
10853 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10854 change *STRICT_OVERFLOW_P. */
10856 static bool
10857 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10859 tree type = TREE_TYPE (t);
10860 enum tree_code code;
10862 /* Doing something useful for floating point would need more work. */
10863 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10864 return false;
10866 code = TREE_CODE (t);
10867 switch (TREE_CODE_CLASS (code))
10869 case tcc_unary:
10870 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10871 strict_overflow_p);
10872 case tcc_binary:
10873 case tcc_comparison:
10874 return tree_binary_nonzero_warnv_p (code, type,
10875 TREE_OPERAND (t, 0),
10876 TREE_OPERAND (t, 1),
10877 strict_overflow_p);
10878 case tcc_constant:
10879 case tcc_declaration:
10880 case tcc_reference:
10881 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10883 default:
10884 break;
10887 switch (code)
10889 case TRUTH_NOT_EXPR:
10890 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10891 strict_overflow_p);
10893 case TRUTH_AND_EXPR:
10894 case TRUTH_OR_EXPR:
10895 case TRUTH_XOR_EXPR:
10896 return tree_binary_nonzero_warnv_p (code, type,
10897 TREE_OPERAND (t, 0),
10898 TREE_OPERAND (t, 1),
10899 strict_overflow_p);
10901 case COND_EXPR:
10902 case CONSTRUCTOR:
10903 case OBJ_TYPE_REF:
10904 case ADDR_EXPR:
10905 case WITH_SIZE_EXPR:
10906 case SSA_NAME:
10907 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10909 case COMPOUND_EXPR:
10910 case MODIFY_EXPR:
10911 case BIND_EXPR:
10912 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10913 strict_overflow_p);
10915 case SAVE_EXPR:
10916 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10917 strict_overflow_p);
10919 case CALL_EXPR:
10921 tree fndecl = get_callee_fndecl (t);
10922 if (!fndecl) return false;
10923 if (flag_delete_null_pointer_checks && !flag_check_new
10924 && DECL_IS_OPERATOR_NEW_P (fndecl)
10925 && !TREE_NOTHROW (fndecl))
10926 return true;
10927 if (flag_delete_null_pointer_checks
10928 && lookup_attribute ("returns_nonnull",
10929 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10930 return true;
10931 return alloca_call_p (t);
10934 default:
10935 break;
10937 return false;
10940 /* Return true when T is an address and is known to be nonzero.
10941 Handle warnings about undefined signed overflow. */
10943 bool
10944 tree_expr_nonzero_p (tree t)
10946 bool ret, strict_overflow_p;
10948 strict_overflow_p = false;
10949 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10950 if (strict_overflow_p)
10951 fold_overflow_warning (("assuming signed overflow does not occur when "
10952 "determining that expression is always "
10953 "non-zero"),
10954 WARN_STRICT_OVERFLOW_MISC);
10955 return ret;
10958 /* Return true if T is known not to be equal to an integer W. */
10960 bool
10961 expr_not_equal_to (tree t, const wide_int &w)
10963 int_range_max vr;
10964 switch (TREE_CODE (t))
10966 case INTEGER_CST:
10967 return wi::to_wide (t) != w;
10969 case SSA_NAME:
10970 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10971 return false;
10973 if (cfun)
10974 get_range_query (cfun)->range_of_expr (vr, t);
10975 else
10976 get_global_range_query ()->range_of_expr (vr, t);
10978 if (!vr.undefined_p () && !vr.contains_p (w))
10979 return true;
10980 /* If T has some known zero bits and W has any of those bits set,
10981 then T is known not to be equal to W. */
10982 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10983 TYPE_PRECISION (TREE_TYPE (t))), 0))
10984 return true;
10985 return false;
10987 default:
10988 return false;
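/* Illustrative example: if T is an SSA name whose nonzero-bits mask is
   ...110 (bit 0 known to be clear) and W is 5 (binary 101), then W has
   bit 0 set where T is known to be zero, so T cannot equal W and the
   function returns true.  */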
10992 /* Fold a binary expression of code CODE and type TYPE with operands
10993 OP0 and OP1. LOC is the location of the resulting expression.
10994 Return the folded expression if folding is successful. Otherwise,
10995 return NULL_TREE. */
10997 tree
10998 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10999 tree op0, tree op1)
11001 enum tree_code_class kind = TREE_CODE_CLASS (code);
11002 tree arg0, arg1, tem;
11003 tree t1 = NULL_TREE;
11004 bool strict_overflow_p;
11005 unsigned int prec;
11007 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11008 && TREE_CODE_LENGTH (code) == 2
11009 && op0 != NULL_TREE
11010 && op1 != NULL_TREE);
11012 arg0 = op0;
11013 arg1 = op1;
11015 /* Strip any conversions that don't change the mode. This is
11016 safe for every expression, except for a comparison expression
11017 because its signedness is derived from its operands. So, in
11018 the latter case, only strip conversions that don't change the
11019 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11020 preserved.
11022 Note that this is done as an internal manipulation within the
11023 constant folder, in order to find the simplest representation
11024 of the arguments so that their form can be studied. In any
11025 case, the appropriate type conversions should be put back in
11026 the tree that will get out of the constant folder. */
11028 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11030 STRIP_SIGN_NOPS (arg0);
11031 STRIP_SIGN_NOPS (arg1);
11033 else
11035 STRIP_NOPS (arg0);
11036 STRIP_NOPS (arg1);
11039 /* Note that TREE_CONSTANT isn't enough: static var addresses are
11040 constant but we can't do arithmetic on them. */
11041 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11043 tem = const_binop (code, type, arg0, arg1);
11044 if (tem != NULL_TREE)
11046 if (TREE_TYPE (tem) != type)
11047 tem = fold_convert_loc (loc, type, tem);
11048 return tem;
11052 /* If this is a commutative operation, and ARG0 is a constant, move it
11053 to ARG1 to reduce the number of tests below. */
11054 if (commutative_tree_code (code)
11055 && tree_swap_operands_p (arg0, arg1))
11056 return fold_build2_loc (loc, code, type, op1, op0);
11058 /* Likewise if this is a comparison, and ARG0 is a constant, move it
11059 to ARG1 to reduce the number of tests below. */
11060 if (kind == tcc_comparison
11061 && tree_swap_operands_p (arg0, arg1))
11062 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11064 tem = generic_simplify (loc, code, type, op0, op1);
11065 if (tem)
11066 return tem;
11068 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11070 First check for cases where an arithmetic operation is applied to a
11071 compound, conditional, or comparison operation. Push the arithmetic
11072 operation inside the compound or conditional to see if any folding
11073 can then be done. Convert comparison to conditional for this purpose.
11074 This also optimizes non-constant cases that used to be done in
11075 expand_expr.
11077 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11078 where one operand is a truth value and the other is a truth value or a
11079 BIT_AND_EXPR with the constant 1. In that case, the
11080 code below would make the expression more complex. Change it to a
11081 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11082 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11084 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11085 || code == EQ_EXPR || code == NE_EXPR)
11086 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11087 && ((truth_value_p (TREE_CODE (arg0))
11088 && (truth_value_p (TREE_CODE (arg1))
11089 || (TREE_CODE (arg1) == BIT_AND_EXPR
11090 && integer_onep (TREE_OPERAND (arg1, 1)))))
11091 || (truth_value_p (TREE_CODE (arg1))
11092 && (truth_value_p (TREE_CODE (arg0))
11093 || (TREE_CODE (arg0) == BIT_AND_EXPR
11094 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11096 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11097 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11098 : TRUTH_XOR_EXPR,
11099 boolean_type_node,
11100 fold_convert_loc (loc, boolean_type_node, arg0),
11101 fold_convert_loc (loc, boolean_type_node, arg1));
11103 if (code == EQ_EXPR)
11104 tem = invert_truthvalue_loc (loc, tem);
11106 return fold_convert_loc (loc, type, tem);
11109 if (TREE_CODE_CLASS (code) == tcc_binary
11110 || TREE_CODE_CLASS (code) == tcc_comparison)
11112 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11114 tem = fold_build2_loc (loc, code, type,
11115 fold_convert_loc (loc, TREE_TYPE (op0),
11116 TREE_OPERAND (arg0, 1)), op1);
11117 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11118 tem);
11120 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11122 tem = fold_build2_loc (loc, code, type, op0,
11123 fold_convert_loc (loc, TREE_TYPE (op1),
11124 TREE_OPERAND (arg1, 1)));
11125 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11126 tem);
11129 if (TREE_CODE (arg0) == COND_EXPR
11130 || TREE_CODE (arg0) == VEC_COND_EXPR
11131 || COMPARISON_CLASS_P (arg0))
11133 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11134 arg0, arg1,
11135 /*cond_first_p=*/1);
11136 if (tem != NULL_TREE)
11137 return tem;
11140 if (TREE_CODE (arg1) == COND_EXPR
11141 || TREE_CODE (arg1) == VEC_COND_EXPR
11142 || COMPARISON_CLASS_P (arg1))
11144 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11145 arg1, arg0,
11146 /*cond_first_p=*/0);
11147 if (tem != NULL_TREE)
11148 return tem;
11152 switch (code)
11154 case MEM_REF:
11155 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11156 if (TREE_CODE (arg0) == ADDR_EXPR
11157 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11159 tree iref = TREE_OPERAND (arg0, 0);
11160 return fold_build2 (MEM_REF, type,
11161 TREE_OPERAND (iref, 0),
11162 int_const_binop (PLUS_EXPR, arg1,
11163 TREE_OPERAND (iref, 1)));
11166 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11167 if (TREE_CODE (arg0) == ADDR_EXPR
11168 && handled_component_p (TREE_OPERAND (arg0, 0)))
11170 tree base;
11171 poly_int64 coffset;
11172 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11173 &coffset);
11174 if (!base)
11175 return NULL_TREE;
11176 return fold_build2 (MEM_REF, type,
11177 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11178 int_const_binop (PLUS_EXPR, arg1,
11179 size_int (coffset)));
11182 return NULL_TREE;
11184 case POINTER_PLUS_EXPR:
11185 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11186 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11187 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11188 return fold_convert_loc (loc, type,
11189 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11190 fold_convert_loc (loc, sizetype,
11191 arg1),
11192 fold_convert_loc (loc, sizetype,
11193 arg0)));
11195 return NULL_TREE;
11197 case PLUS_EXPR:
11198 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11200 /* X + (X / CST) * -CST is X % CST. */
11201 if (TREE_CODE (arg1) == MULT_EXPR
11202 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11203 && operand_equal_p (arg0,
11204 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11206 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11207 tree cst1 = TREE_OPERAND (arg1, 1);
11208 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11209 cst1, cst0);
11210 if (sum && integer_zerop (sum))
11211 return fold_convert_loc (loc, type,
11212 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11213 TREE_TYPE (arg0), arg0,
11214 cst0));
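/* e.g. X + (X / 16) * -16 folds to X % 16 (illustrative; the two
   constants must sum to zero for the transform to fire). */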
11218 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11219 the constant 1. Make sure the type is not saturating and has the signedness of
11220 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11221 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11222 if ((TREE_CODE (arg0) == MULT_EXPR
11223 || TREE_CODE (arg1) == MULT_EXPR)
11224 && !TYPE_SATURATING (type)
11225 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11226 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11227 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11229 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11230 if (tem)
11231 return tem;
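/* Illustrative examples of what fold_plusminus_mult_expr can do here:
   A*4 + A*12 -> A*16, a*4 + b*4 -> (a + b)*4, and A*4 + A -> A*5,
   treating the bare A as A*1. */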
11234 if (! FLOAT_TYPE_P (type))
11236 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11237 (plus (plus (mult) (mult)) (foo)) so that we can
11238 take advantage of the factoring cases below. */
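/* e.g. (a*b + x) + c*d is rewritten to (a*b + c*d) + x (illustrative),
   so the two products sit side by side for the factoring code. */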
11239 if (ANY_INTEGRAL_TYPE_P (type)
11240 && TYPE_OVERFLOW_WRAPS (type)
11241 && (((TREE_CODE (arg0) == PLUS_EXPR
11242 || TREE_CODE (arg0) == MINUS_EXPR)
11243 && TREE_CODE (arg1) == MULT_EXPR)
11244 || ((TREE_CODE (arg1) == PLUS_EXPR
11245 || TREE_CODE (arg1) == MINUS_EXPR)
11246 && TREE_CODE (arg0) == MULT_EXPR)))
11248 tree parg0, parg1, parg, marg;
11249 enum tree_code pcode;
11251 if (TREE_CODE (arg1) == MULT_EXPR)
11252 parg = arg0, marg = arg1;
11253 else
11254 parg = arg1, marg = arg0;
11255 pcode = TREE_CODE (parg);
11256 parg0 = TREE_OPERAND (parg, 0);
11257 parg1 = TREE_OPERAND (parg, 1);
11258 STRIP_NOPS (parg0);
11259 STRIP_NOPS (parg1);
11261 if (TREE_CODE (parg0) == MULT_EXPR
11262 && TREE_CODE (parg1) != MULT_EXPR)
11263 return fold_build2_loc (loc, pcode, type,
11264 fold_build2_loc (loc, PLUS_EXPR, type,
11265 fold_convert_loc (loc, type,
11266 parg0),
11267 fold_convert_loc (loc, type,
11268 marg)),
11269 fold_convert_loc (loc, type, parg1));
11270 if (TREE_CODE (parg0) != MULT_EXPR
11271 && TREE_CODE (parg1) == MULT_EXPR)
11272 return
11273 fold_build2_loc (loc, PLUS_EXPR, type,
11274 fold_convert_loc (loc, type, parg0),
11275 fold_build2_loc (loc, pcode, type,
11276 fold_convert_loc (loc, type, marg),
11277 fold_convert_loc (loc, type,
11278 parg1)));
11281 else
11283 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11284 to __complex__ ( x, y ). This is not the same for SNaNs or
11285 if signed zeros are involved. */
11286 if (!HONOR_SNANS (arg0)
11287 && !HONOR_SIGNED_ZEROS (arg0)
11288 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11290 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11291 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11292 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11293 bool arg0rz = false, arg0iz = false;
11294 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11295 || (arg0i && (arg0iz = real_zerop (arg0i))))
11297 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11298 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11299 if (arg0rz && arg1i && real_zerop (arg1i))
11301 tree rp = arg1r ? arg1r
11302 : build1 (REALPART_EXPR, rtype, arg1);
11303 tree ip = arg0i ? arg0i
11304 : build1 (IMAGPART_EXPR, rtype, arg0);
11305 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11307 else if (arg0iz && arg1r && real_zerop (arg1r))
11309 tree rp = arg0r ? arg0r
11310 : build1 (REALPART_EXPR, rtype, arg0);
11311 tree ip = arg1i ? arg1i
11312 : build1 (IMAGPART_EXPR, rtype, arg1);
11313 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11318 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11319 We associate floats only if the user has specified
11320 -fassociative-math. */
11321 if (flag_associative_math
11322 && TREE_CODE (arg1) == PLUS_EXPR
11323 && TREE_CODE (arg0) != MULT_EXPR)
11325 tree tree10 = TREE_OPERAND (arg1, 0);
11326 tree tree11 = TREE_OPERAND (arg1, 1);
11327 if (TREE_CODE (tree11) == MULT_EXPR
11328 && TREE_CODE (tree10) == MULT_EXPR)
11330 tree tree0;
11331 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11332 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11335 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11336 We associate floats only if the user has specified
11337 -fassociative-math. */
11338 if (flag_associative_math
11339 && TREE_CODE (arg0) == PLUS_EXPR
11340 && TREE_CODE (arg1) != MULT_EXPR)
11342 tree tree00 = TREE_OPERAND (arg0, 0);
11343 tree tree01 = TREE_OPERAND (arg0, 1);
11344 if (TREE_CODE (tree01) == MULT_EXPR
11345 && TREE_CODE (tree00) == MULT_EXPR)
11347 tree tree0;
11348 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11349 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11354 bit_rotate:
11355 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11356 is a rotate of A by C1 bits. */
11357 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11358 is a rotate of A by B bits.
11359 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11360 though in this case CODE must be | and not + or ^, otherwise
11361 it doesn't return A when B is 0. */
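/* Illustrative examples, assuming a 32-bit unsigned A:
   (A << 3) + (A >> 29) -> A rotated left by 3
   (A << B) | (A >> (32 - B)) -> A rotated left by B
   (A << B) | (A >> (-B & 31)) -> A rotated left by B (| only; for
   + or ^ the B == 0 case would not yield A). */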
11363 enum tree_code code0, code1;
11364 tree rtype;
11365 code0 = TREE_CODE (arg0);
11366 code1 = TREE_CODE (arg1);
11367 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11368 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11369 && operand_equal_p (TREE_OPERAND (arg0, 0),
11370 TREE_OPERAND (arg1, 0), 0)
11371 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11372 TYPE_UNSIGNED (rtype))
11373 /* Only create rotates in complete modes. Other cases are not
11374 expanded properly. */
11375 && (element_precision (rtype)
11376 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11378 tree tree01, tree11;
11379 tree orig_tree01, orig_tree11;
11380 enum tree_code code01, code11;
11382 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11383 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11384 STRIP_NOPS (tree01);
11385 STRIP_NOPS (tree11);
11386 code01 = TREE_CODE (tree01);
11387 code11 = TREE_CODE (tree11);
11388 if (code11 != MINUS_EXPR
11389 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11391 std::swap (code0, code1);
11392 std::swap (code01, code11);
11393 std::swap (tree01, tree11);
11394 std::swap (orig_tree01, orig_tree11);
11396 if (code01 == INTEGER_CST
11397 && code11 == INTEGER_CST
11398 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11399 == element_precision (rtype)))
11401 tem = build2_loc (loc, LROTATE_EXPR,
11402 rtype, TREE_OPERAND (arg0, 0),
11403 code0 == LSHIFT_EXPR
11404 ? orig_tree01 : orig_tree11);
11405 return fold_convert_loc (loc, type, tem);
11407 else if (code11 == MINUS_EXPR)
11409 tree tree110, tree111;
11410 tree110 = TREE_OPERAND (tree11, 0);
11411 tree111 = TREE_OPERAND (tree11, 1);
11412 STRIP_NOPS (tree110);
11413 STRIP_NOPS (tree111);
11414 if (TREE_CODE (tree110) == INTEGER_CST
11415 && compare_tree_int (tree110,
11416 element_precision (rtype)) == 0
11417 && operand_equal_p (tree01, tree111, 0))
11419 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11420 ? LROTATE_EXPR : RROTATE_EXPR),
11421 rtype, TREE_OPERAND (arg0, 0),
11422 orig_tree01);
11423 return fold_convert_loc (loc, type, tem);
11426 else if (code == BIT_IOR_EXPR
11427 && code11 == BIT_AND_EXPR
11428 && pow2p_hwi (element_precision (rtype)))
11430 tree tree110, tree111;
11431 tree110 = TREE_OPERAND (tree11, 0);
11432 tree111 = TREE_OPERAND (tree11, 1);
11433 STRIP_NOPS (tree110);
11434 STRIP_NOPS (tree111);
11435 if (TREE_CODE (tree110) == NEGATE_EXPR
11436 && TREE_CODE (tree111) == INTEGER_CST
11437 && compare_tree_int (tree111,
11438 element_precision (rtype) - 1) == 0
11439 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11441 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11442 ? LROTATE_EXPR : RROTATE_EXPR),
11443 rtype, TREE_OPERAND (arg0, 0),
11444 orig_tree01);
11445 return fold_convert_loc (loc, type, tem);
11451 associate:
11452 /* In most languages, we can't associate operations on floats through
11453 parentheses. Rather than remember where the parentheses were, we
11454 don't associate floats at all, unless the user has specified
11455 -fassociative-math.
11456 And we need to make sure the type is not saturating. */
11458 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11459 && !TYPE_SATURATING (type)
11460 && !TYPE_OVERFLOW_SANITIZED (type))
11462 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11463 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11464 tree atype = type;
11465 bool ok = true;
11467 /* Split both trees into variables, constants, and literals. Then
11468 associate each group together, the constants with literals,
11469 then the result with variables. This increases the chances of
11470 literals being recombined later and of generating relocatable
11471 expressions for the sum of a constant and literal. */
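/* Illustrative example of the split: for (x + 3) + (y + 5), the
   variables are x and y and the literals are 3 and 5; the literals
   recombine first, giving (x + y) + 8. */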
11472 var0 = split_tree (arg0, type, code,
11473 &minus_var0, &con0, &minus_con0,
11474 &lit0, &minus_lit0, 0);
11475 var1 = split_tree (arg1, type, code,
11476 &minus_var1, &con1, &minus_con1,
11477 &lit1, &minus_lit1, code == MINUS_EXPR);
11479 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11480 if (code == MINUS_EXPR)
11481 code = PLUS_EXPR;
11483 /* With undefined overflow prefer doing association in a type
11484 which wraps on overflow, if that is one of the operand types. */
11485 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11486 && !TYPE_OVERFLOW_WRAPS (type))
11488 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11489 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11490 atype = TREE_TYPE (arg0);
11491 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11492 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11493 atype = TREE_TYPE (arg1);
11494 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11497 /* With undefined overflow we can only associate constants with one
11498 variable, and constants whose association doesn't overflow. */
11499 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11500 && !TYPE_OVERFLOW_WRAPS (atype))
11502 if ((var0 && var1) || (minus_var0 && minus_var1))
11504 /* ??? If split_tree would handle NEGATE_EXPR we could
11505 simply reject these cases and the allowed cases would
11506 be the var0/minus_var1 ones. */
11507 tree tmp0 = var0 ? var0 : minus_var0;
11508 tree tmp1 = var1 ? var1 : minus_var1;
11509 bool one_neg = false;
11511 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11513 tmp0 = TREE_OPERAND (tmp0, 0);
11514 one_neg = !one_neg;
11516 if (CONVERT_EXPR_P (tmp0)
11517 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11518 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11519 <= TYPE_PRECISION (atype)))
11520 tmp0 = TREE_OPERAND (tmp0, 0);
11521 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11523 tmp1 = TREE_OPERAND (tmp1, 0);
11524 one_neg = !one_neg;
11526 if (CONVERT_EXPR_P (tmp1)
11527 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11528 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11529 <= TYPE_PRECISION (atype)))
11530 tmp1 = TREE_OPERAND (tmp1, 0);
11531 /* The only case we can still associate with two variables
11532 is if they cancel out. */
11533 if (!one_neg
11534 || !operand_equal_p (tmp0, tmp1, 0))
11535 ok = false;
11537 else if ((var0 && minus_var1
11538 && ! operand_equal_p (var0, minus_var1, 0))
11539 || (minus_var0 && var1
11540 && ! operand_equal_p (minus_var0, var1, 0)))
11541 ok = false;
11544 /* Only do something if we found more than two objects. Otherwise,
11545 nothing has changed and we risk infinite recursion. */
11546 if (ok
11547 && ((var0 != 0) + (var1 != 0)
11548 + (minus_var0 != 0) + (minus_var1 != 0)
11549 + (con0 != 0) + (con1 != 0)
11550 + (minus_con0 != 0) + (minus_con1 != 0)
11551 + (lit0 != 0) + (lit1 != 0)
11552 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11554 var0 = associate_trees (loc, var0, var1, code, atype);
11555 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11556 code, atype);
11557 con0 = associate_trees (loc, con0, con1, code, atype);
11558 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11559 code, atype);
11560 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11561 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11562 code, atype);
11564 if (minus_var0 && var0)
11566 var0 = associate_trees (loc, var0, minus_var0,
11567 MINUS_EXPR, atype);
11568 minus_var0 = 0;
11570 if (minus_con0 && con0)
11572 con0 = associate_trees (loc, con0, minus_con0,
11573 MINUS_EXPR, atype);
11574 minus_con0 = 0;
11577 /* Preserve the MINUS_EXPR if the negative part of the literal is
11578 greater than the positive part. Otherwise, the multiplicative
11579 folding code (i.e. extract_muldiv) may be fooled in case
11580 unsigned constants are subtracted, like in the following
11581 example: ((X*2 + 4) - 8U)/2. */
11582 if (minus_lit0 && lit0)
11584 if (TREE_CODE (lit0) == INTEGER_CST
11585 && TREE_CODE (minus_lit0) == INTEGER_CST
11586 && tree_int_cst_lt (lit0, minus_lit0)
11587 /* But avoid ending up with only negated parts. */
11588 && (var0 || con0))
11590 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11591 MINUS_EXPR, atype);
11592 lit0 = 0;
11594 else
11596 lit0 = associate_trees (loc, lit0, minus_lit0,
11597 MINUS_EXPR, atype);
11598 minus_lit0 = 0;
11602 /* Don't introduce overflows through reassociation. */
11603 if ((lit0 && TREE_OVERFLOW_P (lit0))
11604 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11605 return NULL_TREE;
11607 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11608 con0 = associate_trees (loc, con0, lit0, code, atype);
11609 lit0 = 0;
11610 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11611 code, atype);
11612 minus_lit0 = 0;
11614 /* Eliminate minus_con0. */
11615 if (minus_con0)
11617 if (con0)
11618 con0 = associate_trees (loc, con0, minus_con0,
11619 MINUS_EXPR, atype);
11620 else if (var0)
11621 var0 = associate_trees (loc, var0, minus_con0,
11622 MINUS_EXPR, atype);
11623 else
11624 gcc_unreachable ();
11625 minus_con0 = 0;
11628 /* Eliminate minus_var0. */
11629 if (minus_var0)
11631 if (con0)
11632 con0 = associate_trees (loc, con0, minus_var0,
11633 MINUS_EXPR, atype);
11634 else
11635 gcc_unreachable ();
11636 minus_var0 = 0;
11639 return
11640 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11641 code, atype));
11645 return NULL_TREE;
11647 case POINTER_DIFF_EXPR:
11648 case MINUS_EXPR:
11649 /* Fold &a[i] - &a[j] to i-j. */
11650 if (TREE_CODE (arg0) == ADDR_EXPR
11651 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11652 && TREE_CODE (arg1) == ADDR_EXPR
11653 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11655 tree tem = fold_addr_of_array_ref_difference (loc, type,
11656 TREE_OPERAND (arg0, 0),
11657 TREE_OPERAND (arg1, 0),
11658 code
11659 == POINTER_DIFF_EXPR);
11660 if (tem)
11661 return tem;
11664 /* Further transformations are not for pointers. */
11665 if (code == POINTER_DIFF_EXPR)
11666 return NULL_TREE;
11668 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11669 if (TREE_CODE (arg0) == NEGATE_EXPR
11670 && negate_expr_p (op1)
11671 /* If arg0 is e.g. unsigned int and type is int, then this could
11672 introduce UB, because if A is INT_MIN at runtime, the original
11673 expression can be well defined while the latter is not.
11674 See PR83269. */
11675 && !(ANY_INTEGRAL_TYPE_P (type)
11676 && TYPE_OVERFLOW_UNDEFINED (type)
11677 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11678 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11679 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11680 fold_convert_loc (loc, type,
11681 TREE_OPERAND (arg0, 0)));
11683 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11684 __complex__ ( x, -y ). This is not the same for SNaNs or if
11685 signed zeros are involved. */
11686 if (!HONOR_SNANS (arg0)
11687 && !HONOR_SIGNED_ZEROS (arg0)
11688 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11690 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11691 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11692 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11693 bool arg0rz = false, arg0iz = false;
11694 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11695 || (arg0i && (arg0iz = real_zerop (arg0i))))
11697 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11698 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11699 if (arg0rz && arg1i && real_zerop (arg1i))
11701 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11702 arg1r ? arg1r
11703 : build1 (REALPART_EXPR, rtype, arg1));
11704 tree ip = arg0i ? arg0i
11705 : build1 (IMAGPART_EXPR, rtype, arg0);
11706 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11708 else if (arg0iz && arg1r && real_zerop (arg1r))
11710 tree rp = arg0r ? arg0r
11711 : build1 (REALPART_EXPR, rtype, arg0);
11712 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11713 arg1i ? arg1i
11714 : build1 (IMAGPART_EXPR, rtype, arg1));
11715 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11720 /* A - B -> A + (-B) if B is easily negatable. */
11721 if (negate_expr_p (op1)
11722 && ! TYPE_OVERFLOW_SANITIZED (type)
11723 && ((FLOAT_TYPE_P (type)
11724 /* Avoid this transformation if B is a positive REAL_CST. */
11725 && (TREE_CODE (op1) != REAL_CST
11726 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11727 || INTEGRAL_TYPE_P (type)))
11728 return fold_build2_loc (loc, PLUS_EXPR, type,
11729 fold_convert_loc (loc, type, arg0),
11730 negate_expr (op1));
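/* e.g. x - (-3.0) becomes x + 3.0, while x - 3.0 is left alone because
   3.0 is a positive REAL_CST (illustrative). */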
11732 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11733 the constant 1. Make sure the type is not saturating and has the signedness of
11734 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11735 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11736 if ((TREE_CODE (arg0) == MULT_EXPR
11737 || TREE_CODE (arg1) == MULT_EXPR)
11738 && !TYPE_SATURATING (type)
11739 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11740 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11741 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11743 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11744 if (tem)
11745 return tem;
11748 goto associate;
11750 case MULT_EXPR:
11751 if (! FLOAT_TYPE_P (type))
11753 /* Transform x * -C into -x * C if x is easily negatable. */
11754 if (TREE_CODE (op1) == INTEGER_CST
11755 && tree_int_cst_sgn (op1) == -1
11756 && negate_expr_p (op0)
11757 && negate_expr_p (op1)
11758 && (tem = negate_expr (op1)) != op1
11759 && ! TREE_OVERFLOW (tem))
11760 return fold_build2_loc (loc, MULT_EXPR, type,
11761 fold_convert_loc (loc, type,
11762 negate_expr (op0)), tem);
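/* e.g. (a - b) * -5 -> (b - a) * 5 when a - b is easily negated
   (illustrative). */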
11764 strict_overflow_p = false;
11765 if (TREE_CODE (arg1) == INTEGER_CST
11766 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11767 &strict_overflow_p)) != 0)
11769 if (strict_overflow_p)
11770 fold_overflow_warning (("assuming signed overflow does not "
11771 "occur when simplifying "
11772 "multiplication"),
11773 WARN_STRICT_OVERFLOW_MISC);
11774 return fold_convert_loc (loc, type, tem);
11777 /* Optimize z * conj(z) for integer complex numbers. */
11778 if (TREE_CODE (arg0) == CONJ_EXPR
11779 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11780 return fold_mult_zconjz (loc, type, arg1);
11781 if (TREE_CODE (arg1) == CONJ_EXPR
11782 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11783 return fold_mult_zconjz (loc, type, arg0);
11785 else
11787 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11788 This is not the same for NaNs or if signed zeros are
11789 involved. */
11790 if (!HONOR_NANS (arg0)
11791 && !HONOR_SIGNED_ZEROS (arg0)
11792 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11793 && TREE_CODE (arg1) == COMPLEX_CST
11794 && real_zerop (TREE_REALPART (arg1)))
11796 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11797 if (real_onep (TREE_IMAGPART (arg1)))
11798 return
11799 fold_build2_loc (loc, COMPLEX_EXPR, type,
11800 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11801 rtype, arg0)),
11802 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11803 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11804 return
11805 fold_build2_loc (loc, COMPLEX_EXPR, type,
11806 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11807 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11808 rtype, arg0)));
11811 /* Optimize z * conj(z) for floating point complex numbers.
11812 Guarded by flag_unsafe_math_optimizations as non-finite
11813 imaginary components don't produce scalar results. */
11814 if (flag_unsafe_math_optimizations
11815 && TREE_CODE (arg0) == CONJ_EXPR
11816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11817 return fold_mult_zconjz (loc, type, arg1);
11818 if (flag_unsafe_math_optimizations
11819 && TREE_CODE (arg1) == CONJ_EXPR
11820 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11821 return fold_mult_zconjz (loc, type, arg0);
11823 goto associate;
11825 case BIT_IOR_EXPR:
11826 /* Canonicalize (X & C1) | C2. */
11827 if (TREE_CODE (arg0) == BIT_AND_EXPR
11828 && TREE_CODE (arg1) == INTEGER_CST
11829 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11831 int width = TYPE_PRECISION (type), w;
11832 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11833 wide_int c2 = wi::to_wide (arg1);
11835 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11836 if ((c1 & c2) == c1)
11837 return omit_one_operand_loc (loc, type, arg1,
11838 TREE_OPERAND (arg0, 0));
11840 wide_int msk = wi::mask (width, false,
11841 TYPE_PRECISION (TREE_TYPE (arg1)));
11843 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11844 if (wi::bit_and_not (msk, c1 | c2) == 0)
11846 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11847 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11850 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11851 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11852 mode which allows further optimizations. */
11853 c1 &= msk;
11854 c2 &= msk;
11855 wide_int c3 = wi::bit_and_not (c1, c2);
11856 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11858 wide_int mask = wi::mask (w, false,
11859 TYPE_PRECISION (type));
11860 if (((c1 | c2) & mask) == mask
11861 && wi::bit_and_not (c1, mask) == 0)
11863 c3 = mask;
11864 break;
11868 if (c3 != c1)
11870 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11871 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11872 wide_int_to_tree (type, c3));
11873 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
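/* Illustrative examples of the canonicalizations above, with 8-bit X:
   (X & 0x0F) | 0x1F -> 0x1F since (C1 & C2) == C1
   (X & 0xF3) | 0x0F -> X | 0x0F since (C1 | C2) == ~0
   (X & 0x3C) | 0x0F -> (X & 0x30) | 0x0F with C1 := C1 & ~C2 */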
11877 /* See if this can be simplified into a rotate first. If that
11878 is unsuccessful, continue in the association code. */
11879 goto bit_rotate;
11881 case BIT_XOR_EXPR:
11882 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11883 if (TREE_CODE (arg0) == BIT_AND_EXPR
11884 && INTEGRAL_TYPE_P (type)
11885 && integer_onep (TREE_OPERAND (arg0, 1))
11886 && integer_onep (arg1))
11887 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11888 build_zero_cst (TREE_TYPE (arg0)));
11890 /* See if this can be simplified into a rotate first. If that
11891 is unsuccessful, continue in the association code. */
11892 goto bit_rotate;
11894 case BIT_AND_EXPR:
11895 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11896 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11897 && INTEGRAL_TYPE_P (type)
11898 && integer_onep (TREE_OPERAND (arg0, 1))
11899 && integer_onep (arg1))
11901 tree tem2;
11902 tem = TREE_OPERAND (arg0, 0);
11903 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11904 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11905 tem, tem2);
11906 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11907 build_zero_cst (TREE_TYPE (tem)));
11909 /* Fold ~X & 1 as (X & 1) == 0. */
11910 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11911 && INTEGRAL_TYPE_P (type)
11912 && integer_onep (arg1))
11914 tree tem2;
11915 tem = TREE_OPERAND (arg0, 0);
11916 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11917 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11918 tem, tem2);
11919 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11920 build_zero_cst (TREE_TYPE (tem)));
11922 /* Fold !X & 1 as X == 0. */
11923 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11924 && integer_onep (arg1))
11926 tem = TREE_OPERAND (arg0, 0);
11927 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11928 build_zero_cst (TREE_TYPE (tem)));
11931 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11932 multiple of 1 << CST. */
11933 if (TREE_CODE (arg1) == INTEGER_CST)
11935 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11936 wide_int ncst1 = -cst1;
11937 if ((cst1 & ncst1) == ncst1
11938 && multiple_of_p (type, arg0,
11939 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11940 return fold_convert_loc (loc, type, arg0);
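/* e.g. (X * 8) & -4 folds to X * 8, since X * 8 is always a multiple
   of 4 (illustrative). */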
11943 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11944 bits from CST2. */
11945 if (TREE_CODE (arg1) == INTEGER_CST
11946 && TREE_CODE (arg0) == MULT_EXPR
11947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11949 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11950 wide_int masked
11951 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11953 if (masked == 0)
11954 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11955 arg0, arg1);
11956 else if (masked != warg1)
11958 /* Avoid the transform if arg1 is a mask of some
11959 mode which allows further optimizations. */
11960 int pop = wi::popcount (warg1);
11961 if (!(pop >= BITS_PER_UNIT
11962 && pow2p_hwi (pop)
11963 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11964 return fold_build2_loc (loc, code, type, op0,
11965 wide_int_to_tree (type, masked));
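/* Illustrative examples: (X * 4) & 3 folds to 0, and (X * 4) & 7
   becomes (X * 4) & 4, since X * 4 contributes nothing to the two
   low bits. */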
11969 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11970 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11971 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11973 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11975 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11976 if (mask == -1)
11977 return
11978 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11981 goto associate;
11983 case RDIV_EXPR:
11984 /* Don't touch a floating-point divide by zero unless the mode
11985 of the constant can represent infinity. */
11986 if (TREE_CODE (arg1) == REAL_CST
11987 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11988 && real_zerop (arg1))
11989 return NULL_TREE;
11991 /* (-A) / (-B) -> A / B */
11992 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11993 return fold_build2_loc (loc, RDIV_EXPR, type,
11994 TREE_OPERAND (arg0, 0),
11995 negate_expr (arg1));
11996 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11997 return fold_build2_loc (loc, RDIV_EXPR, type,
11998 negate_expr (arg0),
11999 TREE_OPERAND (arg1, 0));
12000 return NULL_TREE;
12002 case TRUNC_DIV_EXPR:
12003 /* Fall through */
12005 case FLOOR_DIV_EXPR:
12006 /* Simplify A / (B << N) where A and B are positive and B is
12007 a power of 2, to A >> (N + log2(B)). */
12008 strict_overflow_p = false;
12009 if (TREE_CODE (arg1) == LSHIFT_EXPR
12010 && (TYPE_UNSIGNED (type)
12011 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12013 tree sval = TREE_OPERAND (arg1, 0);
12014 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12016 tree sh_cnt = TREE_OPERAND (arg1, 1);
12017 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12018 wi::exact_log2 (wi::to_wide (sval)));
12020 if (strict_overflow_p)
12021 fold_overflow_warning (("assuming signed overflow does not "
12022 "occur when simplifying A / (B << N)"),
12023 WARN_STRICT_OVERFLOW_MISC);
12025 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12026 sh_cnt, pow2);
12027 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12028 fold_convert_loc (loc, type, arg0), sh_cnt);
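/* e.g. for unsigned A, A / (4 << N) becomes A >> (N + 2)
   (illustrative). */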
12032 /* Fall through */
12034 case ROUND_DIV_EXPR:
12035 case CEIL_DIV_EXPR:
12036 case EXACT_DIV_EXPR:
12037 if (integer_zerop (arg1))
12038 return NULL_TREE;
12040 /* Convert -A / -B to A / B when the type is signed and overflow is
12041 undefined. */
12042 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12043 && TREE_CODE (op0) == NEGATE_EXPR
12044 && negate_expr_p (op1))
12046 if (ANY_INTEGRAL_TYPE_P (type))
12047 fold_overflow_warning (("assuming signed overflow does not occur "
12048 "when distributing negation across "
12049 "division"),
12050 WARN_STRICT_OVERFLOW_MISC);
12051 return fold_build2_loc (loc, code, type,
12052 fold_convert_loc (loc, type,
12053 TREE_OPERAND (arg0, 0)),
12054 negate_expr (op1));
12056 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12057 && TREE_CODE (arg1) == NEGATE_EXPR
12058 && negate_expr_p (op0))
12060 if (ANY_INTEGRAL_TYPE_P (type))
12061 fold_overflow_warning (("assuming signed overflow does not occur "
12062 "when distributing negation across "
12063 "division"),
12064 WARN_STRICT_OVERFLOW_MISC);
12065 return fold_build2_loc (loc, code, type,
12066 negate_expr (op0),
12067 fold_convert_loc (loc, type,
12068 TREE_OPERAND (arg1, 0)));
12071 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12072 operation, EXACT_DIV_EXPR.
12074 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12075 At one time others generated faster code; it's not clear if they do
12076 after the last round of changes to the DIV code in expmed.cc. */
12077 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12078 && multiple_of_p (type, arg0, arg1))
12079 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12080 fold_convert (type, arg0),
12081 fold_convert (type, arg1));
12083 strict_overflow_p = false;
12084 if (TREE_CODE (arg1) == INTEGER_CST
12085 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12086 &strict_overflow_p)) != 0)
12088 if (strict_overflow_p)
12089 fold_overflow_warning (("assuming signed overflow does not occur "
12090 "when simplifying division"),
12091 WARN_STRICT_OVERFLOW_MISC);
12092 return fold_convert_loc (loc, type, tem);
12095 return NULL_TREE;
12097 case CEIL_MOD_EXPR:
12098 case FLOOR_MOD_EXPR:
12099 case ROUND_MOD_EXPR:
12100 case TRUNC_MOD_EXPR:
12101 strict_overflow_p = false;
12102 if (TREE_CODE (arg1) == INTEGER_CST
12103 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12104 &strict_overflow_p)) != 0)
12106 if (strict_overflow_p)
12107 fold_overflow_warning (("assuming signed overflow does not occur "
12108 "when simplifying modulus"),
12109 WARN_STRICT_OVERFLOW_MISC);
12110 return fold_convert_loc (loc, type, tem);
12113 return NULL_TREE;
12115 case LROTATE_EXPR:
12116 case RROTATE_EXPR:
12117 case RSHIFT_EXPR:
12118 case LSHIFT_EXPR:
12119 /* Since a negative shift count is not well-defined,
12120 don't try to compute it in the compiler. */
12121 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12122 return NULL_TREE;
12124 prec = element_precision (type);
12126 /* If we have a rotate of a bit operation with the rotate count and
12127 the second operand of the bit operation both constant,
12128 permute the two operations. */
12129 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12130 && (TREE_CODE (arg0) == BIT_AND_EXPR
12131 || TREE_CODE (arg0) == BIT_IOR_EXPR
12132 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12133 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12135 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12136 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12137 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12138 fold_build2_loc (loc, code, type,
12139 arg00, arg1),
12140 fold_build2_loc (loc, code, type,
12141 arg01, arg1));
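/* e.g. on a 32-bit type, (X & 0x00FF00FF) ror 8 becomes
   (X ror 8) & (0x00FF00FF ror 8), i.e. (X ror 8) & 0xFF00FF00
   (illustrative). */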
12144 /* Two consecutive rotates adding up to some integer
12145 multiple of the precision of the type can be ignored. */
12146 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12147 && TREE_CODE (arg0) == RROTATE_EXPR
12148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12149 && wi::umod_trunc (wi::to_wide (arg1)
12150 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12151 prec) == 0)
12152 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
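/* e.g. on a 32-bit type, (X ror 12) ror 20 folds back to X, since
   12 + 20 is a whole multiple of the precision (illustrative). */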
12154 return NULL_TREE;
12156 case MIN_EXPR:
12157 case MAX_EXPR:
12158 goto associate;
12160 case TRUTH_ANDIF_EXPR:
12161 /* Note that the operands of this must be ints
12162 and their values must be 0 or 1.
12163 ("true" is a fixed value perhaps depending on the language.) */
12164 /* If first arg is constant zero, return it. */
12165 if (integer_zerop (arg0))
12166 return fold_convert_loc (loc, type, arg0);
12167 /* FALLTHRU */
12168 case TRUTH_AND_EXPR:
12169 /* If either arg is constant true, drop it. */
12170 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12171 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12172 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12173 /* Preserve sequence points. */
12174 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12175 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12176 /* If second arg is constant zero, result is zero, but first arg
12177 must be evaluated. */
12178 if (integer_zerop (arg1))
12179 return omit_one_operand_loc (loc, type, arg1, arg0);
12180 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12181 case will be handled here. */
12182 if (integer_zerop (arg0))
12183 return omit_one_operand_loc (loc, type, arg0, arg1);
12185 /* !X && X is always false. */
12186 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12187 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12188 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12189 /* X && !X is always false. */
12190 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12191 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12192 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12194 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12195 means A >= Y && A != MAX, but in this case we know that
12196 A < X <= MAX. */
12198 if (!TREE_SIDE_EFFECTS (arg0)
12199 && !TREE_SIDE_EFFECTS (arg1))
12201 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12202 if (tem && !operand_equal_p (tem, arg0, 0))
12203 return fold_convert (type,
12204 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12205 tem, arg1));
12207 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12208 if (tem && !operand_equal_p (tem, arg1, 0))
12209 return fold_convert (type,
12210 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12211 arg0, tem));
12214 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12215 != NULL_TREE)
12216 return tem;
12218 return NULL_TREE;
12220 case TRUTH_ORIF_EXPR:
12221 /* Note that the operands of this must be ints
12222 and their values must be 0 or true.
12223 ("true" is a fixed value perhaps depending on the language.) */
12224 /* If first arg is constant true, return it. */
12225 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12226 return fold_convert_loc (loc, type, arg0);
12227 /* FALLTHRU */
12228 case TRUTH_OR_EXPR:
12229 /* If either arg is constant zero, drop it. */
12230 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12231 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12232 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12233 /* Preserve sequence points. */
12234 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12235 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12236 /* If second arg is constant true, result is true, but we must
12237 evaluate first arg. */
12238 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12239 return omit_one_operand_loc (loc, type, arg1, arg0);
12240 /* Likewise for first arg, but note this only occurs here for
12241 TRUTH_OR_EXPR. */
12242 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12243 return omit_one_operand_loc (loc, type, arg0, arg1);
12245 /* !X || X is always true. */
12246 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12247 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12248 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12249 /* X || !X is always true. */
12250 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12251 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12252 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12254 /* (X && !Y) || (!X && Y) is X ^ Y */
12255 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12256 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12258 tree a0, a1, l0, l1, n0, n1;
12260 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12261 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12263 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12264 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12266 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12267 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12269 if ((operand_equal_p (n0, a0, 0)
12270 && operand_equal_p (n1, a1, 0))
12271 || (operand_equal_p (n0, a1, 0)
12272 && operand_equal_p (n1, a0, 0)))
12273 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12276 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12277 != NULL_TREE)
12278 return tem;
12280 return NULL_TREE;
12282 case TRUTH_XOR_EXPR:
12283 /* If the second arg is constant zero, drop it. */
12284 if (integer_zerop (arg1))
12285 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12286 /* If the second arg is constant true, this is a logical inversion. */
12287 if (integer_onep (arg1))
12289 tem = invert_truthvalue_loc (loc, arg0);
12290 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12292 /* Identical arguments cancel to zero. */
12293 if (operand_equal_p (arg0, arg1, 0))
12294 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12296 /* !X ^ X is always true. */
12297 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12298 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12299 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12301 /* X ^ !X is always true. */
12302 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12303 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12304 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12306 return NULL_TREE;
12308 case EQ_EXPR:
12309 case NE_EXPR:
12310 STRIP_NOPS (arg0);
12311 STRIP_NOPS (arg1);
12313 tem = fold_comparison (loc, code, type, op0, op1);
12314 if (tem != NULL_TREE)
12315 return tem;
12317 /* bool_var != 1 becomes !bool_var. */
12318 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12319 && code == NE_EXPR)
12320 return fold_convert_loc (loc, type,
12321 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12322 TREE_TYPE (arg0), arg0));
12324 /* bool_var == 0 becomes !bool_var. */
12325 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12326 && code == EQ_EXPR)
12327 return fold_convert_loc (loc, type,
12328 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12329 TREE_TYPE (arg0), arg0));
12331 /* !exp != 0 becomes !exp */
12332 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12333 && code == NE_EXPR)
12334 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12336 /* If this is an EQ or NE comparison with zero and ARG0 is
12337 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12338 two operations, but the latter can be done in one less insn
12339 on machines that have only two-operand insns or on which a
12340 constant cannot be the first operand. */
12341 if (TREE_CODE (arg0) == BIT_AND_EXPR
12342 && integer_zerop (arg1))
12344 tree arg00 = TREE_OPERAND (arg0, 0);
12345 tree arg01 = TREE_OPERAND (arg0, 1);
12346 if (TREE_CODE (arg00) == LSHIFT_EXPR
12347 && integer_onep (TREE_OPERAND (arg00, 0)))
12349 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12350 arg01, TREE_OPERAND (arg00, 1));
12351 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12352 build_one_cst (TREE_TYPE (arg0)));
12353 return fold_build2_loc (loc, code, type,
12354 fold_convert_loc (loc, TREE_TYPE (arg1),
12355 tem), arg1);
12357 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12358 && integer_onep (TREE_OPERAND (arg01, 0)))
12360 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12361 arg00, TREE_OPERAND (arg01, 1));
12362 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12363 build_one_cst (TREE_TYPE (arg0)));
12364 return fold_build2_loc (loc, code, type,
12365 fold_convert_loc (loc, TREE_TYPE (arg1),
12366 tem), arg1);
12370 /* If this is a comparison of a field, we may be able to simplify it. */
12371 if ((TREE_CODE (arg0) == COMPONENT_REF
12372 || TREE_CODE (arg0) == BIT_FIELD_REF)
12373 /* Handle the constant case even without -O
12374 to make sure the warnings are given. */
12375 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12377 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12378 if (t1)
12379 return t1;
12382 /* Optimize comparisons of strlen vs zero to a compare of the
12383 first character of the string vs zero. To wit,
12384 strlen(ptr) == 0 => *ptr == 0
12385 strlen(ptr) != 0 => *ptr != 0
12386 Other cases should reduce to one of these two (or a constant)
12387 due to the return value of strlen being unsigned. */
12388 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12390 tree fndecl = get_callee_fndecl (arg0);
12392 if (fndecl
12393 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12394 && call_expr_nargs (arg0) == 1
12395 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12396 == POINTER_TYPE))
12398 tree ptrtype
12399 = build_pointer_type (build_qualified_type (char_type_node,
12400 TYPE_QUAL_CONST));
12401 tree ptr = fold_convert_loc (loc, ptrtype,
12402 CALL_EXPR_ARG (arg0, 0));
12403 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12404 return fold_build2_loc (loc, code, type, iref,
12405 build_int_cst (TREE_TYPE (iref), 0));
12409 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12410 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12411 if (TREE_CODE (arg0) == RSHIFT_EXPR
12412 && integer_zerop (arg1)
12413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12415 tree arg00 = TREE_OPERAND (arg0, 0);
12416 tree arg01 = TREE_OPERAND (arg0, 1);
12417 tree itype = TREE_TYPE (arg00);
12418 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12420 if (TYPE_UNSIGNED (itype))
12422 itype = signed_type_for (itype);
12423 arg00 = fold_convert_loc (loc, itype, arg00);
12425 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12426 type, arg00, build_zero_cst (itype));
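/* e.g. for a 32-bit int X, (X >> 31) != 0 becomes X < 0 and
   (X >> 31) == 0 becomes X >= 0; an unsigned X is first converted to
   the corresponding signed type (illustrative). */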
12430 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12431 (X & C) == 0 when C is a single bit. */
12432 if (TREE_CODE (arg0) == BIT_AND_EXPR
12433 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12434 && integer_zerop (arg1)
12435 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12437 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12438 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12439 TREE_OPERAND (arg0, 1));
12440 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12441 type, tem,
12442 fold_convert_loc (loc, TREE_TYPE (arg0),
12443 arg1));
12446 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12447 constant C is a power of two, i.e. a single bit. */
12448 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12449 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12450 && integer_zerop (arg1)
12451 && integer_pow2p (TREE_OPERAND (arg0, 1))
12452 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12453 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12455 tree arg00 = TREE_OPERAND (arg0, 0);
12456 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12457 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12460 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12461 when C is a power of two, i.e. a single bit. */
12462 if (TREE_CODE (arg0) == BIT_AND_EXPR
12463 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12464 && integer_zerop (arg1)
12465 && integer_pow2p (TREE_OPERAND (arg0, 1))
12466 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12467 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12469 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12470 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12471 arg000, TREE_OPERAND (arg0, 1));
12472 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12473 tem, build_int_cst (TREE_TYPE (tem), 0));
12476 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12477 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12479 tree arg00 = TREE_OPERAND (arg0, 0);
12480 tree arg01 = TREE_OPERAND (arg0, 1);
12481 tree arg10 = TREE_OPERAND (arg1, 0);
12482 tree arg11 = TREE_OPERAND (arg1, 1);
12483 tree itype = TREE_TYPE (arg0);
12485 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12486 operand_equal_p guarantees no side-effects so we don't need
12487 to use omit_one_operand on Z. */
12488 if (operand_equal_p (arg01, arg11, 0))
12489 return fold_build2_loc (loc, code, type, arg00,
12490 fold_convert_loc (loc, TREE_TYPE (arg00),
12491 arg10));
12492 if (operand_equal_p (arg01, arg10, 0))
12493 return fold_build2_loc (loc, code, type, arg00,
12494 fold_convert_loc (loc, TREE_TYPE (arg00),
12495 arg11));
12496 if (operand_equal_p (arg00, arg11, 0))
12497 return fold_build2_loc (loc, code, type, arg01,
12498 fold_convert_loc (loc, TREE_TYPE (arg01),
12499 arg10));
12500 if (operand_equal_p (arg00, arg10, 0))
12501 return fold_build2_loc (loc, code, type, arg01,
12502 fold_convert_loc (loc, TREE_TYPE (arg01),
12503 arg11));
12505 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12506 if (TREE_CODE (arg01) == INTEGER_CST
12507 && TREE_CODE (arg11) == INTEGER_CST)
12509 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12510 fold_convert_loc (loc, itype, arg11));
12511 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12512 return fold_build2_loc (loc, code, type, tem,
12513 fold_convert_loc (loc, itype, arg10));
12517 /* Attempt to simplify equality/inequality comparisons of complex
12518 values. Only lower the comparison if the result is known or
12519 can be simplified to a single scalar comparison. */
12520 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12521 || TREE_CODE (arg0) == COMPLEX_CST)
12522 && (TREE_CODE (arg1) == COMPLEX_EXPR
12523 || TREE_CODE (arg1) == COMPLEX_CST))
12525 tree real0, imag0, real1, imag1;
12526 tree rcond, icond;
12528 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12530 real0 = TREE_OPERAND (arg0, 0);
12531 imag0 = TREE_OPERAND (arg0, 1);
12533 else
12535 real0 = TREE_REALPART (arg0);
12536 imag0 = TREE_IMAGPART (arg0);
12539 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12541 real1 = TREE_OPERAND (arg1, 0);
12542 imag1 = TREE_OPERAND (arg1, 1);
12544 else
12546 real1 = TREE_REALPART (arg1);
12547 imag1 = TREE_IMAGPART (arg1);
12550 rcond = fold_binary_loc (loc, code, type, real0, real1);
12551 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12553 if (integer_zerop (rcond))
12555 if (code == EQ_EXPR)
12556 return omit_two_operands_loc (loc, type, boolean_false_node,
12557 imag0, imag1);
12558 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12560 else
12562 if (code == NE_EXPR)
12563 return omit_two_operands_loc (loc, type, boolean_true_node,
12564 imag0, imag1);
12565 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12569 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12570 if (icond && TREE_CODE (icond) == INTEGER_CST)
12572 if (integer_zerop (icond))
12574 if (code == EQ_EXPR)
12575 return omit_two_operands_loc (loc, type, boolean_false_node,
12576 real0, real1);
12577 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12579 else
12581 if (code == NE_EXPR)
12582 return omit_two_operands_loc (loc, type, boolean_true_node,
12583 real0, real1);
12584 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12589 return NULL_TREE;
12591 case LT_EXPR:
12592 case GT_EXPR:
12593 case LE_EXPR:
12594 case GE_EXPR:
12595 tem = fold_comparison (loc, code, type, op0, op1);
12596 if (tem != NULL_TREE)
12597 return tem;
12599 /* Transform comparisons of the form X +- C CMP X. */
12600 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12601 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12603 && !HONOR_SNANS (arg0))
12605 tree arg01 = TREE_OPERAND (arg0, 1);
12606 enum tree_code code0 = TREE_CODE (arg0);
12607 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12609 /* (X - c) > X becomes false. */
12610 if (code == GT_EXPR
12611 && ((code0 == MINUS_EXPR && is_positive >= 0)
12612 || (code0 == PLUS_EXPR && is_positive <= 0)))
12613 return constant_boolean_node (0, type);
12615 /* Likewise (X + c) < X becomes false. */
12616 if (code == LT_EXPR
12617 && ((code0 == PLUS_EXPR && is_positive >= 0)
12618 || (code0 == MINUS_EXPR && is_positive <= 0)))
12619 return constant_boolean_node (0, type);
12621 /* Convert (X - c) <= X to true. */
12622 if (!HONOR_NANS (arg1)
12623 && code == LE_EXPR
12624 && ((code0 == MINUS_EXPR && is_positive >= 0)
12625 || (code0 == PLUS_EXPR && is_positive <= 0)))
12626 return constant_boolean_node (1, type);
12628 /* Convert (X + c) >= X to true. */
12629 if (!HONOR_NANS (arg1)
12630 && code == GE_EXPR
12631 && ((code0 == PLUS_EXPR && is_positive >= 0)
12632 || (code0 == MINUS_EXPR && is_positive <= 0)))
12633 return constant_boolean_node (1, type);
12636 /* If we are comparing an ABS_EXPR with a constant, we can
12637 convert all the cases into explicit comparisons, but they may
12638 well not be faster than doing the ABS and one comparison.
12639 But ABS (X) <= C is a range comparison, which becomes a subtraction
12640 and a comparison, and is probably faster. */
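/* For instance, ABS (X) <= 5 becomes X >= -5 && X <= 5, built below
as a TRUTH_ANDIF_EXPR of the two comparisons. */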
12641 if (code == LE_EXPR
12642 && TREE_CODE (arg1) == INTEGER_CST
12643 && TREE_CODE (arg0) == ABS_EXPR
12644 && ! TREE_SIDE_EFFECTS (arg0)
12645 && (tem = negate_expr (arg1)) != 0
12646 && TREE_CODE (tem) == INTEGER_CST
12647 && !TREE_OVERFLOW (tem))
12648 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12649 build2 (GE_EXPR, type,
12650 TREE_OPERAND (arg0, 0), tem),
12651 build2 (LE_EXPR, type,
12652 TREE_OPERAND (arg0, 0), arg1));
12654 /* Convert ABS_EXPR<x> >= 0 to true. */
12655 strict_overflow_p = false;
12656 if (code == GE_EXPR
12657 && (integer_zerop (arg1)
12658 || (! HONOR_NANS (arg0)
12659 && real_zerop (arg1)))
12660 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12662 if (strict_overflow_p)
12663 fold_overflow_warning (("assuming signed overflow does not occur "
12664 "when simplifying comparison of "
12665 "absolute value and zero"),
12666 WARN_STRICT_OVERFLOW_CONDITIONAL);
12667 return omit_one_operand_loc (loc, type,
12668 constant_boolean_node (true, type),
12669 arg0);
12672 /* Convert ABS_EXPR<x> < 0 to false. */
12673 strict_overflow_p = false;
12674 if (code == LT_EXPR
12675 && (integer_zerop (arg1) || real_zerop (arg1))
12676 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12678 if (strict_overflow_p)
12679 fold_overflow_warning (("assuming signed overflow does not occur "
12680 "when simplifying comparison of "
12681 "absolute value and zero"),
12682 WARN_STRICT_OVERFLOW_CONDITIONAL);
12683 return omit_one_operand_loc (loc, type,
12684 constant_boolean_node (false, type),
12685 arg0);
12688 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12689 and similarly for >= into !=. */
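/* For unsigned X, X < (1 << Y) holds iff no bit at position Y or
above is set, i.e. iff X >> Y == 0; likewise X >= (1 << Y) holds
iff X >> Y != 0. */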
12690 if ((code == LT_EXPR || code == GE_EXPR)
12691 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12692 && TREE_CODE (arg1) == LSHIFT_EXPR
12693 && integer_onep (TREE_OPERAND (arg1, 0)))
12694 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12695 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12696 TREE_OPERAND (arg1, 1)),
12697 build_zero_cst (TREE_TYPE (arg0)));
12699 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12700 otherwise Y might be >= # of bits in X's type and thus e.g.
12701 (unsigned char) (1 << Y) for Y 15 might be 0.
12702 If the cast is widening, then 1 << Y should have unsigned type,
12703 otherwise if Y is number of bits in the signed shift type minus 1,
12704 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12705 31 might be 0xffffffff80000000. */
12706 if ((code == LT_EXPR || code == GE_EXPR)
12707 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12708 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12709 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12710 && CONVERT_EXPR_P (arg1)
12711 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12712 && (element_precision (TREE_TYPE (arg1))
12713 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12714 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12715 || (element_precision (TREE_TYPE (arg1))
12716 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12717 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12719 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12720 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12721 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12722 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12723 build_zero_cst (TREE_TYPE (arg0)));
12726 return NULL_TREE;
12728 case UNORDERED_EXPR:
12729 case ORDERED_EXPR:
12730 case UNLT_EXPR:
12731 case UNLE_EXPR:
12732 case UNGT_EXPR:
12733 case UNGE_EXPR:
12734 case UNEQ_EXPR:
12735 case LTGT_EXPR:
12736 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12738 tree targ0 = strip_float_extensions (arg0);
12739 tree targ1 = strip_float_extensions (arg1);
12740 tree newtype = TREE_TYPE (targ0);
12742 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12743 newtype = TREE_TYPE (targ1);
12745 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0)))
12746 return fold_build2_loc (loc, code, type,
12747 fold_convert_loc (loc, newtype, targ0),
12748 fold_convert_loc (loc, newtype, targ1));
12751 return NULL_TREE;
12753 case COMPOUND_EXPR:
12754 /* When pedantic, a compound expression can be neither an lvalue
12755 nor an integer constant expression. */
12756 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12757 return NULL_TREE;
12758 /* Don't let (0, 0) be a null pointer constant. */
12759 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12760 : fold_convert_loc (loc, type, arg1);
12761 return tem;
12763 default:
12764 return NULL_TREE;
12765 } /* switch (code) */
12768 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12769 ((A & N) + B) & M -> (A + B) & M
12770 Similarly if (N & M) == 0,
12771 ((A | N) + B) & M -> (A + B) & M
12772 and for - instead of + (or unary - instead of +)
12773 and/or ^ instead of |.
12774 If B is constant and (B & M) == 0, fold into A & M.
12776 This function is a helper for match.pd patterns. Return the non-NULL
12777 type in which the simplified operation should be performed, but only
12778 if some optimization is possible; otherwise return NULL_TREE.
12780 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12781 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12782 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12783 +/-. */
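/* As a worked example with M == 7: ((A & 15) + B) & 7 folds to
(A + B) & 7 because (15 & 7) == 7, and ((A | 8) + B) & 7 folds the
same way because (8 & 7) == 0; the low three bits of the sum are
unchanged in either case. */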
12784 tree
12785 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12786 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12787 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12788 tree *pmop)
12790 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12791 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12792 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12793 if (~cst1 == 0
12794 || (cst1 & (cst1 + 1)) != 0
12795 || !INTEGRAL_TYPE_P (type)
12796 || (!TYPE_OVERFLOW_WRAPS (type)
12797 && TREE_CODE (type) != INTEGER_TYPE)
12798 || (wi::max_value (type) & cst1) != cst1)
12799 return NULL_TREE;
12801 enum tree_code codes[2] = { code00, code01 };
12802 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12803 int which = 0;
12804 wide_int cst0;
12806 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12807 arg1 (M) is == (1LL << cst) - 1.
12808 Store C into PMOP[0] and D into PMOP[1]. */
12809 pmop[0] = arg00;
12810 pmop[1] = arg01;
12811 which = code != NEGATE_EXPR;
12813 for (; which >= 0; which--)
12814 switch (codes[which])
12816 case BIT_AND_EXPR:
12817 case BIT_IOR_EXPR:
12818 case BIT_XOR_EXPR:
12819 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12820 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12821 if (codes[which] == BIT_AND_EXPR)
12823 if (cst0 != cst1)
12824 break;
12826 else if (cst0 != 0)
12827 break;
12828 /* If C or D is of the form (A & N) where
12829 (N & M) == M, or of the form (A | N) or
12830 (A ^ N) where (N & M) == 0, replace it with A. */
12831 pmop[which] = arg0xx[2 * which];
12832 break;
12833 case ERROR_MARK:
12834 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12835 break;
12836 /* If C or D is a N where (N & M) == 0, it can be
12837 omitted (replaced with 0). */
12838 if ((code == PLUS_EXPR
12839 || (code == MINUS_EXPR && which == 0))
12840 && (cst1 & wi::to_wide (pmop[which])) == 0)
12841 pmop[which] = build_int_cst (type, 0);
12842 /* Similarly, with C - N where (-N & M) == 0. */
12843 if (code == MINUS_EXPR
12844 && which == 1
12845 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12846 pmop[which] = build_int_cst (type, 0);
12847 break;
12848 default:
12849 gcc_unreachable ();
12852 /* Only build anything new if we optimized one or both arguments above. */
12853 if (pmop[0] == arg00 && pmop[1] == arg01)
12854 return NULL_TREE;
12856 if (TYPE_OVERFLOW_WRAPS (type))
12857 return type;
12858 else
12859 return unsigned_type_for (type);
12862 /* Used by contains_label_p and contains_label_1. */
12864 struct contains_label_data
12866 hash_set<tree> *pset;
12867 bool inside_switch_p;
12870 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12871 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12872 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12874 static tree
12875 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12877 contains_label_data *d = (contains_label_data *) data;
12878 switch (TREE_CODE (*tp))
12880 case LABEL_EXPR:
12881 return *tp;
12883 case CASE_LABEL_EXPR:
12884 if (!d->inside_switch_p)
12885 return *tp;
12886 return NULL_TREE;
12888 case SWITCH_EXPR:
12889 if (!d->inside_switch_p)
12891 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12892 return *tp;
12893 d->inside_switch_p = true;
12894 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12895 return *tp;
12896 d->inside_switch_p = false;
12897 *walk_subtrees = 0;
12899 return NULL_TREE;
12901 case GOTO_EXPR:
12902 *walk_subtrees = 0;
12903 return NULL_TREE;
12905 default:
12906 return NULL_TREE;
12910 /* Return whether the sub-tree ST contains a label which is accessible from
12911 outside the sub-tree. */
12913 static bool
12914 contains_label_p (tree st)
12916 hash_set<tree> pset;
12917 contains_label_data data = { &pset, false };
12918 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12921 /* Fold a ternary expression of code CODE and type TYPE with operands
12922 OP0, OP1, and OP2. Return the folded expression if folding is
12923 successful. Otherwise, return NULL_TREE. */
12925 tree
12926 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12927 tree op0, tree op1, tree op2)
12929 tree tem;
12930 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12931 enum tree_code_class kind = TREE_CODE_CLASS (code);
12933 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12934 && TREE_CODE_LENGTH (code) == 3);
12936 /* If this is a commutative operation, and OP0 is a constant, move it
12937 to OP1 to reduce the number of tests below. */
12938 if (commutative_ternary_tree_code (code)
12939 && tree_swap_operands_p (op0, op1))
12940 return fold_build3_loc (loc, code, type, op1, op0, op2);
12942 tem = generic_simplify (loc, code, type, op0, op1, op2);
12943 if (tem)
12944 return tem;
12946 /* Strip any conversions that don't change the mode. This is safe
12947 for every expression, except for a comparison expression because
12948 its signedness is derived from its operands. So, in the latter
12949 case, only strip conversions that don't change the signedness.
12951 Note that this is done as an internal manipulation within the
12952 constant folder, in order to find the simplest representation of
12953 the arguments so that their form can be studied. In any case,
12954 the appropriate type conversions should be put back in the tree
12955 that will get out of the constant folder. */
12956 if (op0)
12958 arg0 = op0;
12959 STRIP_NOPS (arg0);
12962 if (op1)
12964 arg1 = op1;
12965 STRIP_NOPS (arg1);
12968 if (op2)
12970 arg2 = op2;
12971 STRIP_NOPS (arg2);
12974 switch (code)
12976 case COMPONENT_REF:
12977 if (TREE_CODE (arg0) == CONSTRUCTOR
12978 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12980 unsigned HOST_WIDE_INT idx;
12981 tree field, value;
12982 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12983 if (field == arg1)
12984 return value;
12986 return NULL_TREE;
12988 case COND_EXPR:
12989 case VEC_COND_EXPR:
12990 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12991 so all simple results must be passed through pedantic_non_lvalue. */
12992 if (TREE_CODE (arg0) == INTEGER_CST)
12994 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12995 tem = integer_zerop (arg0) ? op2 : op1;
12996 /* Only optimize constant conditions when the selected branch
12997 has the same type as the COND_EXPR. This avoids optimizing
12998 away "c ? x : throw", where the throw has a void type.
12999 Avoid throwing away that operand which contains label. */
13000 if ((!TREE_SIDE_EFFECTS (unused_op)
13001 || !contains_label_p (unused_op))
13002 && (! VOID_TYPE_P (TREE_TYPE (tem))
13003 || VOID_TYPE_P (type)))
13004 return protected_set_expr_location_unshare (tem, loc);
13005 return NULL_TREE;
13007 else if (TREE_CODE (arg0) == VECTOR_CST)
13009 unsigned HOST_WIDE_INT nelts;
13010 if ((TREE_CODE (arg1) == VECTOR_CST
13011 || TREE_CODE (arg1) == CONSTRUCTOR)
13012 && (TREE_CODE (arg2) == VECTOR_CST
13013 || TREE_CODE (arg2) == CONSTRUCTOR)
13014 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13016 vec_perm_builder sel (nelts, nelts, 1);
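/* Build a permutation selector from the constant mask: element I of
the result is taken from ARG1 when mask element I is all ones, and
from ARG2 (selector index NELTS + I) when it is zero. E.g. a
four-element mask of { -1, 0, -1, 0 } yields the selector
{ 0, 5, 2, 7 }. */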
13017 for (unsigned int i = 0; i < nelts; i++)
13019 tree val = VECTOR_CST_ELT (arg0, i);
13020 if (integer_all_onesp (val))
13021 sel.quick_push (i);
13022 else if (integer_zerop (val))
13023 sel.quick_push (nelts + i);
13024 else /* Currently unreachable. */
13025 return NULL_TREE;
13027 vec_perm_indices indices (sel, 2, nelts);
13028 tree t = fold_vec_perm (type, arg1, arg2, indices);
13029 if (t != NULL_TREE)
13030 return t;
13034 /* If we have A op B ? A : C, we may be able to convert this to a
13035 simpler expression, depending on the operation and the values
13036 of B and C. Signed zeros prevent all of these transformations,
13037 for reasons given above each one.
13039 Also try swapping the arguments and inverting the conditional. */
13040 if (COMPARISON_CLASS_P (arg0)
13041 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13042 && !HONOR_SIGNED_ZEROS (op1))
13044 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13045 TREE_OPERAND (arg0, 0),
13046 TREE_OPERAND (arg0, 1),
13047 op1, op2);
13048 if (tem)
13049 return tem;
13052 if (COMPARISON_CLASS_P (arg0)
13053 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13054 && !HONOR_SIGNED_ZEROS (op2))
13056 enum tree_code comp_code = TREE_CODE (arg0);
13057 tree arg00 = TREE_OPERAND (arg0, 0);
13058 tree arg01 = TREE_OPERAND (arg0, 1);
13059 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13060 if (comp_code != ERROR_MARK)
13061 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13062 arg00,
13063 arg01,
13064 op2, op1);
13065 if (tem)
13066 return tem;
13069 /* If the second operand is simpler than the third, swap them
13070 since that produces better jump optimization results. */
13071 if (truth_value_p (TREE_CODE (arg0))
13072 && tree_swap_operands_p (op1, op2))
13074 location_t loc0 = expr_location_or (arg0, loc);
13075 /* See if this can be inverted. If it can't, possibly because
13076 it was a floating-point inequality comparison, don't do
13077 anything. */
13078 tem = fold_invert_truthvalue (loc0, arg0);
13079 if (tem)
13080 return fold_build3_loc (loc, code, type, tem, op2, op1);
13083 /* Convert A ? 1 : 0 to simply A. */
13084 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13085 : (integer_onep (op1)
13086 && !VECTOR_TYPE_P (type)))
13087 && integer_zerop (op2)
13088 /* If we try to convert OP0 to our type, the
13089 call to fold will try to move the conversion inside
13090 a COND, which will recurse. In that case, the COND_EXPR
13091 is probably the best choice, so leave it alone. */
13092 && type == TREE_TYPE (arg0))
13093 return protected_set_expr_location_unshare (arg0, loc);
13095 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13096 over COND_EXPR in cases such as floating point comparisons. */
13097 if (integer_zerop (op1)
13098 && code == COND_EXPR
13099 && integer_onep (op2)
13100 && !VECTOR_TYPE_P (type)
13101 && truth_value_p (TREE_CODE (arg0)))
13102 return fold_convert_loc (loc, type,
13103 invert_truthvalue_loc (loc, arg0));
13105 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
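/* For instance, with a 32-bit signed A and the arithmetic done in an
unsigned type, A < 0 ? 0x80000000 : 0 is simply A & 0x80000000. */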
13106 if (TREE_CODE (arg0) == LT_EXPR
13107 && integer_zerop (TREE_OPERAND (arg0, 1))
13108 && integer_zerop (op2)
13109 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13111 /* sign_bit_p looks through both zero and sign extensions,
13112 but for this optimization only sign extensions are
13113 usable. */
13114 tree tem2 = TREE_OPERAND (arg0, 0);
13115 while (tem != tem2)
13117 if (TREE_CODE (tem2) != NOP_EXPR
13118 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13120 tem = NULL_TREE;
13121 break;
13123 tem2 = TREE_OPERAND (tem2, 0);
13125 /* sign_bit_p only checks ARG1 bits within A's precision.
13126 If <sign bit of A> has wider type than A, bits outside
13127 of A's precision in <sign bit of A> need to be checked.
13128 If they are all 0, this optimization needs to be done
13129 in unsigned A's type; if they are all 1, in signed A's type;
13130 otherwise this can't be done. */
13131 if (tem
13132 && TYPE_PRECISION (TREE_TYPE (tem))
13133 < TYPE_PRECISION (TREE_TYPE (arg1))
13134 && TYPE_PRECISION (TREE_TYPE (tem))
13135 < TYPE_PRECISION (type))
13137 int inner_width, outer_width;
13138 tree tem_type;
13140 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13141 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13142 if (outer_width > TYPE_PRECISION (type))
13143 outer_width = TYPE_PRECISION (type);
13145 wide_int mask = wi::shifted_mask
13146 (inner_width, outer_width - inner_width, false,
13147 TYPE_PRECISION (TREE_TYPE (arg1)));
13149 wide_int common = mask & wi::to_wide (arg1);
13150 if (common == mask)
13152 tem_type = signed_type_for (TREE_TYPE (tem));
13153 tem = fold_convert_loc (loc, tem_type, tem);
13155 else if (common == 0)
13157 tem_type = unsigned_type_for (TREE_TYPE (tem));
13158 tem = fold_convert_loc (loc, tem_type, tem);
13160 else
13161 tem = NULL;
13164 if (tem)
13165 return
13166 fold_convert_loc (loc, type,
13167 fold_build2_loc (loc, BIT_AND_EXPR,
13168 TREE_TYPE (tem), tem,
13169 fold_convert_loc (loc,
13170 TREE_TYPE (tem),
13171 arg1)));
13174 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13175 already handled above. */
13176 if (TREE_CODE (arg0) == BIT_AND_EXPR
13177 && integer_onep (TREE_OPERAND (arg0, 1))
13178 && integer_zerop (op2)
13179 && integer_pow2p (arg1))
13181 tree tem = TREE_OPERAND (arg0, 0);
13182 STRIP_NOPS (tem);
13183 if (TREE_CODE (tem) == RSHIFT_EXPR
13184 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13185 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13186 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13187 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13188 fold_convert_loc (loc, type,
13189 TREE_OPERAND (tem, 0)),
13190 op1);
13193 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13194 is probably obsolete because the first operand should be a
13195 truth value (that's why we have the two cases above), but let's
13196 leave it in until we can confirm this for all front-ends. */
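/* E.g. (A & 8) != 0 ? 8 : 0 is just A & 8. */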
13197 if (integer_zerop (op2)
13198 && TREE_CODE (arg0) == NE_EXPR
13199 && integer_zerop (TREE_OPERAND (arg0, 1))
13200 && integer_pow2p (arg1)
13201 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13202 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13203 arg1, OEP_ONLY_CONST)
13204 /* operand_equal_p compares just value, not precision, so e.g.
13205 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13206 second operand 32-bit -128, which is not a power of two (or vice
13207 versa). */
13208 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13209 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13211 /* Disable the transformations below for vectors, since
13212 fold_binary_op_with_conditional_arg may undo them immediately,
13213 yielding an infinite loop. */
13214 if (code == VEC_COND_EXPR)
13215 return NULL_TREE;
13217 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13218 if (integer_zerop (op2)
13219 && truth_value_p (TREE_CODE (arg0))
13220 && truth_value_p (TREE_CODE (arg1))
13221 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13222 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13223 : TRUTH_ANDIF_EXPR,
13224 type, fold_convert_loc (loc, type, arg0), op1);
13226 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13227 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13228 && truth_value_p (TREE_CODE (arg0))
13229 && truth_value_p (TREE_CODE (arg1))
13230 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13232 location_t loc0 = expr_location_or (arg0, loc);
13233 /* Only perform transformation if ARG0 is easily inverted. */
13234 tem = fold_invert_truthvalue (loc0, arg0);
13235 if (tem)
13236 return fold_build2_loc (loc, code == VEC_COND_EXPR
13237 ? BIT_IOR_EXPR
13238 : TRUTH_ORIF_EXPR,
13239 type, fold_convert_loc (loc, type, tem),
13240 op1);
13243 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13244 if (integer_zerop (arg1)
13245 && truth_value_p (TREE_CODE (arg0))
13246 && truth_value_p (TREE_CODE (op2))
13247 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13249 location_t loc0 = expr_location_or (arg0, loc);
13250 /* Only perform transformation if ARG0 is easily inverted. */
13251 tem = fold_invert_truthvalue (loc0, arg0);
13252 if (tem)
13253 return fold_build2_loc (loc, code == VEC_COND_EXPR
13254 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13255 type, fold_convert_loc (loc, type, tem),
13256 op2);
13259 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13260 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13261 && truth_value_p (TREE_CODE (arg0))
13262 && truth_value_p (TREE_CODE (op2))
13263 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13264 return fold_build2_loc (loc, code == VEC_COND_EXPR
13265 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13266 type, fold_convert_loc (loc, type, arg0), op2);
13268 return NULL_TREE;
13270 case CALL_EXPR:
13271 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13272 of fold_ternary on them. */
13273 gcc_unreachable ();
13275 case BIT_FIELD_REF:
13276 if (TREE_CODE (arg0) == VECTOR_CST
13277 && (type == TREE_TYPE (TREE_TYPE (arg0))
13278 || (VECTOR_TYPE_P (type)
13279 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13280 && tree_fits_uhwi_p (op1)
13281 && tree_fits_uhwi_p (op2))
13283 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13284 unsigned HOST_WIDE_INT width
13285 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13286 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13287 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13288 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13290 if (n != 0
13291 && (idx % width) == 0
13292 && (n % width) == 0
13293 && known_le ((idx + n) / width,
13294 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13296 idx = idx / width;
13297 n = n / width;
13299 if (TREE_CODE (arg0) == VECTOR_CST)
13301 if (n == 1)
13303 tem = VECTOR_CST_ELT (arg0, idx);
13304 if (VECTOR_TYPE_P (type))
13305 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13306 return tem;
13309 tree_vector_builder vals (type, n, 1);
13310 for (unsigned i = 0; i < n; ++i)
13311 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13312 return vals.build ();
13317 /* On constants we can use native encode/interpret to constant
13318 fold (nearly) all BIT_FIELD_REFs. */
13319 if (CONSTANT_CLASS_P (arg0)
13320 && can_native_interpret_type_p (type)
13321 && BITS_PER_UNIT == 8
13322 && tree_fits_uhwi_p (op1)
13323 && tree_fits_uhwi_p (op2))
13325 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13326 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13327 /* Limit ourselves to a reasonable amount of work. To relax the
13328 other limitations we need bit-shifting of the buffer
13329 and rounding up the size. */
13330 if (bitpos % BITS_PER_UNIT == 0
13331 && bitsize % BITS_PER_UNIT == 0
13332 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13334 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13335 unsigned HOST_WIDE_INT len
13336 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13337 bitpos / BITS_PER_UNIT);
13338 if (len > 0
13339 && len * BITS_PER_UNIT >= bitsize)
13341 tree v = native_interpret_expr (type, b,
13342 bitsize / BITS_PER_UNIT);
13343 if (v)
13344 return v;
13349 return NULL_TREE;
13351 case VEC_PERM_EXPR:
13352 /* Perform constant folding of VEC_PERM_EXPR. */
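/* For instance, permuting two V4SI constants with the selector
{ 0, 4, 1, 5 } yields a constant interleave of their low halves. */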
13353 if (TREE_CODE (arg2) == VECTOR_CST
13354 && TREE_CODE (op0) == VECTOR_CST
13355 && TREE_CODE (op1) == VECTOR_CST)
13357 /* Build a vector of integers from the tree mask. */
13358 vec_perm_builder builder;
13359 if (!tree_to_vec_perm_builder (&builder, arg2))
13360 return NULL_TREE;
13362 /* Create a vec_perm_indices for the integer vector. */
13363 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13364 bool single_arg = (op0 == op1);
13365 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13366 return fold_vec_perm (type, op0, op1, sel);
13368 return NULL_TREE;
13370 case BIT_INSERT_EXPR:
13371 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
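/* For the scalar case, e.g. inserting the 8-bit value 0xab at bit
position 8 of the 32-bit constant 0x11223344 yields 0x1122ab44: the
destination field is first cleared with a shifted mask, then the
zero-extended replacement is shifted into place and ORed in. */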
13372 if (TREE_CODE (arg0) == INTEGER_CST
13373 && TREE_CODE (arg1) == INTEGER_CST)
13375 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13376 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13377 wide_int tem = (wi::to_wide (arg0)
13378 & wi::shifted_mask (bitpos, bitsize, true,
13379 TYPE_PRECISION (type)));
13380 wide_int tem2
13381 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13382 bitsize), bitpos);
13383 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13385 else if (TREE_CODE (arg0) == VECTOR_CST
13386 && CONSTANT_CLASS_P (arg1)
13387 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13388 TREE_TYPE (arg1)))
13390 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13391 unsigned HOST_WIDE_INT elsize
13392 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13393 if (bitpos % elsize == 0)
13395 unsigned k = bitpos / elsize;
13396 unsigned HOST_WIDE_INT nelts;
13397 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13398 return arg0;
13399 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13401 tree_vector_builder elts (type, nelts, 1);
13402 elts.quick_grow (nelts);
13403 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13404 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13405 return elts.build ();
13409 return NULL_TREE;
13411 default:
13412 return NULL_TREE;
13413 } /* switch (code) */
13416 /* Get the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13417 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
13418 constructor element index of the value returned. If the element is
13419 not found NULL_TREE is returned and *CTOR_IDX is updated to
13420 the index of the element after the ACCESS_INDEX position (which
13421 may be outside of the CTOR array). */
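/* For example, given the CONSTRUCTOR for the GNU C initializer
int a[5] = { [1 ... 3] = 7 }, an ACCESS_INDEX of 2 matches the
RANGE_EXPR designator and returns the element 7. */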
13423 tree
13424 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13425 unsigned *ctor_idx)
13427 tree index_type = NULL_TREE;
13428 signop index_sgn = UNSIGNED;
13429 offset_int low_bound = 0;
13431 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13433 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13434 if (domain_type && TYPE_MIN_VALUE (domain_type))
13436 /* Static constructors for variably sized objects make no sense. */
13437 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13438 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13439 /* ??? When it is obvious that the range is signed, treat it so. */
13440 if (TYPE_UNSIGNED (index_type)
13441 && TYPE_MAX_VALUE (domain_type)
13442 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13443 TYPE_MIN_VALUE (domain_type)))
13445 index_sgn = SIGNED;
13446 low_bound
13447 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13448 SIGNED);
13450 else
13452 index_sgn = TYPE_SIGN (index_type);
13453 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13458 if (index_type)
13459 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13460 index_sgn);
13462 offset_int index = low_bound;
13463 if (index_type)
13464 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13466 offset_int max_index = index;
13467 unsigned cnt;
13468 tree cfield, cval;
13469 bool first_p = true;
13471 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13473 /* An array constructor might explicitly set the index, or specify a range,
13474 or leave the index NULL, meaning that it is the next index after the
13475 previous one. */
13476 if (cfield)
13478 if (TREE_CODE (cfield) == INTEGER_CST)
13479 max_index = index
13480 = offset_int::from (wi::to_wide (cfield), index_sgn);
13481 else
13483 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13484 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13485 index_sgn);
13486 max_index
13487 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13488 index_sgn);
13489 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13492 else if (!first_p)
13494 index = max_index + 1;
13495 if (index_type)
13496 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13497 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13498 max_index = index;
13500 else
13501 first_p = false;
13503 /* Do we have a match? */
13504 if (wi::cmp (access_index, index, index_sgn) >= 0)
13506 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13508 if (ctor_idx)
13509 *ctor_idx = cnt;
13510 return cval;
13513 else if (in_gimple_form)
13514 /* We're past the element we search for. Note that during parsing
13515 the elements might not be sorted.
13516 ??? We should use a binary search and a flag on the
13517 CONSTRUCTOR as to whether elements are sorted in declaration
13518 order. */
13519 break;
13521 if (ctor_idx)
13522 *ctor_idx = cnt;
13523 return NULL_TREE;
13526 /* Perform constant folding and related simplification of EXPR.
13527 The related simplifications include x*1 => x, x*0 => 0, etc.,
13528 and application of the associative law.
13529 NOP_EXPR conversions may be removed freely (as long as we
13530 are careful not to change the type of the overall expression).
13531 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13532 but we can constant-fold them if they have constant operands. */
13534 #ifdef ENABLE_FOLD_CHECKING
13535 # define fold(x) fold_1 (x)
13536 static tree fold_1 (tree);
13537 static
13538 #endif
13539 tree
13540 fold (tree expr)
13542 const tree t = expr;
13543 enum tree_code code = TREE_CODE (t);
13544 enum tree_code_class kind = TREE_CODE_CLASS (code);
13545 tree tem;
13546 location_t loc = EXPR_LOCATION (expr);
13548 /* Return right away if a constant. */
13549 if (kind == tcc_constant)
13550 return t;
13552 /* CALL_EXPR-like objects with variable numbers of operands are
13553 treated specially. */
13554 if (kind == tcc_vl_exp)
13556 if (code == CALL_EXPR)
13558 tem = fold_call_expr (loc, expr, false);
13559 return tem ? tem : expr;
13561 return expr;
13564 if (IS_EXPR_CODE_CLASS (kind))
13566 tree type = TREE_TYPE (t);
13567 tree op0, op1, op2;
13569 switch (TREE_CODE_LENGTH (code))
13571 case 1:
13572 op0 = TREE_OPERAND (t, 0);
13573 tem = fold_unary_loc (loc, code, type, op0);
13574 return tem ? tem : expr;
13575 case 2:
13576 op0 = TREE_OPERAND (t, 0);
13577 op1 = TREE_OPERAND (t, 1);
13578 tem = fold_binary_loc (loc, code, type, op0, op1);
13579 return tem ? tem : expr;
13580 case 3:
13581 op0 = TREE_OPERAND (t, 0);
13582 op1 = TREE_OPERAND (t, 1);
13583 op2 = TREE_OPERAND (t, 2);
13584 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13585 return tem ? tem : expr;
13586 default:
13587 break;
13591 switch (code)
13593 case ARRAY_REF:
13595 tree op0 = TREE_OPERAND (t, 0);
13596 tree op1 = TREE_OPERAND (t, 1);
13598 if (TREE_CODE (op1) == INTEGER_CST
13599 && TREE_CODE (op0) == CONSTRUCTOR
13600 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13602 tree val = get_array_ctor_element_at_index (op0,
13603 wi::to_offset (op1));
13604 if (val)
13605 return val;
13608 return t;
13611 /* Return a VECTOR_CST if possible. */
13612 case CONSTRUCTOR:
13614 tree type = TREE_TYPE (t);
13615 if (TREE_CODE (type) != VECTOR_TYPE)
13616 return t;
13618 unsigned i;
13619 tree val;
13620 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13621 if (! CONSTANT_CLASS_P (val))
13622 return t;
13624 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13627 case CONST_DECL:
13628 return fold (DECL_INITIAL (t));
13630 default:
13631 return t;
13632 } /* switch (code) */
13635 #ifdef ENABLE_FOLD_CHECKING
13636 #undef fold
13638 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13639 hash_table<nofree_ptr_hash<const tree_node> > *);
13640 static void fold_check_failed (const_tree, const_tree);
13641 void print_fold_checksum (const_tree);
13643 /* When --enable-checking=fold, compute a digest of expr before
13644 and after the actual fold call to verify that fold did not
13645 accidentally change the original expr. */
13647 tree
13648 fold (tree expr)
13650 tree ret;
13651 struct md5_ctx ctx;
13652 unsigned char checksum_before[16], checksum_after[16];
13653 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13655 md5_init_ctx (&ctx);
13656 fold_checksum_tree (expr, &ctx, &ht);
13657 md5_finish_ctx (&ctx, checksum_before);
13658 ht.empty ();
13660 ret = fold_1 (expr);
13662 md5_init_ctx (&ctx);
13663 fold_checksum_tree (expr, &ctx, &ht);
13664 md5_finish_ctx (&ctx, checksum_after);
13666 if (memcmp (checksum_before, checksum_after, 16))
13667 fold_check_failed (expr, ret);
13669 return ret;
13672 void
13673 print_fold_checksum (const_tree expr)
13675 struct md5_ctx ctx;
13676 unsigned char checksum[16], cnt;
13677 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13679 md5_init_ctx (&ctx);
13680 fold_checksum_tree (expr, &ctx, &ht);
13681 md5_finish_ctx (&ctx, checksum);
13682 for (cnt = 0; cnt < 16; ++cnt)
13683 fprintf (stderr, "%02x", checksum[cnt]);
13684 putc ('\n', stderr);
13687 static void
13688 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13690 internal_error ("fold check: original tree changed by fold");
13693 static void
13694 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13695 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13697 const tree_node **slot;
13698 enum tree_code code;
13699 union tree_node *buf;
13700 int i, len;
13702 recursive_label:
13703 if (expr == NULL)
13704 return;
13705 slot = ht->find_slot (expr, INSERT);
13706 if (*slot != NULL)
13707 return;
13708 *slot = expr;
13709 code = TREE_CODE (expr);
13710 if (TREE_CODE_CLASS (code) == tcc_declaration
13711 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13713 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13714 size_t sz = tree_size (expr);
13715 buf = XALLOCAVAR (union tree_node, sz);
13716 memcpy ((char *) buf, expr, sz);
13717 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13718 buf->decl_with_vis.symtab_node = NULL;
13719 buf->base.nowarning_flag = 0;
13720 expr = (tree) buf;
13722 else if (TREE_CODE_CLASS (code) == tcc_type
13723 && (TYPE_POINTER_TO (expr)
13724 || TYPE_REFERENCE_TO (expr)
13725 || TYPE_CACHED_VALUES_P (expr)
13726 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13727 || TYPE_NEXT_VARIANT (expr)
13728 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13730 /* Allow these fields to be modified. */
13731 tree tmp;
13732 size_t sz = tree_size (expr);
13733 buf = XALLOCAVAR (union tree_node, sz);
13734 memcpy ((char *) buf, expr, sz);
13735 expr = tmp = (tree) buf;
13736 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13737 TYPE_POINTER_TO (tmp) = NULL;
13738 TYPE_REFERENCE_TO (tmp) = NULL;
13739 TYPE_NEXT_VARIANT (tmp) = NULL;
13740 TYPE_ALIAS_SET (tmp) = -1;
13741 if (TYPE_CACHED_VALUES_P (tmp))
13743 TYPE_CACHED_VALUES_P (tmp) = 0;
13744 TYPE_CACHED_VALUES (tmp) = NULL;
13747 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13749 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13750 that and change builtins.cc etc. instead - see PR89543. */
13751 size_t sz = tree_size (expr);
13752 buf = XALLOCAVAR (union tree_node, sz);
13753 memcpy ((char *) buf, expr, sz);
13754 buf->base.nowarning_flag = 0;
13755 expr = (tree) buf;
13757 md5_process_bytes (expr, tree_size (expr), ctx);
13758 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13759 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13760 if (TREE_CODE_CLASS (code) != tcc_type
13761 && TREE_CODE_CLASS (code) != tcc_declaration
13762 && code != TREE_LIST
13763 && code != SSA_NAME
13764 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13765 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13766 switch (TREE_CODE_CLASS (code))
13768 case tcc_constant:
13769 switch (code)
13771 case STRING_CST:
13772 md5_process_bytes (TREE_STRING_POINTER (expr),
13773 TREE_STRING_LENGTH (expr), ctx);
13774 break;
13775 case COMPLEX_CST:
13776 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13777 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13778 break;
13779 case VECTOR_CST:
13780 len = vector_cst_encoded_nelts (expr);
13781 for (i = 0; i < len; ++i)
13782 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13783 break;
13784 default:
13785 break;
13787 break;
13788 case tcc_exceptional:
13789 switch (code)
13791 case TREE_LIST:
13792 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13793 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13794 expr = TREE_CHAIN (expr);
13795 goto recursive_label;
13796 break;
13797 case TREE_VEC:
13798 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13799 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13800 break;
13801 default:
13802 break;
13804 break;
13805 case tcc_expression:
13806 case tcc_reference:
13807 case tcc_comparison:
13808 case tcc_unary:
13809 case tcc_binary:
13810 case tcc_statement:
13811 case tcc_vl_exp:
13812 len = TREE_OPERAND_LENGTH (expr);
13813 for (i = 0; i < len; ++i)
13814 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13815 break;
13816 case tcc_declaration:
13817 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13818 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13819 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13821 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13822 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13823 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13824 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13825 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13828 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13830 if (TREE_CODE (expr) == FUNCTION_DECL)
13832 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13833 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13835 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13837 break;
13838 case tcc_type:
13839 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13840 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13841 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13842 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13843 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13844 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13845 if (INTEGRAL_TYPE_P (expr)
13846 || SCALAR_FLOAT_TYPE_P (expr))
13848 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13849 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13851 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13852 if (RECORD_OR_UNION_TYPE_P (expr))
13853 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13854 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13855 break;
13856 default:
13857 break;
13861 /* Helper function for outputting the checksum of a tree T. When
13862 debugging with gdb, you can "define mynext" to be "next" followed
13863 by "call debug_fold_checksum (op0)", then just trace down till the
13864 outputs differ. */
13866 DEBUG_FUNCTION void
13867 debug_fold_checksum (const_tree t)
13869 int i;
13870 unsigned char checksum[16];
13871 struct md5_ctx ctx;
13872 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13874 md5_init_ctx (&ctx);
13875 fold_checksum_tree (t, &ctx, &ht);
13876 md5_finish_ctx (&ctx, checksum);
13877 ht.empty ();
13879 for (i = 0; i < 16; i++)
13880 fprintf (stderr, "%d ", checksum[i]);
13882 fprintf (stderr, "\n");
13885 #endif
13887 /* Fold a unary tree expression with code CODE of type TYPE with an
13888 operand OP0. LOC is the location of the resulting expression.
13889 Return a folded expression if successful. Otherwise, return a tree
13890 expression with code CODE of type TYPE with an operand OP0. */
13892 tree
13893 fold_build1_loc (location_t loc,
13894 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13896 tree tem;
13897 #ifdef ENABLE_FOLD_CHECKING
13898 unsigned char checksum_before[16], checksum_after[16];
13899 struct md5_ctx ctx;
13900 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13902 md5_init_ctx (&ctx);
13903 fold_checksum_tree (op0, &ctx, &ht);
13904 md5_finish_ctx (&ctx, checksum_before);
13905 ht.empty ();
13906 #endif
13908 tem = fold_unary_loc (loc, code, type, op0);
13909 if (!tem)
13910 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13912 #ifdef ENABLE_FOLD_CHECKING
13913 md5_init_ctx (&ctx);
13914 fold_checksum_tree (op0, &ctx, &ht);
13915 md5_finish_ctx (&ctx, checksum_after);
13917 if (memcmp (checksum_before, checksum_after, 16))
13918 fold_check_failed (op0, tem);
13919 #endif
13920 return tem;
13923 /* Fold a binary tree expression with code CODE of type TYPE with
13924 operands OP0 and OP1. LOC is the location of the resulting
13925 expression. Return a folded expression if successful. Otherwise,
13926 return a tree expression with code CODE of type TYPE with operands
13927 OP0 and OP1. */
13929 tree
13930 fold_build2_loc (location_t loc,
13931 enum tree_code code, tree type, tree op0, tree op1
13932 MEM_STAT_DECL)
13934 tree tem;
13935 #ifdef ENABLE_FOLD_CHECKING
13936 unsigned char checksum_before_op0[16],
13937 checksum_before_op1[16],
13938 checksum_after_op0[16],
13939 checksum_after_op1[16];
13940 struct md5_ctx ctx;
13941 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13943 md5_init_ctx (&ctx);
13944 fold_checksum_tree (op0, &ctx, &ht);
13945 md5_finish_ctx (&ctx, checksum_before_op0);
13946 ht.empty ();
13948 md5_init_ctx (&ctx);
13949 fold_checksum_tree (op1, &ctx, &ht);
13950 md5_finish_ctx (&ctx, checksum_before_op1);
13951 ht.empty ();
13952 #endif
13954 tem = fold_binary_loc (loc, code, type, op0, op1);
13955 if (!tem)
13956 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13958 #ifdef ENABLE_FOLD_CHECKING
13959 md5_init_ctx (&ctx);
13960 fold_checksum_tree (op0, &ctx, &ht);
13961 md5_finish_ctx (&ctx, checksum_after_op0);
13962 ht.empty ();
13964 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13965 fold_check_failed (op0, tem);
13967 md5_init_ctx (&ctx);
13968 fold_checksum_tree (op1, &ctx, &ht);
13969 md5_finish_ctx (&ctx, checksum_after_op1);
13971 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13972 fold_check_failed (op1, tem);
13973 #endif
13974 return tem;
13977 /* Fold a ternary tree expression with code CODE of type TYPE with
13978 operands OP0, OP1, and OP2. Return a folded expression if
13979 successful. Otherwise, return a tree expression with code CODE of
13980 type TYPE with operands OP0, OP1, and OP2. */
13982 tree
13983 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13984 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13986 tree tem;
13987 #ifdef ENABLE_FOLD_CHECKING
13988 unsigned char checksum_before_op0[16],
13989 checksum_before_op1[16],
13990 checksum_before_op2[16],
13991 checksum_after_op0[16],
13992 checksum_after_op1[16],
13993 checksum_after_op2[16];
13994 struct md5_ctx ctx;
13995 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13997 md5_init_ctx (&ctx);
13998 fold_checksum_tree (op0, &ctx, &ht);
13999 md5_finish_ctx (&ctx, checksum_before_op0);
14000 ht.empty ();
14002 md5_init_ctx (&ctx);
14003 fold_checksum_tree (op1, &ctx, &ht);
14004 md5_finish_ctx (&ctx, checksum_before_op1);
14005 ht.empty ();
14007 md5_init_ctx (&ctx);
14008 fold_checksum_tree (op2, &ctx, &ht);
14009 md5_finish_ctx (&ctx, checksum_before_op2);
14010 ht.empty ();
14011 #endif
14013 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14014 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14015 if (!tem)
14016 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14018 #ifdef ENABLE_FOLD_CHECKING
14019 md5_init_ctx (&ctx);
14020 fold_checksum_tree (op0, &ctx, &ht);
14021 md5_finish_ctx (&ctx, checksum_after_op0);
14022 ht.empty ();
14024 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14025 fold_check_failed (op0, tem);
14027 md5_init_ctx (&ctx);
14028 fold_checksum_tree (op1, &ctx, &ht);
14029 md5_finish_ctx (&ctx, checksum_after_op1);
14030 ht.empty ();
14032 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14033 fold_check_failed (op1, tem);
14035 md5_init_ctx (&ctx);
14036 fold_checksum_tree (op2, &ctx, &ht);
14037 md5_finish_ctx (&ctx, checksum_after_op2);
14039 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14040 fold_check_failed (op2, tem);
14041 #endif
14042 return tem;
14045 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14046 arguments in ARGARRAY, and a null static chain.
14047 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14048 of type TYPE from the given operands as constructed by build_call_array. */
14050 tree
14051 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14052 int nargs, tree *argarray)
14054 tree tem;
14055 #ifdef ENABLE_FOLD_CHECKING
14056 unsigned char checksum_before_fn[16],
14057 checksum_before_arglist[16],
14058 checksum_after_fn[16],
14059 checksum_after_arglist[16];
14060 struct md5_ctx ctx;
14061 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14062 int i;
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (fn, &ctx, &ht);
14066 md5_finish_ctx (&ctx, checksum_before_fn);
14067 ht.empty ();
14069 md5_init_ctx (&ctx);
14070 for (i = 0; i < nargs; i++)
14071 fold_checksum_tree (argarray[i], &ctx, &ht);
14072 md5_finish_ctx (&ctx, checksum_before_arglist);
14073 ht.empty ();
14074 #endif
14076 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14077 if (!tem)
14078 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14080 #ifdef ENABLE_FOLD_CHECKING
14081 md5_init_ctx (&ctx);
14082 fold_checksum_tree (fn, &ctx, &ht);
14083 md5_finish_ctx (&ctx, checksum_after_fn);
14084 ht.empty ();
14086 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14087 fold_check_failed (fn, tem);
14089 md5_init_ctx (&ctx);
14090 for (i = 0; i < nargs; i++)
14091 fold_checksum_tree (argarray[i], &ctx, &ht);
14092 md5_finish_ctx (&ctx, checksum_after_arglist);
14094 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14095 fold_check_failed (NULL_TREE, tem);
14096 #endif
14097 return tem;
14100 /* Perform constant folding and related simplification of initializer
14101 expression EXPR. These behave identically to "fold_buildN" but ignore
14102 potential run-time traps and exceptions that fold must preserve. */
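/* For example, with -ftrapping-math in effect, fold must normally keep
1.0 / 0.0 as a potentially trapping operation, but within a static
initializer it may be folded to +Inf. */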
14104 #define START_FOLD_INIT \
14105 int saved_signaling_nans = flag_signaling_nans;\
14106 int saved_trapping_math = flag_trapping_math;\
14107 int saved_rounding_math = flag_rounding_math;\
14108 int saved_trapv = flag_trapv;\
14109 int saved_folding_initializer = folding_initializer;\
14110 flag_signaling_nans = 0;\
14111 flag_trapping_math = 0;\
14112 flag_rounding_math = 0;\
14113 flag_trapv = 0;\
14114 folding_initializer = 1;
14116 #define END_FOLD_INIT \
14117 flag_signaling_nans = saved_signaling_nans;\
14118 flag_trapping_math = saved_trapping_math;\
14119 flag_rounding_math = saved_rounding_math;\
14120 flag_trapv = saved_trapv;\
14121 folding_initializer = saved_folding_initializer;
14123 tree
14124 fold_init (tree expr)
14126 tree result;
14127 START_FOLD_INIT;
14129 result = fold (expr);
14131 END_FOLD_INIT;
14132 return result;
14135 tree
14136 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14137 tree type, tree op)
14139 tree result;
14140 START_FOLD_INIT;
14142 result = fold_build1_loc (loc, code, type, op);
14144 END_FOLD_INIT;
14145 return result;
14148 tree
14149 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14150 tree type, tree op0, tree op1)
14152 tree result;
14153 START_FOLD_INIT;
14155 result = fold_build2_loc (loc, code, type, op0, op1);
14157 END_FOLD_INIT;
14158 return result;
14161 tree
14162 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14163 int nargs, tree *argarray)
14165 tree result;
14166 START_FOLD_INIT;
14168 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14170 END_FOLD_INIT;
14171 return result;
14174 tree
14175 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14176 tree lhs, tree rhs)
14178 tree result;
14179 START_FOLD_INIT;
14181 result = fold_binary_loc (loc, code, type, lhs, rhs);
14183 END_FOLD_INIT;
14184 return result;
14187 #undef START_FOLD_INIT
14188 #undef END_FOLD_INIT
14190 /* Determine if the first argument is a multiple of the second argument.
14191 Return false if it is not, or if we cannot easily determine it to be.
14193 An example of the sort of thing we care about (at this point; this routine
14194 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14195 fold cases do now) is discovering that
14197 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14199 is a multiple of
14201 SAVE_EXPR (J * 8)
14203 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14205 This code also handles discovering that
14207 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14209 is a multiple of 8 so we don't have to worry about dealing with a
14210 possible remainder.
14212 Note that we *look* inside a SAVE_EXPR only to determine how it was
14213 calculated; it is not safe for fold to do much of anything else with the
14214 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14215 at run time. For example, the latter example above *cannot* be implemented
14216 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14217 evaluation time of the original SAVE_EXPR is not necessarily the same at
14218 the time the new expression is evaluated. The only optimization of this
14219 sort that would be valid is changing
14221 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14223 divided by 8 to
14225 SAVE_EXPR (I) * SAVE_EXPR (J)
14227 (where the same SAVE_EXPR (J) is used in the original and the
14228 transformed version).
14230 NOWRAP specifies whether all outer operations in TYPE should
14231 be considered not wrapping. Any type conversion within TOP acts
14232 as a barrier and we will fall back to NOWRAP being false.
14233 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14234 as not wrapping even though they are generally using unsigned arithmetic. */
14236 bool
14237 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14239 gimple *stmt;
14240 tree op1, op2;
14242 if (operand_equal_p (top, bottom, 0))
14243 return true;
14245 if (TREE_CODE (type) != INTEGER_TYPE)
14246 return false;
14248 switch (TREE_CODE (top))
14250 case BIT_AND_EXPR:
14251 /* Bitwise and provides a power of two multiple. If the mask is
14252 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14253 if (!integer_pow2p (bottom))
14254 return false;
14255 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14256 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14258 case MULT_EXPR:
14259 /* If the multiplication can wrap we cannot recurse further unless
14260 the bottom is a power of two which is where wrapping does not
14261 matter. */
14262 if (!nowrap
14263 && !TYPE_OVERFLOW_UNDEFINED (type)
14264 && !integer_pow2p (bottom))
14265 return false;
14266 if (TREE_CODE (bottom) == INTEGER_CST)
14268 op1 = TREE_OPERAND (top, 0);
14269 op2 = TREE_OPERAND (top, 1);
14270 if (TREE_CODE (op1) == INTEGER_CST)
14271 std::swap (op1, op2);
14272 if (TREE_CODE (op2) == INTEGER_CST)
14274 if (multiple_of_p (type, op2, bottom, nowrap))
14275 return true;
14276 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14277 if (multiple_of_p (type, bottom, op2, nowrap))
14279 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14280 wi::to_widest (op2));
14281 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14283 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14284 return multiple_of_p (type, op1, op2, nowrap);
14287 return multiple_of_p (type, op1, bottom, nowrap);
14290 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14291 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14293 case LSHIFT_EXPR:
14294 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14295 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14297 op1 = TREE_OPERAND (top, 1);
14298 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14300 wide_int mul_op
14301 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14302 return multiple_of_p (type,
14303 wide_int_to_tree (type, mul_op), bottom,
14304 nowrap);
14307 return false;
14309 case MINUS_EXPR:
14310 case PLUS_EXPR:
14311 /* If the addition or subtraction can wrap we cannot recurse further
14312 unless bottom is a power of two which is where wrapping does not
14313 matter. */
14314 if (!nowrap
14315 && !TYPE_OVERFLOW_UNDEFINED (type)
14316 && !integer_pow2p (bottom))
14317 return false;
14319 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14320 unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14321 but 0xfffffffd is not. */
14322 op1 = TREE_OPERAND (top, 1);
14323 if (TREE_CODE (top) == PLUS_EXPR
14324 && nowrap
14325 && TYPE_UNSIGNED (type)
14326 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14327 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14329 /* It is impossible to prove precisely whether op0 +- op1 is a
14330 multiple of bottom, so be conservative here and check whether both
14331 op0 and op1 are multiples of bottom. Note we check the second
14332 operand first since it's usually simpler. */
14333 return (multiple_of_p (type, op1, bottom, nowrap)
14334 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14336 CASE_CONVERT:
14337 /* Can't handle conversions from non-integral or wider integral type. */
14338 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14339 || (TYPE_PRECISION (type)
14340 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14341 return false;
14342 /* NOWRAP only extends to operations in the outermost type so
14343 make sure to strip it off here. */
14344 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14345 TREE_OPERAND (top, 0), bottom, false);
14347 case SAVE_EXPR:
14348 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14350 case COND_EXPR:
14351 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14352 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14354 case INTEGER_CST:
14355 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14356 return false;
14357 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14358 SIGNED);
14360 case SSA_NAME:
14361 if (TREE_CODE (bottom) == INTEGER_CST
14362 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14363 && gimple_code (stmt) == GIMPLE_ASSIGN)
14365 enum tree_code code = gimple_assign_rhs_code (stmt);
14367 /* Check for special cases to see if top is defined as multiple
14368 of bottom:
14370 top = (X & ~(bottom - 1)) ; bottom is power of 2
14374 Y = X % bottom
14375 top = X - Y. */
14376 if (code == BIT_AND_EXPR
14377 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14378 && TREE_CODE (op2) == INTEGER_CST
14379 && integer_pow2p (bottom)
14380 && wi::multiple_of_p (wi::to_widest (op2),
14381 wi::to_widest (bottom), UNSIGNED))
14382 return true;
14384 op1 = gimple_assign_rhs1 (stmt);
14385 if (code == MINUS_EXPR
14386 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14387 && TREE_CODE (op2) == SSA_NAME
14388 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14389 && gimple_code (stmt) == GIMPLE_ASSIGN
14390 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14391 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14392 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14393 return true;
14396 /* fall through */
14398 default:
14399 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14400 return multiple_p (wi::to_poly_widest (top),
14401 wi::to_poly_widest (bottom));
14403 return false;
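/* Added usage sketch -- not part of the original fold-const.cc; the
   helper name is hypothetical.  A minimal caller of multiple_of_p on
   INTEGER_CSTs, exercising the INTEGER_CST arm above.  */

static void ATTRIBUTE_UNUSED
multiple_of_p_usage_sketch (void)
{
  tree top = build_int_cst (integer_type_node, 48);
  tree sixteen = build_int_cst (integer_type_node, 16);
  tree seven = build_int_cst (integer_type_node, 7);
  /* 48 is statically a multiple of 16 but not of 7; both queries end
     in the INTEGER_CST arm, which uses wi::multiple_of_p.  */
  gcc_checking_assert (multiple_of_p (integer_type_node, top, sixteen, false));
  gcc_checking_assert (!multiple_of_p (integer_type_node, top, seven, false));
}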
14407 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14408 This function returns true for integer expressions, and returns
14409 false if uncertain. */
14411 bool
14412 tree_expr_finite_p (const_tree x)
14414 machine_mode mode = element_mode (x);
14415 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14416 return true;
14417 switch (TREE_CODE (x))
14419 case REAL_CST:
14420 return real_isfinite (TREE_REAL_CST_PTR (x));
14421 case COMPLEX_CST:
14422 return tree_expr_finite_p (TREE_REALPART (x))
14423 && tree_expr_finite_p (TREE_IMAGPART (x));
14424 case FLOAT_EXPR:
14425 return true;
14426 case ABS_EXPR:
14427 case CONVERT_EXPR:
14428 case NON_LVALUE_EXPR:
14429 case NEGATE_EXPR:
14430 case SAVE_EXPR:
14431 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14432 case MIN_EXPR:
14433 case MAX_EXPR:
14434 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14435 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14436 case COND_EXPR:
14437 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14438 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14439 case CALL_EXPR:
14440 switch (get_call_combined_fn (x))
14442 CASE_CFN_FABS:
14443 CASE_CFN_FABS_FN:
14444 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14445 CASE_CFN_FMAX:
14446 CASE_CFN_FMAX_FN:
14447 CASE_CFN_FMIN:
14448 CASE_CFN_FMIN_FN:
14449 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14450 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14451 default:
14452 return false;
14455 default:
14456 return false;
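/* Added usage sketch (not in the original source; helper name is
   hypothetical): the REAL_CST and ABS_EXPR arms of tree_expr_finite_p.  */

static void ATTRIBUTE_UNUSED
tree_expr_finite_p_sketch (void)
{
  /* A literal 1.0 is finite, and fabs of a finite operand stays
     finite via the ABS_EXPR recursion above.  */
  tree one = build_real (double_type_node, dconst1);
  gcc_checking_assert (tree_expr_finite_p (one));
  gcc_checking_assert (tree_expr_finite_p (fold_build1 (ABS_EXPR,
							double_type_node,
							one)));
}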
14460 /* Return true if expression X evaluates to an infinity.
14461 This function returns false for integer expressions. */
14463 bool
14464 tree_expr_infinite_p (const_tree x)
14466 if (!HONOR_INFINITIES (x))
14467 return false;
14468 switch (TREE_CODE (x))
14470 case REAL_CST:
14471 return real_isinf (TREE_REAL_CST_PTR (x));
14472 case ABS_EXPR:
14473 case NEGATE_EXPR:
14474 case NON_LVALUE_EXPR:
14475 case SAVE_EXPR:
14476 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14477 case COND_EXPR:
14478 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14479 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14480 default:
14481 return false;
14485 /* Return true if expression X could evaluate to an infinity.
14486 This function returns false for integer expressions, and returns
14487 true if uncertain. */
14489 bool
14490 tree_expr_maybe_infinite_p (const_tree x)
14492 if (!HONOR_INFINITIES (x))
14493 return false;
14494 switch (TREE_CODE (x))
14496 case REAL_CST:
14497 return real_isinf (TREE_REAL_CST_PTR (x));
14498 case FLOAT_EXPR:
14499 return false;
14500 case ABS_EXPR:
14501 case NEGATE_EXPR:
14502 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14503 case COND_EXPR:
14504 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14505 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14506 default:
14507 return true;
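/* Added sketch (not in the original source; helper name hypothetical):
   the two infinity predicates agree on a literal +Inf, assuming
   infinities are honored for double (i.e. no -ffinite-math-only).  */

static void ATTRIBUTE_UNUSED
infinity_predicates_sketch (void)
{
  REAL_VALUE_TYPE r;
  real_inf (&r);
  tree inf = build_real (double_type_node, r);
  /* Provably infinite, hence also possibly infinite.  */
  gcc_checking_assert (tree_expr_infinite_p (inf));
  gcc_checking_assert (tree_expr_maybe_infinite_p (inf));
}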
14511 /* Return true if expression X evaluates to a signaling NaN.
14512 This function returns false for integer expressions. */
14514 bool
14515 tree_expr_signaling_nan_p (const_tree x)
14517 if (!HONOR_SNANS (x))
14518 return false;
14519 switch (TREE_CODE (x))
14521 case REAL_CST:
14522 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14523 case NON_LVALUE_EXPR:
14524 case SAVE_EXPR:
14525 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14526 case COND_EXPR:
14527 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14528 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14529 default:
14530 return false;
14534 /* Return true if expression X could evaluate to a signaling NaN.
14535 This function returns false for integer expressions, and returns
14536 true if uncertain. */
14538 bool
14539 tree_expr_maybe_signaling_nan_p (const_tree x)
14541 if (!HONOR_SNANS (x))
14542 return false;
14543 switch (TREE_CODE (x))
14545 case REAL_CST:
14546 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14547 case FLOAT_EXPR:
14548 return false;
14549 case ABS_EXPR:
14550 case CONVERT_EXPR:
14551 case NEGATE_EXPR:
14552 case NON_LVALUE_EXPR:
14553 case SAVE_EXPR:
14554 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14555 case MIN_EXPR:
14556 case MAX_EXPR:
14557 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14558 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14559 case COND_EXPR:
14560 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14561 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14562 case CALL_EXPR:
14563 switch (get_call_combined_fn (x))
14565 CASE_CFN_FABS:
14566 CASE_CFN_FABS_FN:
14567 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14568 CASE_CFN_FMAX:
14569 CASE_CFN_FMAX_FN:
14570 CASE_CFN_FMIN:
14571 CASE_CFN_FMIN_FN:
14572 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14573 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14574 default:
14575 return true;
14577 default:
14578 return true;
14582 /* Return true if expression X evaluates to a NaN.
14583 This function returns false for integer expressions. */
14585 bool
14586 tree_expr_nan_p (const_tree x)
14588 if (!HONOR_NANS (x))
14589 return false;
14590 switch (TREE_CODE (x))
14592 case REAL_CST:
14593 return real_isnan (TREE_REAL_CST_PTR (x));
14594 case NON_LVALUE_EXPR:
14595 case SAVE_EXPR:
14596 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14597 case COND_EXPR:
14598 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14599 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14600 default:
14601 return false;
14605 /* Return true if expression X could evaluate to a NaN.
14606 This function returns false for integer expressions, and returns
14607 true if uncertain. */
14609 bool
14610 tree_expr_maybe_nan_p (const_tree x)
14612 if (!HONOR_NANS (x))
14613 return false;
14614 switch (TREE_CODE (x))
14616 case REAL_CST:
14617 return real_isnan (TREE_REAL_CST_PTR (x));
14618 case FLOAT_EXPR:
14619 return false;
14620 case PLUS_EXPR:
14621 case MINUS_EXPR:
14622 case MULT_EXPR:
14623 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14624 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14625 case ABS_EXPR:
14626 case CONVERT_EXPR:
14627 case NEGATE_EXPR:
14628 case NON_LVALUE_EXPR:
14629 case SAVE_EXPR:
14630 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14631 case MIN_EXPR:
14632 case MAX_EXPR:
14633 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14634 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14635 case COND_EXPR:
14636 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14637 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14638 case CALL_EXPR:
14639 switch (get_call_combined_fn (x))
14641 CASE_CFN_FABS:
14642 CASE_CFN_FABS_FN:
14643 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14644 CASE_CFN_FMAX:
14645 CASE_CFN_FMAX_FN:
14646 CASE_CFN_FMIN:
14647 CASE_CFN_FMIN_FN:
14648 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14649 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14650 default:
14651 return true;
14653 default:
14654 return true;
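/* Added sketch (not in the original source; helper name hypothetical):
   the PLUS_EXPR arm above can only rule out a NaN when both operands
   are provably finite, since Inf + -Inf yields a NaN.  */

static void ATTRIBUTE_UNUSED
maybe_nan_sketch (void)
{
  tree one = build_real (double_type_node, dconst1);
  /* Built with build2 rather than fold_build2 so the sum is not
     folded away and the PLUS_EXPR arm really runs.  */
  tree sum = build2 (PLUS_EXPR, double_type_node, one, one);
  gcc_checking_assert (!tree_expr_maybe_nan_p (sum));
}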
14658 /* Return true if expression X could evaluate to -0.0.
14659 This function returns true if uncertain. */
14661 bool
14662 tree_expr_maybe_real_minus_zero_p (const_tree x)
14664 if (!HONOR_SIGNED_ZEROS (x))
14665 return false;
14666 switch (TREE_CODE (x))
14668 case REAL_CST:
14669 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14670 case INTEGER_CST:
14671 case FLOAT_EXPR:
14672 case ABS_EXPR:
14673 return false;
14674 case NON_LVALUE_EXPR:
14675 case SAVE_EXPR:
14676 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14677 case COND_EXPR:
14678 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14679 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14680 case CALL_EXPR:
14681 switch (get_call_combined_fn (x))
14683 CASE_CFN_FABS:
14684 CASE_CFN_FABS_FN:
14685 return false;
14686 default:
14687 break;
14689 default:
14690 break;
14692 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14693 * but currently those predicates require tree and not const_tree. */
14694 return true;
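/* Added sketch (not in the original source; helper name hypothetical):
   a literal -0.0 trips the REAL_CST arm, assuming signed zeros are
   honored for double.  */

static void ATTRIBUTE_UNUSED
minus_zero_sketch (void)
{
  tree nzero = build_real (double_type_node, real_value_negate (&dconst0));
  gcc_checking_assert (tree_expr_maybe_real_minus_zero_p (nzero));
}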
14697 #define tree_expr_nonnegative_warnv_p(X, Y) \
14698 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14700 #define RECURSE(X) \
14701 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
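/* Added note: within the block below, a direct call such as
     tree_expr_nonnegative_warnv_p (op0, strict_overflow_p, depth)
   is rejected at compile time by the _Pragma above.  Writing
     RECURSE (op0)
   instead expands to the parenthesized call -- the parentheses around
   the function name keep the function-like macro from triggering --
   with DEPTH + 1, so every recursive query pays the depth increment
   that bounds recursion via param_max_ssa_name_query_depth.  */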
14703 /* Return true if CODE or TYPE is known to be non-negative. */
14705 static bool
14706 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14708 if (!VECTOR_TYPE_P (type)
14709 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14710 && truth_value_p (code))
14711 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14712 have a signed:1 type (where the values are -1 and 0). */
14713 return true;
14714 return false;
14717 /* Return true if (CODE OP0) is known to be non-negative. If the return
14718 value is based on the assumption that signed overflow is undefined,
14719 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14720 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14722 bool
14723 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14724 bool *strict_overflow_p, int depth)
14726 if (TYPE_UNSIGNED (type))
14727 return true;
14729 switch (code)
14731 case ABS_EXPR:
14732 /* We can't return 1 if flag_wrapv is set because
14733 ABS_EXPR<INT_MIN> = INT_MIN. */
14734 if (!ANY_INTEGRAL_TYPE_P (type))
14735 return true;
14736 if (TYPE_OVERFLOW_UNDEFINED (type))
14738 *strict_overflow_p = true;
14739 return true;
14741 break;
14743 case NON_LVALUE_EXPR:
14744 case FLOAT_EXPR:
14745 case FIX_TRUNC_EXPR:
14746 return RECURSE (op0);
14748 CASE_CONVERT:
14750 tree inner_type = TREE_TYPE (op0);
14751 tree outer_type = type;
14753 if (SCALAR_FLOAT_TYPE_P (outer_type))
14755 if (SCALAR_FLOAT_TYPE_P (inner_type))
14756 return RECURSE (op0);
14757 if (INTEGRAL_TYPE_P (inner_type))
14759 if (TYPE_UNSIGNED (inner_type))
14760 return true;
14761 return RECURSE (op0);
14764 else if (INTEGRAL_TYPE_P (outer_type))
14766 if (SCALAR_FLOAT_TYPE_P (inner_type))
14767 return RECURSE (op0);
14768 if (INTEGRAL_TYPE_P (inner_type))
14769 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14770 && TYPE_UNSIGNED (inner_type);
14773 break;
14775 default:
14776 return tree_simple_nonnegative_warnv_p (code, type);
14779 /* We don't know sign of `t', so be conservative and return false. */
14780 return false;
14783 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14784 value is based on the assumption that signed overflow is undefined,
14785 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14786 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14788 bool
14789 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14790 tree op1, bool *strict_overflow_p,
14791 int depth)
14793 if (TYPE_UNSIGNED (type))
14794 return true;
14796 switch (code)
14798 case POINTER_PLUS_EXPR:
14799 case PLUS_EXPR:
14800 if (FLOAT_TYPE_P (type))
14801 return RECURSE (op0) && RECURSE (op1);
14803 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14804 both unsigned and at least 2 bits shorter than the result. */
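	 /* Added reasoning sketch: if x < 2^P1 and y < 2^P2 then
	    x + y < 2^(MAX (P1, P2) + 1), so whenever MAX (P1, P2) + 1
	    is still less than TYPE_PRECISION (TYPE) the sum cannot
	    reach the sign bit and must be non-negative.  */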
14805 if (TREE_CODE (type) == INTEGER_TYPE
14806 && TREE_CODE (op0) == NOP_EXPR
14807 && TREE_CODE (op1) == NOP_EXPR)
14809 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14810 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14811 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14812 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14814 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14815 TYPE_PRECISION (inner2)) + 1;
14816 return prec < TYPE_PRECISION (type);
14819 break;
14821 case MULT_EXPR:
14822 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14824 /* x * x is always non-negative for floating point x
14825 or without overflow. */
14826 if (operand_equal_p (op0, op1, 0)
14827 || (RECURSE (op0) && RECURSE (op1)))
14829 if (ANY_INTEGRAL_TYPE_P (type)
14830 && TYPE_OVERFLOW_UNDEFINED (type))
14831 *strict_overflow_p = true;
14832 return true;
14836 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14837 both unsigned and their total bits is shorter than the result. */
14838 if (TREE_CODE (type) == INTEGER_TYPE
14839 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14840 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14842 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14843 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14844 : TREE_TYPE (op0);
14845 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14846 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14847 : TREE_TYPE (op1);
14849 bool unsigned0 = TYPE_UNSIGNED (inner0);
14850 bool unsigned1 = TYPE_UNSIGNED (inner1);
14852 if (TREE_CODE (op0) == INTEGER_CST)
14853 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14855 if (TREE_CODE (op1) == INTEGER_CST)
14856 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14858 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14859 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14861 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14862 ? tree_int_cst_min_precision (op0, UNSIGNED)
14863 : TYPE_PRECISION (inner0);
14865 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14866 ? tree_int_cst_min_precision (op1, UNSIGNED)
14867 : TYPE_PRECISION (inner1);
14869 return precision0 + precision1 < TYPE_PRECISION (type);
14872 return false;
14874 case BIT_AND_EXPR:
14875 return RECURSE (op0) || RECURSE (op1);
14877 case MAX_EXPR:
14878 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14879 things. */
14880 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14881 return RECURSE (op0) && RECURSE (op1);
14882 return RECURSE (op0) || RECURSE (op1);
14884 case BIT_IOR_EXPR:
14885 case BIT_XOR_EXPR:
14886 case MIN_EXPR:
14887 case RDIV_EXPR:
14888 case TRUNC_DIV_EXPR:
14889 case CEIL_DIV_EXPR:
14890 case FLOOR_DIV_EXPR:
14891 case ROUND_DIV_EXPR:
14892 return RECURSE (op0) && RECURSE (op1);
14894 case TRUNC_MOD_EXPR:
14895 return RECURSE (op0);
14897 case FLOOR_MOD_EXPR:
14898 return RECURSE (op1);
14900 case CEIL_MOD_EXPR:
14901 case ROUND_MOD_EXPR:
14902 default:
14903 return tree_simple_nonnegative_warnv_p (code, type);
14906 /* We don't know sign of `t', so be conservative and return false. */
14907 return false;
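/* Added sketch (not in the original source; helper name hypothetical):
   the x * x shortcut in the MULT_EXPR arm, assuming the default
   signed-overflow-is-undefined semantics (no -fwrapv).  */

static void ATTRIBUTE_UNUSED
binary_nonnegative_sketch (void)
{
  bool strict = false;
  tree x = build_int_cst (integer_type_node, -5);
  /* operand_equal_p (x, x) holds, so this succeeds and records the
     overflow assumption in STRICT.  */
  gcc_checking_assert (tree_binary_nonnegative_warnv_p
			 (MULT_EXPR, integer_type_node, x, x, &strict, 0));
  gcc_checking_assert (strict);
}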
14910 /* Return true if T is known to be non-negative. If the return
14911 value is based on the assumption that signed overflow is undefined,
14912 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14913 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14915 bool
14916 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14918 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14919 return true;
14921 switch (TREE_CODE (t))
14923 case INTEGER_CST:
14924 return tree_int_cst_sgn (t) >= 0;
14926 case REAL_CST:
14927 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14929 case FIXED_CST:
14930 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14932 case COND_EXPR:
14933 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14935 case SSA_NAME:
14936 /* Limit the depth of recursion to avoid quadratic behavior.
14937 This is expected to catch almost all occurrences in practice.
14938 If this code misses important cases that unbounded recursion
14939 would not, passes that need this information could be revised
14940 to provide it through dataflow propagation. */
14941 return (!name_registered_for_update_p (t)
14942 && depth < param_max_ssa_name_query_depth
14943 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14944 strict_overflow_p, depth));
14946 default:
14947 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14951 /* Return true if T is known to be non-negative. If the return
14952 value is based on the assumption that signed overflow is undefined,
14953 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14954 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14956 bool
14957 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14958 bool *strict_overflow_p, int depth)
14960 switch (fn)
14962 CASE_CFN_ACOS:
14963 CASE_CFN_ACOS_FN:
14964 CASE_CFN_ACOSH:
14965 CASE_CFN_ACOSH_FN:
14966 CASE_CFN_CABS:
14967 CASE_CFN_CABS_FN:
14968 CASE_CFN_COSH:
14969 CASE_CFN_COSH_FN:
14970 CASE_CFN_ERFC:
14971 CASE_CFN_ERFC_FN:
14972 CASE_CFN_EXP:
14973 CASE_CFN_EXP_FN:
14974 CASE_CFN_EXP10:
14975 CASE_CFN_EXP2:
14976 CASE_CFN_EXP2_FN:
14977 CASE_CFN_FABS:
14978 CASE_CFN_FABS_FN:
14979 CASE_CFN_FDIM:
14980 CASE_CFN_FDIM_FN:
14981 CASE_CFN_HYPOT:
14982 CASE_CFN_HYPOT_FN:
14983 CASE_CFN_POW10:
14984 CASE_CFN_FFS:
14985 CASE_CFN_PARITY:
14986 CASE_CFN_POPCOUNT:
14987 CASE_CFN_CLZ:
14988 CASE_CFN_CLRSB:
14989 case CFN_BUILT_IN_BSWAP16:
14990 case CFN_BUILT_IN_BSWAP32:
14991 case CFN_BUILT_IN_BSWAP64:
14992 case CFN_BUILT_IN_BSWAP128:
14993 /* Always true. */
14994 return true;
14996 CASE_CFN_SQRT:
14997 CASE_CFN_SQRT_FN:
14998 /* sqrt(-0.0) is -0.0. */
14999 if (!HONOR_SIGNED_ZEROS (type))
15000 return true;
15001 return RECURSE (arg0);
15003 CASE_CFN_ASINH:
15004 CASE_CFN_ASINH_FN:
15005 CASE_CFN_ATAN:
15006 CASE_CFN_ATAN_FN:
15007 CASE_CFN_ATANH:
15008 CASE_CFN_ATANH_FN:
15009 CASE_CFN_CBRT:
15010 CASE_CFN_CBRT_FN:
15011 CASE_CFN_CEIL:
15012 CASE_CFN_CEIL_FN:
15013 CASE_CFN_ERF:
15014 CASE_CFN_ERF_FN:
15015 CASE_CFN_EXPM1:
15016 CASE_CFN_EXPM1_FN:
15017 CASE_CFN_FLOOR:
15018 CASE_CFN_FLOOR_FN:
15019 CASE_CFN_FMOD:
15020 CASE_CFN_FMOD_FN:
15021 CASE_CFN_FREXP:
15022 CASE_CFN_FREXP_FN:
15023 CASE_CFN_ICEIL:
15024 CASE_CFN_IFLOOR:
15025 CASE_CFN_IRINT:
15026 CASE_CFN_IROUND:
15027 CASE_CFN_LCEIL:
15028 CASE_CFN_LDEXP:
15029 CASE_CFN_LFLOOR:
15030 CASE_CFN_LLCEIL:
15031 CASE_CFN_LLFLOOR:
15032 CASE_CFN_LLRINT:
15033 CASE_CFN_LLRINT_FN:
15034 CASE_CFN_LLROUND:
15035 CASE_CFN_LLROUND_FN:
15036 CASE_CFN_LRINT:
15037 CASE_CFN_LRINT_FN:
15038 CASE_CFN_LROUND:
15039 CASE_CFN_LROUND_FN:
15040 CASE_CFN_MODF:
15041 CASE_CFN_MODF_FN:
15042 CASE_CFN_NEARBYINT:
15043 CASE_CFN_NEARBYINT_FN:
15044 CASE_CFN_RINT:
15045 CASE_CFN_RINT_FN:
15046 CASE_CFN_ROUND:
15047 CASE_CFN_ROUND_FN:
15048 CASE_CFN_ROUNDEVEN:
15049 CASE_CFN_ROUNDEVEN_FN:
15050 CASE_CFN_SCALB:
15051 CASE_CFN_SCALBLN:
15052 CASE_CFN_SCALBLN_FN:
15053 CASE_CFN_SCALBN:
15054 CASE_CFN_SCALBN_FN:
15055 CASE_CFN_SIGNBIT:
15056 CASE_CFN_SIGNIFICAND:
15057 CASE_CFN_SINH:
15058 CASE_CFN_SINH_FN:
15059 CASE_CFN_TANH:
15060 CASE_CFN_TANH_FN:
15061 CASE_CFN_TRUNC:
15062 CASE_CFN_TRUNC_FN:
15063 /* True if the 1st argument is nonnegative. */
15064 return RECURSE (arg0);
15066 CASE_CFN_FMAX:
15067 CASE_CFN_FMAX_FN:
15068 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15069 things. In the presence of sNaNs, we're only guaranteed to be
15070 non-negative if both operands are non-negative. In the presence
15071 of qNaNs, we're non-negative if either operand is non-negative
15072 and can't be a qNaN, or if both operands are non-negative. */
15073 if (tree_expr_maybe_signaling_nan_p (arg0)
15074 || tree_expr_maybe_signaling_nan_p (arg1))
15075 return RECURSE (arg0) && RECURSE (arg1);
15076 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15077 || RECURSE (arg1))
15078 : (RECURSE (arg1)
15079 && !tree_expr_maybe_nan_p (arg1));
15081 CASE_CFN_FMIN:
15082 CASE_CFN_FMIN_FN:
15083 /* True if the 1st AND 2nd arguments are nonnegative. */
15084 return RECURSE (arg0) && RECURSE (arg1);
15086 CASE_CFN_COPYSIGN:
15087 CASE_CFN_COPYSIGN_FN:
15088 /* True if the 2nd argument is nonnegative. */
15089 return RECURSE (arg1);
15091 CASE_CFN_POWI:
15092 /* True if the 1st argument is nonnegative or the second
15093 argument is an even integer. */
15094 if (TREE_CODE (arg1) == INTEGER_CST
15095 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15096 return true;
15097 return RECURSE (arg0);
15099 CASE_CFN_POW:
15100 CASE_CFN_POW_FN:
15101 /* True if the 1st argument is nonnegative or the second
15102 argument is an even integer valued real. */
15103 if (TREE_CODE (arg1) == REAL_CST)
15105 REAL_VALUE_TYPE c;
15106 HOST_WIDE_INT n;
15108 c = TREE_REAL_CST (arg1);
15109 n = real_to_integer (&c);
15110 if ((n & 1) == 0)
15112 REAL_VALUE_TYPE cint;
15113 real_from_integer (&cint, VOIDmode, n, SIGNED);
15114 if (real_identical (&c, &cint))
15115 return true;
15118 return RECURSE (arg0);
15120 default:
15121 break;
15123 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
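/* Added sketch (not in the original source; helper name hypothetical):
   the CASE_CFN_POW arm proves pow (x, 2.0) non-negative from the even
   integer-valued exponent alone, without inspecting X.  */

static void ATTRIBUTE_UNUSED
call_nonnegative_sketch (void)
{
  bool strict = false;
  tree x = build_real (double_type_node, dconstm1);
  tree two = build_real (double_type_node, dconst2);
  gcc_checking_assert (tree_call_nonnegative_warnv_p
			 (double_type_node, CFN_BUILT_IN_POW, x, two,
			  &strict, 0));
}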
15126 /* Return true if T is known to be non-negative. If the return
15127 value is based on the assumption that signed overflow is undefined,
15128 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15129 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15131 static bool
15132 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15134 enum tree_code code = TREE_CODE (t);
15135 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15136 return true;
15138 switch (code)
15140 case TARGET_EXPR:
15142 tree temp = TARGET_EXPR_SLOT (t);
15143 t = TARGET_EXPR_INITIAL (t);
15145 /* If the initializer is non-void, then it's a normal expression
15146 that will be assigned to the slot. */
15147 if (!VOID_TYPE_P (TREE_TYPE (t)))
15148 return RECURSE (t);
15150 /* Otherwise, the initializer sets the slot in some way. One common
15151 way is an assignment statement at the end of the initializer. */
15152 while (1)
15154 if (TREE_CODE (t) == BIND_EXPR)
15155 t = expr_last (BIND_EXPR_BODY (t));
15156 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15157 || TREE_CODE (t) == TRY_CATCH_EXPR)
15158 t = expr_last (TREE_OPERAND (t, 0));
15159 else if (TREE_CODE (t) == STATEMENT_LIST)
15160 t = expr_last (t);
15161 else
15162 break;
15164 if (TREE_CODE (t) == MODIFY_EXPR
15165 && TREE_OPERAND (t, 0) == temp)
15166 return RECURSE (TREE_OPERAND (t, 1));
15168 return false;
15171 case CALL_EXPR:
15173 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15174 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15176 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15177 get_call_combined_fn (t),
15178 arg0,
15179 arg1,
15180 strict_overflow_p, depth);
15182 case COMPOUND_EXPR:
15183 case MODIFY_EXPR:
15184 return RECURSE (TREE_OPERAND (t, 1));
15186 case BIND_EXPR:
15187 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15189 case SAVE_EXPR:
15190 return RECURSE (TREE_OPERAND (t, 0));
15192 default:
15193 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15197 #undef RECURSE
15198 #undef tree_expr_nonnegative_warnv_p
15200 /* Return true if T is known to be non-negative. If the return
15201 value is based on the assumption that signed overflow is undefined,
15202 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15203 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15205 bool
15206 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15208 enum tree_code code;
15209 if (t == error_mark_node)
15210 return false;
15212 code = TREE_CODE (t);
15213 switch (TREE_CODE_CLASS (code))
15215 case tcc_binary:
15216 case tcc_comparison:
15217 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15218 TREE_TYPE (t),
15219 TREE_OPERAND (t, 0),
15220 TREE_OPERAND (t, 1),
15221 strict_overflow_p, depth);
15223 case tcc_unary:
15224 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15225 TREE_TYPE (t),
15226 TREE_OPERAND (t, 0),
15227 strict_overflow_p, depth);
15229 case tcc_constant:
15230 case tcc_declaration:
15231 case tcc_reference:
15232 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15234 default:
15235 break;
15238 switch (code)
15240 case TRUTH_AND_EXPR:
15241 case TRUTH_OR_EXPR:
15242 case TRUTH_XOR_EXPR:
15243 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15244 TREE_TYPE (t),
15245 TREE_OPERAND (t, 0),
15246 TREE_OPERAND (t, 1),
15247 strict_overflow_p, depth);
15248 case TRUTH_NOT_EXPR:
15249 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15250 TREE_TYPE (t),
15251 TREE_OPERAND (t, 0),
15252 strict_overflow_p, depth);
15254 case COND_EXPR:
15255 case CONSTRUCTOR:
15256 case OBJ_TYPE_REF:
15257 case ADDR_EXPR:
15258 case WITH_SIZE_EXPR:
15259 case SSA_NAME:
15260 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15262 default:
15263 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15267 /* Return true if `t' is known to be non-negative. Handle warnings
15268 about undefined signed overflow. */
15270 bool
15271 tree_expr_nonnegative_p (tree t)
15273 bool ret, strict_overflow_p;
15275 strict_overflow_p = false;
15276 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15277 if (strict_overflow_p)
15278 fold_overflow_warning (("assuming signed overflow does not occur when "
15279 "determining that expression is always "
15280 "non-negative"),
15281 WARN_STRICT_OVERFLOW_MISC);
15282 return ret;
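/* Added sketch (not in the original source; helper name hypothetical):
   ABS_EXPR on a signed int is non-negative only by assuming signed
   overflow is undefined (ABS_EXPR<INT_MIN> wraps back to INT_MIN), so
   the wrapper above may emit a -Wstrict-overflow note for this query.
   Assumes the default -fno-wrapv semantics.  */

static void ATTRIBUTE_UNUSED
nonnegative_wrapper_sketch (void)
{
  tree ax = build1 (ABS_EXPR, integer_type_node,
		    build_int_cst (integer_type_node, -3));
  gcc_checking_assert (tree_expr_nonnegative_p (ax));
}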
15286 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15287 For floating point we further ensure that T is not denormal.
15288 Similar logic is present in nonzero_address in rtlanal.h.
15290 If the return value is based on the assumption that signed overflow
15291 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15292 change *STRICT_OVERFLOW_P. */
15294 bool
15295 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15296 bool *strict_overflow_p)
15298 switch (code)
15300 case ABS_EXPR:
15301 return tree_expr_nonzero_warnv_p (op0,
15302 strict_overflow_p);
15304 case NOP_EXPR:
15306 tree inner_type = TREE_TYPE (op0);
15307 tree outer_type = type;
15309 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15310 && tree_expr_nonzero_warnv_p (op0,
15311 strict_overflow_p));
15313 break;
15315 case NON_LVALUE_EXPR:
15316 return tree_expr_nonzero_warnv_p (op0,
15317 strict_overflow_p);
15319 default:
15320 break;
15323 return false;
15326 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15327 For floating point we further ensure that T is not denormal.
15328 Similar logic is present in nonzero_address in rtlanal.h.
15330 If the return value is based on the assumption that signed overflow
15331 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15332 change *STRICT_OVERFLOW_P. */
15334 bool
15335 tree_binary_nonzero_warnv_p (enum tree_code code,
15336 tree type,
15337 tree op0,
15338 tree op1, bool *strict_overflow_p)
15340 bool sub_strict_overflow_p;
15341 switch (code)
15343 case POINTER_PLUS_EXPR:
15344 case PLUS_EXPR:
15345 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15347 /* In the presence of negative values it is hard
15348 to say anything definite. */
15349 sub_strict_overflow_p = false;
15350 if (!tree_expr_nonnegative_warnv_p (op0,
15351 &sub_strict_overflow_p)
15352 || !tree_expr_nonnegative_warnv_p (op1,
15353 &sub_strict_overflow_p))
15354 return false;
15355 /* One of the operands must be positive and the other non-negative. */
15356 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15357 overflows, on a twos-complement machine the sum of two
15358 nonnegative numbers can never be zero. */
15359 return (tree_expr_nonzero_warnv_p (op0,
15360 strict_overflow_p)
15361 || tree_expr_nonzero_warnv_p (op1,
15362 strict_overflow_p));
15364 break;
15366 case MULT_EXPR:
15367 if (TYPE_OVERFLOW_UNDEFINED (type))
15369 if (tree_expr_nonzero_warnv_p (op0,
15370 strict_overflow_p)
15371 && tree_expr_nonzero_warnv_p (op1,
15372 strict_overflow_p))
15374 *strict_overflow_p = true;
15375 return true;
15378 break;
15380 case MIN_EXPR:
15381 sub_strict_overflow_p = false;
15382 if (tree_expr_nonzero_warnv_p (op0,
15383 &sub_strict_overflow_p)
15384 && tree_expr_nonzero_warnv_p (op1,
15385 &sub_strict_overflow_p))
15387 if (sub_strict_overflow_p)
15388 *strict_overflow_p = true;
15390 break;
15392 case MAX_EXPR:
15393 sub_strict_overflow_p = false;
15394 if (tree_expr_nonzero_warnv_p (op0,
15395 &sub_strict_overflow_p))
15397 if (sub_strict_overflow_p)
15398 *strict_overflow_p = true;
15400 /* When both operands are nonzero, then MAX must be too. */
15401 if (tree_expr_nonzero_warnv_p (op1,
15402 strict_overflow_p))
15403 return true;
15405 /* MAX where operand 0 is positive is positive. */
15406 return tree_expr_nonnegative_warnv_p (op0,
15407 strict_overflow_p);
15409 /* MAX where operand 1 is positive is positive. */
15410 else if (tree_expr_nonzero_warnv_p (op1,
15411 &sub_strict_overflow_p)
15412 && tree_expr_nonnegative_warnv_p (op1,
15413 &sub_strict_overflow_p))
15415 if (sub_strict_overflow_p)
15416 *strict_overflow_p = true;
15417 return true;
15419 break;
15421 case BIT_IOR_EXPR:
15422 return (tree_expr_nonzero_warnv_p (op1,
15423 strict_overflow_p)
15424 || tree_expr_nonzero_warnv_p (op0,
15425 strict_overflow_p));
15427 default:
15428 break;
15431 return false;
15434 /* Return true when T is an address and is known to be nonzero.
15435 For floating point we further ensure that T is not denormal.
15436 Similar logic is present in nonzero_address in rtlanal.h.
15438 If the return value is based on the assumption that signed overflow
15439 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15440 change *STRICT_OVERFLOW_P. */
15442 bool
15443 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15445 bool sub_strict_overflow_p;
15446 switch (TREE_CODE (t))
15448 case INTEGER_CST:
15449 return !integer_zerop (t);
15451 case ADDR_EXPR:
15453 tree base = TREE_OPERAND (t, 0);
15455 if (!DECL_P (base))
15456 base = get_base_address (base);
15458 if (base && TREE_CODE (base) == TARGET_EXPR)
15459 base = TARGET_EXPR_SLOT (base);
15461 if (!base)
15462 return false;
15464 /* For objects in symbol table check if we know they are non-zero.
15465 Don't do anything for variables and functions before symtab is built;
15466 it is quite possible that they will be declared weak later. */
15467 int nonzero_addr = maybe_nonzero_address (base);
15468 if (nonzero_addr >= 0)
15469 return nonzero_addr;
15471 /* Constants are never weak. */
15472 if (CONSTANT_CLASS_P (base))
15473 return true;
15475 return false;
15478 case COND_EXPR:
15479 sub_strict_overflow_p = false;
15480 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15481 &sub_strict_overflow_p)
15482 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15483 &sub_strict_overflow_p))
15485 if (sub_strict_overflow_p)
15486 *strict_overflow_p = true;
15487 return true;
15489 break;
15491 case SSA_NAME:
15492 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15493 break;
15494 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15496 default:
15497 break;
15499 return false;
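/* Added sketch (not in the original source; helper name hypothetical):
   the INTEGER_CST arm of tree_single_nonzero_warnv_p.  */

static void ATTRIBUTE_UNUSED
single_nonzero_sketch (void)
{
  bool strict = false;
  gcc_checking_assert (tree_single_nonzero_warnv_p
			 (build_int_cst (integer_type_node, 7), &strict));
  gcc_checking_assert (!tree_single_nonzero_warnv_p (integer_zero_node,
						     &strict));
}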
15502 #define integer_valued_real_p(X) \
15503 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15505 #define RECURSE(X) \
15506 ((integer_valued_real_p) (X, depth + 1))
15508 /* Return true if the floating point result of (CODE OP0) has an
15509 integer value. We also allow +Inf, -Inf and NaN to be considered
15510 integer values. Return false for signaling NaN.
15512 DEPTH is the current nesting depth of the query. */
15514 bool
15515 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15517 switch (code)
15519 case FLOAT_EXPR:
15520 return true;
15522 case ABS_EXPR:
15523 return RECURSE (op0);
15525 CASE_CONVERT:
15527 tree type = TREE_TYPE (op0);
15528 if (TREE_CODE (type) == INTEGER_TYPE)
15529 return true;
15530 if (SCALAR_FLOAT_TYPE_P (type))
15531 return RECURSE (op0);
15532 break;
15535 default:
15536 break;
15538 return false;
15541 /* Return true if the floating point result of (CODE OP0 OP1) has an
15542 integer value. We also allow +Inf, -Inf and NaN to be considered
15543 integer values. Return false for signaling NaN.
15545 DEPTH is the current nesting depth of the query. */
15547 bool
15548 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15550 switch (code)
15552 case PLUS_EXPR:
15553 case MINUS_EXPR:
15554 case MULT_EXPR:
15555 case MIN_EXPR:
15556 case MAX_EXPR:
15557 return RECURSE (op0) && RECURSE (op1);
15559 default:
15560 break;
15562 return false;
15565 /* Return true if the floating point result of calling FN with arguments
15566 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to
15567 be considered integer values. Return false for signaling NaN. If FN
15568 takes fewer than 2 arguments, the remaining ARGn are null.
15570 DEPTH is the current nesting depth of the query. */
15572 bool
15573 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15575 switch (fn)
15577 CASE_CFN_CEIL:
15578 CASE_CFN_CEIL_FN:
15579 CASE_CFN_FLOOR:
15580 CASE_CFN_FLOOR_FN:
15581 CASE_CFN_NEARBYINT:
15582 CASE_CFN_NEARBYINT_FN:
15583 CASE_CFN_RINT:
15584 CASE_CFN_RINT_FN:
15585 CASE_CFN_ROUND:
15586 CASE_CFN_ROUND_FN:
15587 CASE_CFN_ROUNDEVEN:
15588 CASE_CFN_ROUNDEVEN_FN:
15589 CASE_CFN_TRUNC:
15590 CASE_CFN_TRUNC_FN:
15591 return true;
15593 CASE_CFN_FMIN:
15594 CASE_CFN_FMIN_FN:
15595 CASE_CFN_FMAX:
15596 CASE_CFN_FMAX_FN:
15597 return RECURSE (arg0) && RECURSE (arg1);
15599 default:
15600 break;
15602 return false;
15605 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15606 has an integer value. We also allow +Inf, -Inf and NaN to be
15607 considered integer values. Return false for signaling NaN.
15609 DEPTH is the current nesting depth of the query. */
15611 bool
15612 integer_valued_real_single_p (tree t, int depth)
15614 switch (TREE_CODE (t))
15616 case REAL_CST:
15617 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15619 case COND_EXPR:
15620 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15622 case SSA_NAME:
15623 /* Limit the depth of recursion to avoid quadratic behavior.
15624 This is expected to catch almost all occurrences in practice.
15625 If this code misses important cases that unbounded recursion
15626 would not, passes that need this information could be revised
15627 to provide it through dataflow propagation. */
15628 return (!name_registered_for_update_p (t)
15629 && depth < param_max_ssa_name_query_depth
15630 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15631 depth));
15633 default:
15634 break;
15636 return false;
15639 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15640 has an integer value. We also allow +Inf, -Inf and NaN to be
15641 considered integer values. Return false for signaling NaN.
15643 DEPTH is the current nesting depth of the query. */
15645 static bool
15646 integer_valued_real_invalid_p (tree t, int depth)
15648 switch (TREE_CODE (t))
15650 case COMPOUND_EXPR:
15651 case MODIFY_EXPR:
15652 case BIND_EXPR:
15653 return RECURSE (TREE_OPERAND (t, 1));
15655 case SAVE_EXPR:
15656 return RECURSE (TREE_OPERAND (t, 0));
15658 default:
15659 break;
15661 return false;
15664 #undef RECURSE
15665 #undef integer_valued_real_p
15667 /* Return true if the floating point expression T has an integer value.
15668 We also allow +Inf, -Inf and NaN to be considered integer values.
15669 Return false for signaling NaN.
15671 DEPTH is the current nesting depth of the query. */
15673 bool
15674 integer_valued_real_p (tree t, int depth)
15676 if (t == error_mark_node)
15677 return false;
15679 STRIP_ANY_LOCATION_WRAPPER (t);
15681 tree_code code = TREE_CODE (t);
15682 switch (TREE_CODE_CLASS (code))
15684 case tcc_binary:
15685 case tcc_comparison:
15686 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15687 TREE_OPERAND (t, 1), depth);
15689 case tcc_unary:
15690 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15692 case tcc_constant:
15693 case tcc_declaration:
15694 case tcc_reference:
15695 return integer_valued_real_single_p (t, depth);
15697 default:
15698 break;
15701 switch (code)
15703 case COND_EXPR:
15704 case SSA_NAME:
15705 return integer_valued_real_single_p (t, depth);
15707 case CALL_EXPR:
15709 tree arg0 = (call_expr_nargs (t) > 0
15710 ? CALL_EXPR_ARG (t, 0)
15711 : NULL_TREE);
15712 tree arg1 = (call_expr_nargs (t) > 1
15713 ? CALL_EXPR_ARG (t, 1)
15714 : NULL_TREE);
15715 return integer_valued_real_call_p (get_call_combined_fn (t),
15716 arg0, arg1, depth);
15719 default:
15720 return integer_valued_real_invalid_p (t, depth);
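/* Added sketch (not in the original source; helper name hypothetical):
   the REAL_CST arm, reached through integer_valued_real_single_p.  */

static void ATTRIBUTE_UNUSED
integer_valued_real_sketch (void)
{
  /* 2.0 is integer valued, 0.5 is not.  */
  gcc_checking_assert (integer_valued_real_p
			 (build_real (double_type_node, dconst2), 0));
  gcc_checking_assert (!integer_valued_real_p
			  (build_real (double_type_node, dconsthalf), 0));
}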
15724 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15725 attempt to fold the expression to a constant without modifying TYPE,
15726 OP0 or OP1.
15728 If the expression could be simplified to a constant, then return
15729 the constant. If the expression would not be simplified to a
15730 constant, then return NULL_TREE. */
15732 tree
15733 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15735 tree tem = fold_binary (code, type, op0, op1);
15736 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15739 /* Given the components of a unary expression CODE, TYPE and OP0,
15740 attempt to fold the expression to a constant without modifying
15741 TYPE or OP0.
15743 If the expression could be simplified to a constant, then return
15744 the constant. If the expression would not be simplified to a
15745 constant, then return NULL_TREE. */
15747 tree
15748 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15750 tree tem = fold_unary (code, type, op0);
15751 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15754 /* If EXP represents referencing an element in a constant string
15755 (either via pointer arithmetic or array indexing), return the
15756 tree representing the value accessed, otherwise return NULL. */
15758 tree
15759 fold_read_from_constant_string (tree exp)
15761 if ((INDIRECT_REF_P (exp)
15762 || TREE_CODE (exp) == ARRAY_REF)
15763 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15765 tree exp1 = TREE_OPERAND (exp, 0);
15766 tree index;
15767 tree string;
15768 location_t loc = EXPR_LOCATION (exp);
15770 if (INDIRECT_REF_P (exp))
15771 string = string_constant (exp1, &index, NULL, NULL);
15772 else
15774 tree low_bound = array_ref_low_bound (exp);
15775 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15777 /* Optimize the special case of a zero lower bound.
15779 We convert the low_bound to sizetype to avoid some problems
15780 with constant folding. (E.g. suppose the lower bound is 1,
15781 and its mode is QI. Without the conversion, (ARRAY
15782 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15783 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15784 if (! integer_zerop (low_bound))
15785 index = size_diffop_loc (loc, index,
15786 fold_convert_loc (loc, sizetype, low_bound));
15788 string = exp1;
15791 scalar_int_mode char_mode;
15792 if (string
15793 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15794 && TREE_CODE (string) == STRING_CST
15795 && tree_fits_uhwi_p (index)
15796 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15797 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15798 &char_mode)
15799 && GET_MODE_SIZE (char_mode) == 1)
15800 return build_int_cst_type (TREE_TYPE (exp),
15801 (TREE_STRING_POINTER (string)
15802 [TREE_INT_CST_LOW (index)]));
15804 return NULL;
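/* Added sketch (not in the original source; the helper name and the
   hand-built array type are hypothetical): folding "abc"[1] to 'b'
   the way a front end would present it.  */

static void ATTRIBUTE_UNUSED
read_from_string_sketch (void)
{
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node,
				      build_index_type (size_int (3)));
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		     NULL_TREE, NULL_TREE);
  tree c = fold_read_from_constant_string (ref);
  /* The index is in bounds and char has a unit-sized integer mode,
     so the checks above succeed.  */
  gcc_checking_assert (c && tree_to_uhwi (c) == 'b');
}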
15807 /* Folds a read from vector element at IDX of vector ARG. */
15809 tree
15810 fold_read_from_vector (tree arg, poly_uint64 idx)
15812 unsigned HOST_WIDE_INT i;
15813 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15814 && known_ge (idx, 0u)
15815 && idx.is_constant (&i))
15817 if (TREE_CODE (arg) == VECTOR_CST)
15818 return VECTOR_CST_ELT (arg, i);
15819 else if (TREE_CODE (arg) == CONSTRUCTOR)
15821 if (CONSTRUCTOR_NELTS (arg)
15822 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15823 return NULL_TREE;
15824 if (i >= CONSTRUCTOR_NELTS (arg))
15825 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15826 return CONSTRUCTOR_ELT (arg, i)->value;
15829 return NULL_TREE;
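/* Added sketch (not in the original source; helper name hypothetical):
   reading lane 1 of the constant vector {10, 20, 30, 40}.  */

static void ATTRIBUTE_UNUSED
read_from_vector_sketch (void)
{
  tree vtype = build_vector_type (integer_type_node, 4);
  tree_vector_builder builder (vtype, 4, 1);
  for (int i = 0; i < 4; i++)
    builder.quick_push (build_int_cst (integer_type_node, 10 * (i + 1)));
  tree elt = fold_read_from_vector (builder.build (), 1);
  gcc_checking_assert (elt && tree_to_shwi (elt) == 20);
}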
15832 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15833 an integer constant, real, or fixed-point constant.
15835 TYPE is the type of the result. */
15837 static tree
15838 fold_negate_const (tree arg0, tree type)
15840 tree t = NULL_TREE;
15842 switch (TREE_CODE (arg0))
15844 case REAL_CST:
15845 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15846 break;
15848 case FIXED_CST:
15850 FIXED_VALUE_TYPE f;
15851 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15852 &(TREE_FIXED_CST (arg0)), NULL,
15853 TYPE_SATURATING (type));
15854 t = build_fixed (type, f);
15855 /* Propagate overflow flags. */
15856 if (overflow_p | TREE_OVERFLOW (arg0))
15857 TREE_OVERFLOW (t) = 1;
15858 break;
15861 default:
15862 if (poly_int_tree_p (arg0))
15864 wi::overflow_type overflow;
15865 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15866 t = force_fit_type (type, res, 1,
15867 (overflow && ! TYPE_UNSIGNED (type))
15868 || TREE_OVERFLOW (arg0));
15869 break;
15872 gcc_unreachable ();
15875 return t;
15878 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15879 an integer constant or real constant.
15881 TYPE is the type of the result. */
15883 tree
15884 fold_abs_const (tree arg0, tree type)
15886 tree t = NULL_TREE;
15888 switch (TREE_CODE (arg0))
15890 case INTEGER_CST:
15892 /* If the value is unsigned or non-negative, then the absolute value
15893 is the same as the ordinary value. */
15894 wide_int val = wi::to_wide (arg0);
15895 wi::overflow_type overflow = wi::OVF_NONE;
15896 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15899 /* If the value is negative, then the absolute value is
15900 its negation. */
15901 else
15902 val = wi::neg (val, &overflow);
15904 /* Force to the destination type, set TREE_OVERFLOW for signed
15905 TYPE only. */
15906 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15908 break;
15910 case REAL_CST:
15911 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15912 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15913 else
15914 t = arg0;
15915 break;
15917 default:
15918 gcc_unreachable ();
15921 return t;
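/* Added sketch (not in the original source; helper name hypothetical):
   abs (-42) folds to 42 through the INTEGER_CST arm; only INT_MIN
   would set TREE_OVERFLOW via force_fit_type.  */

static void ATTRIBUTE_UNUSED
abs_const_sketch (void)
{
  tree a = fold_abs_const (build_int_cst (integer_type_node, -42),
			   integer_type_node);
  gcc_checking_assert (tree_to_shwi (a) == 42);
}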
15924 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15925 constant. TYPE is the type of the result. */
15927 static tree
15928 fold_not_const (const_tree arg0, tree type)
15930 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15932 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15935 /* Given CODE, a relational operator, the target type, TYPE and two
15936 constant operands OP0 and OP1, return the result of the
15937 relational operation. If the result is not a compile time
15938 constant, then return NULL_TREE. */
15940 static tree
15941 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15943 int result, invert;
15945 /* From here on, the only cases we handle are when the result is
15946 known to be a constant. */
15948 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15950 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15951 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15953 /* Handle the cases where either operand is a NaN. */
15954 if (real_isnan (c0) || real_isnan (c1))
15956 switch (code)
15958 case EQ_EXPR:
15959 case ORDERED_EXPR:
15960 result = 0;
15961 break;
15963 case NE_EXPR:
15964 case UNORDERED_EXPR:
15965 case UNLT_EXPR:
15966 case UNLE_EXPR:
15967 case UNGT_EXPR:
15968 case UNGE_EXPR:
15969 case UNEQ_EXPR:
15970 result = 1;
15971 break;
15973 case LT_EXPR:
15974 case LE_EXPR:
15975 case GT_EXPR:
15976 case GE_EXPR:
15977 case LTGT_EXPR:
15978 if (flag_trapping_math)
15979 return NULL_TREE;
15980 result = 0;
15981 break;
15983 default:
15984 gcc_unreachable ();
15987 return constant_boolean_node (result, type);
15990 return constant_boolean_node (real_compare (code, c0, c1), type);
15993 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15995 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15996 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15997 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16000 /* Handle equality/inequality of complex constants. */
16001 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16003 tree rcond = fold_relational_const (code, type,
16004 TREE_REALPART (op0),
16005 TREE_REALPART (op1));
16006 tree icond = fold_relational_const (code, type,
16007 TREE_IMAGPART (op0),
16008 TREE_IMAGPART (op1));
16009 if (code == EQ_EXPR)
16010 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16011 else if (code == NE_EXPR)
16012 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16013 else
16014 return NULL_TREE;
16017 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16019 if (!VECTOR_TYPE_P (type))
16021 /* Have vector comparison with scalar boolean result. */
16022 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16023 && known_eq (VECTOR_CST_NELTS (op0),
16024 VECTOR_CST_NELTS (op1)));
16025 unsigned HOST_WIDE_INT nunits;
16026 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16027 return NULL_TREE;
16028 for (unsigned i = 0; i < nunits; i++)
16030 tree elem0 = VECTOR_CST_ELT (op0, i);
16031 tree elem1 = VECTOR_CST_ELT (op1, i);
16032 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16033 if (tmp == NULL_TREE)
16034 return NULL_TREE;
16035 if (integer_zerop (tmp))
16036 return constant_boolean_node (code == NE_EXPR, type);
16038 return constant_boolean_node (code == EQ_EXPR, type);
16040 tree_vector_builder elts;
16041 if (!elts.new_binary_operation (type, op0, op1, false))
16042 return NULL_TREE;
16043 unsigned int count = elts.encoded_nelts ();
16044 for (unsigned i = 0; i < count; i++)
16046 tree elem_type = TREE_TYPE (type);
16047 tree elem0 = VECTOR_CST_ELT (op0, i);
16048 tree elem1 = VECTOR_CST_ELT (op1, i);
16050 tree tem = fold_relational_const (code, elem_type,
16051 elem0, elem1);
16053 if (tem == NULL_TREE)
16054 return NULL_TREE;
16056 elts.quick_push (build_int_cst (elem_type,
16057 integer_zerop (tem) ? 0 : -1));
16060 return elts.build ();
16063 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16065 To compute GT, swap the arguments and do LT.
16066 To compute GE, do LT and invert the result.
16067 To compute LE, swap the arguments, do LT and invert the result.
16068 To compute NE, do EQ and invert the result.
16070 Therefore, the code below must handle only EQ and LT. */
16072 if (code == LE_EXPR || code == GT_EXPR)
16074 std::swap (op0, op1);
16075 code = swap_tree_comparison (code);
16078 /* Note that it is safe to invert for real values here because we
16079 have already handled the one case where it matters. */
16081 invert = 0;
16082 if (code == NE_EXPR || code == GE_EXPR)
16084 invert = 1;
16085 code = invert_tree_comparison (code, false);
16088 /* Compute a result for LT or EQ if args permit;
16089 otherwise return NULL_TREE. */
16090 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16092 if (code == EQ_EXPR)
16093 result = tree_int_cst_equal (op0, op1);
16094 else
16095 result = tree_int_cst_lt (op0, op1);
16097 else
16098 return NULL_TREE;
16100 if (invert)
16101 result ^= 1;
16102 return constant_boolean_node (result, type);
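/* Added sketch (not in the original source; helper name hypothetical):
   the NaN path above declines LT_EXPR under -ftrapping-math (the
   default), since LT on a NaN raises FE_INVALID, and otherwise folds
   it to false.  */

static void ATTRIBUTE_UNUSED
relational_const_sketch (void)
{
  REAL_VALUE_TYPE rnan;
  real_nan (&rnan, "", 1, TYPE_MODE (double_type_node));
  tree nan = build_real (double_type_node, rnan);
  tree one = build_real (double_type_node, dconst1);
  tree res = fold_relational_const (LT_EXPR, boolean_type_node, nan, one);
  gcc_checking_assert (flag_trapping_math ? res == NULL_TREE
					  : integer_zerop (res));
}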
16105 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16106 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16107 itself. */
16109 tree
16110 fold_build_cleanup_point_expr (tree type, tree expr)
16112 /* If the expression does not have side effects then we don't have to wrap
16113 it with a cleanup point expression. */
16114 if (!TREE_SIDE_EFFECTS (expr))
16115 return expr;
16117 /* If the expression is a return, check whether the expression inside
16118 the return, or the right-hand side of the modify expression inside the
16119 return, has no side effects. If either has none, we don't need to wrap
16120 the expression in a cleanup point expression. Note we don't check the
16121 left-hand side of the modify because it should always be a return decl. */
16122 if (TREE_CODE (expr) == RETURN_EXPR)
16124 tree op = TREE_OPERAND (expr, 0);
16125 if (!op || !TREE_SIDE_EFFECTS (op))
16126 return expr;
16127 op = TREE_OPERAND (op, 1);
16128 if (!TREE_SIDE_EFFECTS (op))
16129 return expr;
16132 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16135 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16136 of an indirection through OP0, or NULL_TREE if no simplification is
16137 possible. */
16139 tree
16140 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16142 tree sub = op0;
16143 tree subtype;
16144 poly_uint64 const_op01;
16146 STRIP_NOPS (sub);
16147 subtype = TREE_TYPE (sub);
16148 if (!POINTER_TYPE_P (subtype)
16149 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16150 return NULL_TREE;
16152 if (TREE_CODE (sub) == ADDR_EXPR)
16154 tree op = TREE_OPERAND (sub, 0);
16155 tree optype = TREE_TYPE (op);
16157 /* *&CONST_DECL -> to the value of the const decl. */
16158 if (TREE_CODE (op) == CONST_DECL)
16159 return DECL_INITIAL (op);
16160 /* *&p => p; make sure to handle *&"str"[cst] here. */
16161 if (type == optype)
16163 tree fop = fold_read_from_constant_string (op);
16164 if (fop)
16165 return fop;
16166 else
16167 return op;
16169 /* *(foo *)&fooarray => fooarray[0] */
16170 else if (TREE_CODE (optype) == ARRAY_TYPE
16171 && type == TREE_TYPE (optype)
16172 && (!in_gimple_form
16173 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16175 tree type_domain = TYPE_DOMAIN (optype);
16176 tree min_val = size_zero_node;
16177 if (type_domain && TYPE_MIN_VALUE (type_domain))
16178 min_val = TYPE_MIN_VALUE (type_domain);
16179 if (in_gimple_form
16180 && TREE_CODE (min_val) != INTEGER_CST)
16181 return NULL_TREE;
16182 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16183 NULL_TREE, NULL_TREE);
16185 /* *(foo *)&complexfoo => __real__ complexfoo */
16186 else if (TREE_CODE (optype) == COMPLEX_TYPE
16187 && type == TREE_TYPE (optype))
16188 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16189 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16190 else if (VECTOR_TYPE_P (optype)
16191 && type == TREE_TYPE (optype))
16193 tree part_width = TYPE_SIZE (type);
16194 tree index = bitsize_int (0);
16195 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16196 index);
16200 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16201 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16203 tree op00 = TREE_OPERAND (sub, 0);
16204 tree op01 = TREE_OPERAND (sub, 1);
16206 STRIP_NOPS (op00);
16207 if (TREE_CODE (op00) == ADDR_EXPR)
16209 tree op00type;
16210 op00 = TREE_OPERAND (op00, 0);
16211 op00type = TREE_TYPE (op00);
16213 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16214 if (VECTOR_TYPE_P (op00type)
16215 && type == TREE_TYPE (op00type)
16216 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16217 but we want to treat offsets with MSB set as negative.
16218 For the code below negative offsets are invalid and
16219 TYPE_SIZE of the element is something unsigned, so
16220 check whether op01 fits into poly_int64, which implies
16221 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16222 then just use poly_uint64 because we want to treat the
16223 value as unsigned. */
16224 && tree_fits_poly_int64_p (op01))
16226 tree part_width = TYPE_SIZE (type);
16227 poly_uint64 max_offset
16228 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16229 * TYPE_VECTOR_SUBPARTS (op00type));
16230 if (known_lt (const_op01, max_offset))
16232 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16233 return fold_build3_loc (loc,
16234 BIT_FIELD_REF, type, op00,
16235 part_width, index);
16238 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16239 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16240 && type == TREE_TYPE (op00type))
16242 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16243 const_op01))
16244 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16246 /* ((foo *)&fooarray)[1] => fooarray[1] */
16247 else if (TREE_CODE (op00type) == ARRAY_TYPE
16248 && type == TREE_TYPE (op00type))
16250 tree type_domain = TYPE_DOMAIN (op00type);
16251 tree min_val = size_zero_node;
16252 if (type_domain && TYPE_MIN_VALUE (type_domain))
16253 min_val = TYPE_MIN_VALUE (type_domain);
16254 poly_uint64 type_size, index;
16255 if (poly_int_tree_p (min_val)
16256 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16257 && multiple_p (const_op01, type_size, &index))
16259 poly_offset_int off = index + wi::to_poly_offset (min_val);
16260 op01 = wide_int_to_tree (sizetype, off);
16261 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16262 NULL_TREE, NULL_TREE);
16268 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16269 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16270 && type == TREE_TYPE (TREE_TYPE (subtype))
16271 && (!in_gimple_form
16272 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16274 tree type_domain;
16275 tree min_val = size_zero_node;
16276 sub = build_fold_indirect_ref_loc (loc, sub);
16277 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16278 if (type_domain && TYPE_MIN_VALUE (type_domain))
16279 min_val = TYPE_MIN_VALUE (type_domain);
16280 if (in_gimple_form
16281 && TREE_CODE (min_val) != INTEGER_CST)
16282 return NULL_TREE;
16283 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16284 NULL_TREE);
16287 return NULL_TREE;
16290 /* Builds an expression for an indirection through T, simplifying some
16291 cases. */
16293 tree
16294 build_fold_indirect_ref_loc (location_t loc, tree t)
16296 tree type = TREE_TYPE (TREE_TYPE (t));
16297 tree sub = fold_indirect_ref_1 (loc, type, t);
16299 if (sub)
16300 return sub;
16302 return build1_loc (loc, INDIRECT_REF, type, t);
16305 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16307 tree
16308 fold_indirect_ref_loc (location_t loc, tree t)
16310 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16312 if (sub)
16313 return sub;
16314 else
16315 return t;
16318 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16319 whose result is ignored. The type of the returned tree need not be
16320 the same as that of the original expression. */
16322 tree
16323 fold_ignored_result (tree t)
16325 if (!TREE_SIDE_EFFECTS (t))
16326 return integer_zero_node;
16328 for (;;)
16329 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16331 case tcc_unary:
16332 t = TREE_OPERAND (t, 0);
16333 break;
16335 case tcc_binary:
16336 case tcc_comparison:
16337 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16338 t = TREE_OPERAND (t, 0);
16339 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16340 t = TREE_OPERAND (t, 1);
16341 else
16342 return t;
16343 break;
16345 case tcc_expression:
16346 switch (TREE_CODE (t))
16348 case COMPOUND_EXPR:
16349 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16350 return t;
16351 t = TREE_OPERAND (t, 0);
16352 break;
16354 case COND_EXPR:
16355 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16356 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16357 return t;
16358 t = TREE_OPERAND (t, 0);
16359 break;
16361 default:
16362 return t;
16364 break;
16366 default:
16367 return t;
16371 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16373 tree
16374 round_up_loc (location_t loc, tree value, unsigned int divisor)
16376 tree div = NULL_TREE;
16378 if (divisor == 1)
16379 return value;
16381 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16382 have to do anything. Only do this when we are not given a const,
16383 because in that case, this check is more expensive than just
16384 doing it. */
16385 if (TREE_CODE (value) != INTEGER_CST)
16387 div = build_int_cst (TREE_TYPE (value), divisor);
16389 if (multiple_of_p (TREE_TYPE (value), value, div))
16390 return value;
16393 /* If divisor is a power of two, simplify this to bit manipulation. */
16394 if (pow2_or_zerop (divisor))
16396 if (TREE_CODE (value) == INTEGER_CST)
16398 wide_int val = wi::to_wide (value);
16399 bool overflow_p;
16401 if ((val & (divisor - 1)) == 0)
16402 return value;
16404 overflow_p = TREE_OVERFLOW (value);
16405 val += divisor - 1;
16406 val &= (int) -divisor;
16407 if (val == 0)
16408 overflow_p = true;
16410 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16412 else
16414 tree t;
16416 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16417 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16418 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16419 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16422 else
16424 if (!div)
16425 div = build_int_cst (TREE_TYPE (value), divisor);
16426 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16427 value = size_binop_loc (loc, MULT_EXPR, value, div);
16430 return value;
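/* Worked example (illustrative, not from the original source): for a
   power-of-two DIVISOR the bit-manipulation path above computes
   (VALUE + DIVISOR - 1) & -DIVISOR, so rounding 10 up to a multiple
   of 8 gives (10 + 7) & -8 == 16.  A non-power-of-two divisor goes
   through the CEIL_DIV_EXPR/MULT_EXPR path instead: rounding 10 up
   to a multiple of 6 gives ceil (10 / 6) * 6 == 12.  */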
16433 /* Likewise, but round down. */
16435 tree
16436 round_down_loc (location_t loc, tree value, int divisor)
16438 tree div = NULL_TREE;
16440 gcc_assert (divisor > 0);
16441 if (divisor == 1)
16442 return value;
16444 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16445 have to do anything. Only do this when we are not given a const,
16446 because in that case, this check is more expensive than just
16447 doing it. */
16448 if (TREE_CODE (value) != INTEGER_CST)
16450 div = build_int_cst (TREE_TYPE (value), divisor);
16452 if (multiple_of_p (TREE_TYPE (value), value, div))
16453 return value;
16456 /* If divisor is a power of two, simplify this to bit manipulation. */
16457 if (pow2_or_zerop (divisor))
16459 tree t;
16461 t = build_int_cst (TREE_TYPE (value), -divisor);
16462 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16464 else
16466 if (!div)
16467 div = build_int_cst (TREE_TYPE (value), divisor);
16468 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16469 value = size_binop_loc (loc, MULT_EXPR, value, div);
16472 return value;
16475 /* Returns the pointer to the base of the object addressed by EXP and
16476 extracts the information about the offset of the access, storing it
16477 in *PBITPOS and *POFFSET. */
16479 static tree
16480 split_address_to_core_and_offset (tree exp,
16481 poly_int64_pod *pbitpos, tree *poffset)
16483 tree core;
16484 machine_mode mode;
16485 int unsignedp, reversep, volatilep;
16486 poly_int64 bitsize;
16487 location_t loc = EXPR_LOCATION (exp);
16489 if (TREE_CODE (exp) == SSA_NAME)
16490 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16491 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16492 exp = gimple_assign_rhs1 (def);
16494 if (TREE_CODE (exp) == ADDR_EXPR)
16496 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16497 poffset, &mode, &unsignedp, &reversep,
16498 &volatilep);
16499 core = build_fold_addr_expr_loc (loc, core);
16501 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16503 core = TREE_OPERAND (exp, 0);
16504 STRIP_NOPS (core);
16505 *pbitpos = 0;
16506 *poffset = TREE_OPERAND (exp, 1);
16507 if (poly_int_tree_p (*poffset))
16509 poly_offset_int tem
16510 = wi::sext (wi::to_poly_offset (*poffset),
16511 TYPE_PRECISION (TREE_TYPE (*poffset)));
16512 tem <<= LOG2_BITS_PER_UNIT;
16513 if (tem.to_shwi (pbitpos))
16514 *poffset = NULL_TREE;
16517 else
16519 core = exp;
16520 *pbitpos = 0;
16521 *poffset = NULL_TREE;
16524 return core;
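/* Illustrative sketch (hypothetical decls, not from the source): for
   EXP == &s.f, where field F starts 4 bytes into S, the result is
   core == &s with *PBITPOS == 32 and *POFFSET == NULL_TREE; for a
   POINTER_PLUS_EXPR "p p+ n" with non-constant N, the result is
   core == p with *PBITPOS == 0 and *POFFSET == n.  */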
16527 /* Returns true if addresses of E1 and E2 differ by a constant, false
16528 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16530 bool
16531 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16533 tree core1, core2;
16534 poly_int64 bitpos1, bitpos2;
16535 tree toffset1, toffset2, tdiff, type;
16537 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16538 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16540 poly_int64 bytepos1, bytepos2;
16541 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16542 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16543 || !operand_equal_p (core1, core2, 0))
16544 return false;
16546 if (toffset1 && toffset2)
16548 type = TREE_TYPE (toffset1);
16549 if (type != TREE_TYPE (toffset2))
16550 toffset2 = fold_convert (type, toffset2);
16552 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16553 if (!cst_and_fits_in_hwi (tdiff))
16554 return false;
16556 *diff = int_cst_value (tdiff);
16558 else if (toffset1 || toffset2)
16560 /* If only one of the offsets is non-constant, the difference cannot
16561 be a constant. */
16562 return false;
16564 else
16565 *diff = 0;
16567 *diff += bytepos1 - bytepos2;
16568 return true;
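/* For example (illustrative, assuming an array A of 4-byte elements):
   E1 == &a[3] and E2 == &a[1] share the core &a, so *DIFF is set to
   8 and the function returns true.  If either address involves a
   non-constant offset, such as &a[i] for variable I, the difference
   is not constant and the function returns false.  */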
16571 /* Return OFF converted to a pointer offset type suitable as offset for
16572 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16573 tree
16574 convert_to_ptrofftype_loc (location_t loc, tree off)
16576 if (ptrofftype_p (TREE_TYPE (off)))
16577 return off;
16578 return fold_convert_loc (loc, sizetype, off);
16581 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16582 tree
16583 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16585 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16586 ptr, convert_to_ptrofftype_loc (loc, off));
16589 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16590 tree
16591 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16593 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16594 ptr, size_int (off));
16597 /* Return a pointer to a NUL-terminated string containing the sequence
16598 of bytes corresponding to the representation of the object referred to
16599 by SRC (or a subsequence of such bytes within it if SRC is a reference
16600 to an initialized constant array plus some constant offset).
16601 Set *STRSIZE to the number of bytes in the constant sequence including
16602 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16603 where A is the array that stores the constant sequence that SRC points
16604 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16605 need not point to a string or even an array of characters but may point
16606 to an object of any type. */
16608 const char *
16609 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16611 /* The offset into the array A storing the string, and A's byte size. */
16612 tree offset_node;
16613 tree mem_size;
16615 if (strsize)
16616 *strsize = 0;
16618 if (strsize)
16619 src = byte_representation (src, &offset_node, &mem_size, NULL);
16620 else
16621 src = string_constant (src, &offset_node, &mem_size, NULL);
16622 if (!src)
16623 return NULL;
16625 unsigned HOST_WIDE_INT offset = 0;
16626 if (offset_node != NULL_TREE)
16628 if (!tree_fits_uhwi_p (offset_node))
16629 return NULL;
16630 else
16631 offset = tree_to_uhwi (offset_node);
16634 if (!tree_fits_uhwi_p (mem_size))
16635 return NULL;
16637 /* ARRAY_SIZE is the byte size of the array the constant sequence
16638 is stored in and equal to sizeof A. INIT_BYTES is the number
16639 of bytes in the constant sequence used to initialize the array,
16640 including any embedded NULs as well as the terminating NUL (for
16641 strings), but not including any trailing zeros/NULs past
16642 the terminating one appended implicitly to a string literal to
16643 zero out the remainder of the array it's stored in. For example,
16644 given:
16645 const char a[7] = "abc\0d";
16646 n = strlen (a + 1);
16647 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16648 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16649 is equal to strlen (A) + 1. */
16650 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16651 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16652 const char *string = TREE_STRING_POINTER (src);
16654 /* Ideally this would turn into a gcc_checking_assert over time. */
16655 if (init_bytes > array_size)
16656 init_bytes = array_size;
16658 if (init_bytes == 0 || offset >= array_size)
16659 return NULL;
16661 if (strsize)
16663 /* Compute and store the number of characters from the beginning
16664 of the substring at OFFSET to the end, including the terminating
16665 nul. Offsets past the initial length refer to null strings. */
16666 if (offset < init_bytes)
16667 *strsize = init_bytes - offset;
16668 else
16669 *strsize = 1;
16671 else
16673 tree eltype = TREE_TYPE (TREE_TYPE (src));
16674 /* Support only properly NUL-terminated single byte strings. */
16675 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16676 return NULL;
16677 if (string[init_bytes - 1] != '\0')
16678 return NULL;
16681 return offset < init_bytes ? string + offset : "";
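/* Worked instance of the example in the comment above: given
     const char a[7] = "abc\0d";
   a call conceptually equivalent to getbyterep (&a[1], &n) returns a
   pointer to the bytes "bc\0d" with n == 5 (INIT_BYTES 6 minus
   OFFSET 1), while the strsize == NULL variant used by c_getstr
   returns the NUL-terminated string "bc".  */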
16684 /* Return a pointer to a NUL-terminated string corresponding to
16685 the expression STR referencing a constant string, possibly
16686 involving a constant offset. Return null if STR either doesn't
16687 reference a constant string or if it involves a nonconstant
16688 offset. */
16690 const char *
16691 c_getstr (tree str)
16693 return getbyterep (str, NULL);
16696 /* Given a tree T, compute which bits in T may be nonzero. */
16698 wide_int
16699 tree_nonzero_bits (const_tree t)
16701 switch (TREE_CODE (t))
16703 case INTEGER_CST:
16704 return wi::to_wide (t);
16705 case SSA_NAME:
16706 return get_nonzero_bits (t);
16707 case NON_LVALUE_EXPR:
16708 case SAVE_EXPR:
16709 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16710 case BIT_AND_EXPR:
16711 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16712 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16713 case BIT_IOR_EXPR:
16714 case BIT_XOR_EXPR:
16715 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16716 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16717 case COND_EXPR:
16718 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16719 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16720 CASE_CONVERT:
16721 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16722 TYPE_PRECISION (TREE_TYPE (t)),
16723 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16724 case PLUS_EXPR:
16725 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16727 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16728 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16729 if (wi::bit_and (nzbits1, nzbits2) == 0)
16730 return wi::bit_or (nzbits1, nzbits2);
16732 break;
16733 case LSHIFT_EXPR:
16734 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16736 tree type = TREE_TYPE (t);
16737 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16738 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16739 TYPE_PRECISION (type));
16740 return wi::neg_p (arg1)
16741 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16742 : wi::lshift (nzbits, arg1);
16744 break;
16745 case RSHIFT_EXPR:
16746 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16748 tree type = TREE_TYPE (t);
16749 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16750 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16751 TYPE_PRECISION (type));
16752 return wi::neg_p (arg1)
16753 ? wi::lshift (nzbits, -arg1)
16754 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16756 break;
16757 default:
16758 break;
16761 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
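/* Illustrative examples (not from the original source), for a 32-bit
   unsigned X of unknown value:

     tree_nonzero_bits (x & 12)       -> 12 (only bits 2 and 3 may be set)
     tree_nonzero_bits ((x & 3) << 2) -> 12 (bits 0 and 1 shifted left by 2)
     tree_nonzero_bits (x)            -> all ones (the default case)

   The PLUS_EXPR case only narrows the result when the operands have
   disjoint nonzero bits, i.e. when the addition can never carry.  */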
16764 /* Helper function for address compare simplifications in match.pd.
16765 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16766 TYPE is the type of comparison operands.
16767 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16768 GENERIC is true for GENERIC folding and false for GIMPLE folding.
16769 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16770 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16771 and 2 if unknown. */
16773 int
16774 address_compare (tree_code code, tree type, tree op0, tree op1,
16775 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16776 bool generic)
16778 if (TREE_CODE (op0) == SSA_NAME)
16779 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
16780 if (TREE_CODE (op1) == SSA_NAME)
16781 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
16782 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16783 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16784 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16785 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16786 if (base0 && TREE_CODE (base0) == MEM_REF)
16788 off0 += mem_ref_offset (base0).force_shwi ();
16789 base0 = TREE_OPERAND (base0, 0);
16791 if (base1 && TREE_CODE (base1) == MEM_REF)
16793 off1 += mem_ref_offset (base1).force_shwi ();
16794 base1 = TREE_OPERAND (base1, 0);
16796 if (base0 == NULL_TREE || base1 == NULL_TREE)
16797 return 2;
16799 int equal = 2;
16800 /* Punt in GENERIC on variables with value expressions;
16801 the value expressions might point to fields/elements
16802 of other vars etc. */
16803 if (generic
16804 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16805 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16806 return 2;
16807 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16809 symtab_node *node0 = symtab_node::get_create (base0);
16810 symtab_node *node1 = symtab_node::get_create (base1);
16811 equal = node0->equal_address_to (node1);
16813 else if ((DECL_P (base0)
16814 || TREE_CODE (base0) == SSA_NAME
16815 || TREE_CODE (base0) == STRING_CST)
16816 && (DECL_P (base1)
16817 || TREE_CODE (base1) == SSA_NAME
16818 || TREE_CODE (base1) == STRING_CST))
16819 equal = (base0 == base1);
16820 /* Assume different STRING_CSTs with the same content will be
16821 merged. */
16822 if (equal == 0
16823 && TREE_CODE (base0) == STRING_CST
16824 && TREE_CODE (base1) == STRING_CST
16825 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16826 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16827 TREE_STRING_LENGTH (base0)) == 0)
16828 equal = 1;
16829 if (equal == 1)
16831 if (code == EQ_EXPR
16832 || code == NE_EXPR
16833 /* If the offsets are equal we can ignore overflow. */
16834 || known_eq (off0, off1)
16835 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16836 /* Or if we compare using pointers to decls or strings. */
16837 || (POINTER_TYPE_P (type)
16838 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16839 return 1;
16840 return 2;
16842 if (equal != 0)
16843 return equal;
16844 if (code != EQ_EXPR && code != NE_EXPR)
16845 return 2;
16847 /* At this point we know (or assume) the two pointers point at
16848 different objects. */
16849 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16850 off0.is_constant (&ioff0);
16851 off1.is_constant (&ioff1);
16852 /* Punt on non-zero offsets from functions. */
16853 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16854 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16855 return 2;
16856 /* Or if the bases are neither decls nor string literals. */
16857 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16858 return 2;
16859 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16860 return 2;
16861 /* For initializers, assume addresses of different functions are
16862 different. */
16863 if (folding_initializer
16864 && TREE_CODE (base0) == FUNCTION_DECL
16865 && TREE_CODE (base1) == FUNCTION_DECL)
16866 return 0;
16868 /* Compute whether one address points to the start of one
16869 object and another one to the end of another one. */
16870 poly_int64 size0 = 0, size1 = 0;
16871 if (TREE_CODE (base0) == STRING_CST)
16873 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16874 equal = 2;
16875 else
16876 size0 = TREE_STRING_LENGTH (base0);
16878 else if (TREE_CODE (base0) == FUNCTION_DECL)
16879 size0 = 1;
16880 else
16882 tree sz0 = DECL_SIZE_UNIT (base0);
16883 if (!tree_fits_poly_int64_p (sz0))
16884 equal = 2;
16885 else
16886 size0 = tree_to_poly_int64 (sz0);
16888 if (TREE_CODE (base1) == STRING_CST)
16890 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16891 equal = 2;
16892 else
16893 size1 = TREE_STRING_LENGTH (base1);
16895 else if (TREE_CODE (base1) == FUNCTION_DECL)
16896 size1 = 1;
16897 else
16899 tree sz1 = DECL_SIZE_UNIT (base1);
16900 if (!tree_fits_poly_int64_p (sz1))
16901 equal = 2;
16902 else
16903 size1 = tree_to_poly_int64 (sz1);
16905 if (equal == 0)
16907 /* If one offset is pointing (or could be) to the beginning of one
16908 object and the other is pointing to one past the last byte of the
16909 other object, punt. */
16910 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16911 equal = 2;
16912 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16913 equal = 2;
16914 /* If both offsets are the same, there are some cases we know that are
16915 ok. Either if we know they aren't zero, or if we know both sizes
16916 are nonzero. */
16917 if (equal == 2
16918 && known_eq (off0, off1)
16919 && (known_ne (off0, 0)
16920 || (known_ne (size0, 0) && known_ne (size1, 0))))
16921 equal = 0;
16924 /* At this point, equal is 2 if either one or both pointers are out of
16925 bounds of their object, or one points to start of its object and the
16926 other points to end of its object. This is unspecified behavior
16927 e.g. in C++. Otherwise equal is 0. */
16928 if (folding_cxx_constexpr && equal)
16929 return equal;
16931 /* When both pointers point to string literals, even when equal is 0,
16932 due to tail merging of string literals the pointers might be the same. */
16933 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16935 if (ioff0 < 0
16936 || ioff1 < 0
16937 || ioff0 > TREE_STRING_LENGTH (base0)
16938 || ioff1 > TREE_STRING_LENGTH (base1))
16939 return 2;
16941 /* If the bytes in the string literals starting at the pointers
16942 differ, the pointers need to be different. */
16943 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
16944 TREE_STRING_POINTER (base1) + ioff1,
16945 MIN (TREE_STRING_LENGTH (base0) - ioff0,
16946 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
16948 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
16949 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
16950 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
16951 ioffmin) == 0)
16952 /* If even the bytes in the string literal before the
16953 pointers are the same, the string literals could be
16954 tail merged. */
16955 return 2;
16957 return 0;
16960 if (folding_cxx_constexpr)
16961 return 0;
16963 /* If this is a pointer comparison, ignore for now even
16964 valid equalities where one pointer is at offset zero
16965 of one object and the other points one past the end of another one. */
16966 if (!INTEGRAL_TYPE_P (type))
16967 return 0;
16969 /* Assume that string literals can't be adjacent to variables
16970 (automatic or global). */
16971 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
16972 return 0;
16974 /* Assume that automatic variables can't be adjacent to global
16975 variables. */
16976 if (is_global_var (base0) != is_global_var (base1))
16977 return 0;
16979 return equal;
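/* Illustrative behavior (hypothetical decls, not from the source):
   comparing &a and &b for two distinct local VAR_DECLs with known
   sizes and strictly in-bounds offsets yields 0 (known unequal);
   comparing &a + off0 with &a + off1 yields 1, reducing the
   comparison to OFF0 cmp OFF1; and 2 is returned whenever the answer
   could depend on object placement, e.g. when one pointer may point
   one past the end of one object and the other to the start of
   another.  */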
16982 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
16983 tree
16984 ctor_single_nonzero_element (const_tree t)
16986 unsigned HOST_WIDE_INT idx;
16987 constructor_elt *ce;
16988 tree elt = NULL_TREE;
16990 if (TREE_CODE (t) != CONSTRUCTOR)
16991 return NULL_TREE;
16992 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
16993 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
16995 if (elt)
16996 return NULL_TREE;
16997 elt = ce->value;
16999 return elt;
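/* For example (illustrative): for the CONSTRUCTOR { 0, 0, 5, 0 } this
   returns the element 5, while { 1, 0, 2 } (two nonzero elements) and
   an all-zero constructor both return NULL_TREE.  */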
17002 #if CHECKING_P
17004 namespace selftest {
17006 /* Helper functions for writing tests of folding trees. */
17008 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17010 static void
17011 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17012 tree constant)
17014 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17017 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
17018 wrapping WRAPPED_EXPR. */
17020 static void
17021 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17022 tree wrapped_expr)
17024 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17025 ASSERT_NE (wrapped_expr, result);
17026 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17027 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17030 /* Verify that various arithmetic binary operations are folded
17031 correctly. */
17033 static void
17034 test_arithmetic_folding ()
17036 tree type = integer_type_node;
17037 tree x = create_tmp_var_raw (type, "x");
17038 tree zero = build_zero_cst (type);
17039 tree one = build_int_cst (type, 1);
17041 /* Addition. */
17042 /* 1 <-- (0 + 1) */
17043 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17044 one);
17045 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17046 one);
17048 /* (nonlvalue)x <-- (x + 0) */
17049 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17050 x);
17052 /* Subtraction. */
17053 /* 0 <-- (x - x) */
17054 assert_binop_folds_to_const (x, MINUS_EXPR, x,
17055 zero);
17056 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17057 x);
17059 /* Multiplication. */
17060 /* 0 <-- (x * 0) */
17061 assert_binop_folds_to_const (x, MULT_EXPR, zero,
17062 zero);
17064 /* (nonlvalue)x <-- (x * 1) */
17065 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17066 x);
17069 namespace test_fold_vec_perm_cst {
17071 /* Build a VECTOR_CST corresponding to VMODE, whose encoding is
17072 given by NPATTERNS, NELTS_PER_PATTERN and STEP.
17073 Fill it with randomized elements, using rand() % THRESHOLD. */
17075 static tree
17076 build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17077 unsigned nelts_per_pattern,
17078 int step = 0, int threshold = 100)
17080 tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17081 tree vectype = build_vector_type_for_mode (inner_type, vmode);
17082 tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17084 // Fill a0 for each pattern
17085 for (unsigned i = 0; i < npatterns; i++)
17086 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17088 if (nelts_per_pattern == 1)
17089 return builder.build ();
17091 // Fill a1 for each pattern
17092 for (unsigned i = 0; i < npatterns; i++)
17093 builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17095 if (nelts_per_pattern == 2)
17096 return builder.build ();
17098 for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17100 tree prev_elem = builder[i - npatterns];
17101 int prev_elem_val = TREE_INT_CST_LOW (prev_elem);
17102 int val = prev_elem_val + step;
17103 builder.quick_push (build_int_cst (inner_type, val));
17106 return builder.build ();
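/* Illustrative shape of the result (element values are random): with
   npatterns == 1, nelts_per_pattern == 3 and step == 1, the encoded
   elements are { a0, a1, a1 + 1 } and the vector continues the
   stepped sequence as { a0, a1, a1 + 1, a1 + 2, ... }, one such
   sequence per pattern.  */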
17109 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17110 when the result is VLA. */
17112 static void
17113 validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17114 tree res, tree *expected_res)
17116 /* Actual npatterns and encoded_elts in res may be less than expected due
17117 to canonicalization. */
17118 ASSERT_TRUE (res != NULL_TREE);
17119 ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17120 ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17122 for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17123 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17126 /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17127 when the result is VLS. */
17129 static void
17130 validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17132 ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17133 for (unsigned i = 0; i < expected_nelts; i++)
17134 ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17137 /* Helper routine to push multiple elements into BUILDER. */
17138 template<unsigned N>
17139 static void builder_push_elems (vec_perm_builder& builder,
17140 poly_uint64 (&elems)[N])
17142 for (unsigned i = 0; i < N; i++)
17143 builder.quick_push (elems[i]);
17146 #define ARG0(index) vector_cst_elt (arg0, index)
17147 #define ARG1(index) vector_cst_elt (arg1, index)
17149 /* Test cases where result is VNx4SI and input vectors are V4SI. */
17151 static void
17152 test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17154 for (int i = 0; i < 10; i++)
17156 /* Case 1:
17157 sel = { 0, 4, 1, 5, ... }
17158 res = { arg0[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1) */
17160 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17161 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17163 tree inner_type
17164 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17165 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17167 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17168 vec_perm_builder builder (res_len, 4, 1);
17169 poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17170 builder_push_elems (builder, mask_elems);
17172 vec_perm_indices sel (builder, 2, res_len);
17173 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17175 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17176 validate_res (4, 1, res, expected_res);
17179 /* Case 2: Same as case 1, but contains an out of bounds access which
17180 should wrap around.
17181 sel = {0, 8, 4, 12, ...} (4, 1)
17182 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17184 tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17185 tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17187 tree inner_type
17188 = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17189 tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17191 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17192 vec_perm_builder builder (res_len, 4, 1);
17193 poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17194 builder_push_elems (builder, mask_elems);
17196 vec_perm_indices sel (builder, 2, res_len);
17197 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17199 tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17200 validate_res (4, 1, res, expected_res);
17205 /* Test cases where result is V4SI and input vectors are VNx4SI. */
17207 static void
17208 test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17210 for (int i = 0; i < 10; i++)
17212 /* Case 1:
17213 sel = { 0, 1, 2, 3}
17214 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17216 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17217 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17219 tree inner_type
17220 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17221 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17223 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17224 vec_perm_builder builder (res_len, 4, 1);
17225 poly_uint64 mask_elems[] = {0, 1, 2, 3};
17226 builder_push_elems (builder, mask_elems);
17228 vec_perm_indices sel (builder, 2, res_len);
17229 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17231 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17232 validate_res_vls (res, expected_res, 4);
17235 /* Case 2: Same as Case 1, but crossing input vector.
17236 sel = {0, 2, 4, 6}
17237 In this case, the index 4 is ambiguous since len = 4 + 4x.
17238 Since we cannot determine at compile time which vector the
17239 index chooses from, fold_vec_perm_cst should return NULL_TREE. */
17241 tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17242 tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17244 tree inner_type
17245 = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17246 tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17248 poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17249 vec_perm_builder builder (res_len, 4, 1);
17250 poly_uint64 mask_elems[] = {0, 2, 4, 6};
17251 builder_push_elems (builder, mask_elems);
17253 vec_perm_indices sel (builder, 2, res_len);
17254 const char *reason;
17255 tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17257 ASSERT_TRUE (res == NULL_TREE);
17258 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17263 /* Test all input vectors. */
17265 static void
17266 test_all_nunits (machine_mode vmode)
17268 /* Test with 10 different inputs. */
17269 for (int i = 0; i < 10; i++)
17271 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17272 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17273 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17275 /* Case 1: mask = {0, ...} // (1, 1)
17276 res = { arg0[0], ... } // (1, 1) */
17278 vec_perm_builder builder (len, 1, 1);
17279 builder.quick_push (0);
17280 vec_perm_indices sel (builder, 2, len);
17281 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17282 tree expected_res[] = { ARG0(0) };
17283 validate_res (1, 1, res, expected_res);
17286 /* Case 2: mask = {len, ...} // (1, 1)
17287 res = { arg1[0], ... } // (1, 1) */
17289 vec_perm_builder builder (len, 1, 1);
17290 builder.quick_push (len);
17291 vec_perm_indices sel (builder, 2, len);
17292 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17294 tree expected_res[] = { ARG1(0) };
17295 validate_res (1, 1, res, expected_res);
17300 /* Test all vectors which contain at least 2 elements. */
17302 static void
17303 test_nunits_min_2 (machine_mode vmode)
17305 for (int i = 0; i < 10; i++)
17307 /* Case 1: mask = { 0, len, ... } // (2, 1)
17308 res = { arg0[0], arg1[0], ... } // (2, 1) */
17310 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17311 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17312 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17314 vec_perm_builder builder (len, 2, 1);
17315 poly_uint64 mask_elems[] = { 0, len };
17316 builder_push_elems (builder, mask_elems);
17318 vec_perm_indices sel (builder, 2, len);
17319 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17321 tree expected_res[] = { ARG0(0), ARG1(0) };
17322 validate_res (2, 1, res, expected_res);
17325 /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17326 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17328 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17329 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17330 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17332 vec_perm_builder builder (len, 2, 2);
17333 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17334 builder_push_elems (builder, mask_elems);
17336 vec_perm_indices sel (builder, 2, len);
17337 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17339 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17340 validate_res (2, 2, res, expected_res);
17343 /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17344 Test that the stepped sequence of the pattern selects from the
17345 same input pattern. Since the input vectors have npatterns = 2,
17346 and step (a2 - a1) = 1, the step is not a multiple of the
17347 npatterns of the input vector, so fold_vec_perm_cst returns NULL_TREE. */
17349 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1);
17350 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17351 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17353 vec_perm_builder builder (len, 1, 3);
17354 poly_uint64 mask_elems[] = { 0, 0, 1 };
17355 builder_push_elems (builder, mask_elems);
17357 vec_perm_indices sel (builder, 2, len);
17358 const char *reason;
17359 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17360 &reason);
17361 ASSERT_TRUE (res == NULL_TREE);
17362 ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17365 /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17366 Test that stepped sequence of the pattern selects from arg0.
17367 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17369 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17370 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17371 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17373 vec_perm_builder builder (len, 1, 3);
17374 poly_uint64 mask_elems[] = { len, 0, 1 };
17375 builder_push_elems (builder, mask_elems);
17377 vec_perm_indices sel (builder, 2, len);
17378 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17380 tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17381 validate_res (1, 3, res, expected_res);
17386 /* Test all vectors which contain at least 4 elements. */
17388 static void
17389 test_nunits_min_4 (machine_mode vmode)
17391 for (int i = 0; i < 10; i++)
17393 /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17394 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17396 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17397 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17398 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17400 vec_perm_builder builder (len, 4, 1);
17401 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17402 builder_push_elems (builder, mask_elems);
17404 vec_perm_indices sel (builder, 2, len);
17405 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17407 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17408 validate_res (4, 1, res, expected_res);
17411 /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17412 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17414 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17415 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17416 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17418 vec_perm_builder builder (arg0_len, 1, 3);
17419 poly_uint64 mask_elems[] = {0, 1, 2};
17420 builder_push_elems (builder, mask_elems);
17422 vec_perm_indices sel (builder, 2, arg0_len);
17423 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17424 tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17425 validate_res (1, 3, res, expected_res);
17428 /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17429 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17431 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17432 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17433 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17435 vec_perm_builder builder (len, 1, 3);
17436 poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17437 builder_push_elems (builder, mask_elems);
17439 vec_perm_indices sel (builder, 2, len);
17440 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17441 tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17442 validate_res (1, 3, res, expected_res);
17445 /* Case 4:
17446 sel = { len, 0, 2, ... } // (1, 3)
17447 This should return NULL_TREE because we cross the input vectors:
17449 Let's assume len = C + Cx
17450 a1 = 0
17451 S = 2
17452 esel = arg0_len / sel_npatterns = C + Cx
17453 ae = 0 + (esel - 2) * S
17454 = 0 + (C + Cx - 2) * 2
17455 = 2(C-2) + 2Cx
17457 For C >= 4:
17458 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17459 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17460 Since q1 != qe, we cross input vectors.
17461 So return NULL_TREE. */
17463 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17464 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17465 poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17467 vec_perm_builder builder (arg0_len, 1, 3);
17468 poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17469 builder_push_elems (builder, mask_elems);
17471 vec_perm_indices sel (builder, 2, arg0_len);
17472 const char *reason;
17473 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17474 ASSERT_TRUE (res == NULL_TREE);
17475 ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17478 /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17479 mask = { 0, len, 1, len + 1, ...} // (2, 2)
17480 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17482 Note that fold_vec_perm_cst will set
17483 res_npatterns = max(4, max(4, 2)) = 4
17484 However after canonicalizing, we will end up with shape (2, 2). */
17486 tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17487 tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17488 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17490 vec_perm_builder builder (len, 2, 2);
17491 poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17492 builder_push_elems (builder, mask_elems);
17494 vec_perm_indices sel (builder, 2, len);
17495 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17496 tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17497 validate_res (2, 2, res, expected_res);
17500 /* Case 6: Test combination in sel, where one pattern is dup and other
17501 is stepped sequence.
17502 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17503 res = { arg0[0], arg0[0], arg0[0],
17504 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17506 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17507 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17508 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17510 vec_perm_builder builder (len, 2, 3);
17511 poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17512 builder_push_elems (builder, mask_elems);
17514 vec_perm_indices sel (builder, 2, len);
17515 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17517 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17518 ARG0(1), ARG0(0), ARG0(2) };
17519 validate_res (2, 3, res, expected_res);
17524 /* Test all vectors which contain at least 8 elements. */
17526 static void
17527 test_nunits_min_8 (machine_mode vmode)
17529 for (int i = 0; i < 10; i++)
17531 /* Case 1: sel_npatterns (4) > input npatterns (2)
17532 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17533 res: { arg0[0], arg0[0], arg0[0], arg1[0],
17534 arg0[2], arg0[0], arg0[3], arg1[0],
17535 arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17537 tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17538 tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17539 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17541 vec_perm_builder builder(len, 4, 3);
17542 poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17543 4, 0, 5, len };
17544 builder_push_elems (builder, mask_elems);
17546 vec_perm_indices sel (builder, 2, len);
17547 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17549 tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17550 ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17551 ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17552 validate_res (4, 3, res, expected_res);
17557 /* Test vectors for which nunits.coeffs[0] <= 4. */
17559 static void
17560 test_nunits_max_4 (machine_mode vmode)
17562 /* Case 1: mask = {0, 4, ...} // (1, 2)
17563 This should return NULL_TREE because the index 4 may choose
17564 from either arg0 or arg1 depending on vector length. */
17566 tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17567 tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17568 poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17570 vec_perm_builder builder (len, 1, 2);
17571 poly_uint64 mask_elems[] = {0, 4};
17572 builder_push_elems (builder, mask_elems);
17574 vec_perm_indices sel (builder, 2, len);
17575 const char *reason;
17576 tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17577 ASSERT_TRUE (res == NULL_TREE);
17578 ASSERT_TRUE (reason != NULL);
17579 ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17583 #undef ARG0
17584 #undef ARG1
17586 /* Return true if SIZE is of the form C + Cx and C is power of 2. */
17588 static bool
17589 is_simple_vla_size (poly_uint64 size)
17591 if (size.is_constant ()
17592 || !pow2p_hwi (size.coeffs[0]))
17593 return false;
17594 for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17595 if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17596 return false;
17597 return true;
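/* Illustrative results (not part of the original source): 4 + 4x
   satisfies the check (non-constant, first coefficient a power of
   two, both coefficients equal); a constant 4 fails the is_constant
   test, and 6 + 6x fails pow2p_hwi.  */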
17600 /* Execute fold_vec_perm_cst unit tests. */
17602 static void
17603 test ()
17605 machine_mode vnx4si_mode = E_VOIDmode;
17606 machine_mode v4si_mode = E_VOIDmode;
17608 machine_mode vmode;
17609 FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
17611 /* Obtain modes corresponding to VNx4SI and V4SI,
17612 to call mixed mode tests below.
17613 FIXME: Is there a better way to do this?  */
17614 if (GET_MODE_INNER (vmode) == SImode)
17616 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17617 if (is_simple_vla_size (nunits)
17618 && nunits.coeffs[0] == 4)
17619 vnx4si_mode = vmode;
17620 else if (known_eq (nunits, poly_uint64 (4)))
17621 v4si_mode = vmode;
17624 if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
17625 || !targetm.vector_mode_supported_p (vmode))
17626 continue;
17628 poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17629 test_all_nunits (vmode);
17630 if (nunits.coeffs[0] >= 2)
17631 test_nunits_min_2 (vmode);
17632 if (nunits.coeffs[0] >= 4)
17633 test_nunits_min_4 (vmode);
17634 if (nunits.coeffs[0] >= 8)
17635 test_nunits_min_8 (vmode);
17637 if (nunits.coeffs[0] <= 4)
17638 test_nunits_max_4 (vmode);
17641 if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
17642 && targetm.vector_mode_supported_p (vnx4si_mode)
17643 && targetm.vector_mode_supported_p (v4si_mode))
17645 test_vnx4si_v4si (vnx4si_mode, v4si_mode);
17646 test_v4si_vnx4si (v4si_mode, vnx4si_mode);
17649 } // end of test_fold_vec_perm_cst namespace
17651 /* Verify that various binary operations on vectors are folded
17652 correctly. */
17654 static void
17655 test_vector_folding ()
17657 tree inner_type = integer_type_node;
17658 tree type = build_vector_type (inner_type, 4);
17659 tree zero = build_zero_cst (type);
17660 tree one = build_one_cst (type);
17661 tree index = build_index_vector (type, 0, 1);
17663 /* Verify equality tests that return a scalar boolean result. */
17664 tree res_type = boolean_type_node;
17665 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
17666 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
17667 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
17668 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
17669 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
17670 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17671 index, one)));
17672 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
17673 index, index)));
17674 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17675 index, index)));
17678 /* Verify folding of VEC_DUPLICATE_EXPRs. */
17680 static void
17681 test_vec_duplicate_folding ()
17683 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
17684 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
17685 /* This will be 1 if VEC_MODE isn't a vector mode. */
17686 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
17688 tree type = build_vector_type (ssizetype, nunits);
17689 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
17690 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
17691 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
17694 /* Run all of the selftests within this file. */
17696 void
17697 fold_const_cc_tests ()
17699 test_arithmetic_folding ();
17700 test_vector_folding ();
17701 test_vec_duplicate_folding ();
17702 test_fold_vec_perm_cst::test ();
17705 } // namespace selftest
17707 #endif /* CHECKING_P */