/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
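/* A quick orientation example (a sketch added for exposition, not part
   of the original sources): size_int and size_binop compose to build
   eagerly folded size expressions, e.g.

     tree four  = size_int (4);
     tree eight = size_int (8);
     tree sum   = size_binop (PLUS_EXPR, four, eight);
     // sum is the INTEGER_CST 12 in sizetype; tree_to_uhwi (sum) == 12

   Both operands of size_binop must have equivalent integer types in the
   sense of int_binop_types_match_p below.  */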
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding a C++ manifestly-constant-evaluated context;
   zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
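/* The encoding packs the three "ordered" outcomes (LT, EQ, GT) plus an
   UNORD bit, so combining comparisons is plain bit arithmetic; an
   illustrative invariant (added here for exposition):

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)      // 1|2 == 3
     COMPCODE_LTGT == (COMPCODE_LT | COMPCODE_GT)      // 1|4 == 5
     COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE)   // 8|6 == 14

   so "A < B || A == B" can be folded by OR-ing the two compcodes.  */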
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 is the same as X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 is the same as X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 is the same as X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 is the same as X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
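/* For example (illustrative only, not from the original sources): for
   the COND_EXPR "a < b ? a : b", a caller passes CMP=LT_EXPR, EXP0=a,
   EXP1=b, EXP2=a, EXP3=b and gets MIN_EXPR back; "a <= b - 1 ? a : b"
   with integer constants likewise yields MIN_EXPR via the X <= Y - 1
   rewrite above:

     tree_code k = minmax_from_comparison (LT_EXPR, a, b, a, b);
     // k == MIN_EXPR for integral a, b  */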
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modifies X in place;
   if the location can and needs to be set, X is unshared first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
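/* A hypothetical call (for exposition): 12 divides evenly by 4 but 13
   does not, so

     tree q = div_if_zero_remainder (size_int (12), size_int (4));
     // q is the INTEGER_CST 3
     tree r = div_if_zero_remainder (size_int (13), size_int (4));
     // r is NULL_TREE  */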
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold (), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
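/* The deferral entry points above pair up like this (a usage sketch
   added for exposition, not code from this file); callers that fold
   speculatively bracket the work so that no -Wstrict-overflow warning
   escapes unless the folded result is actually kept:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool keep = ...;   // caller-specific test of FOLDED
     fold_undefer_overflow_warnings (keep, stmt, 0);  */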
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its operands
	 does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
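/* As an illustration (added for exposition, not from the original
   sources): negating the GENERIC tree for "x + 1" in a wrapping integer
   type goes through the PLUS_EXPR case of fold_negate_expr_1, giving
   "-1 - x" instead of a bare NEGATE_EXPR:

     tree sum = build2 (PLUS_EXPR, type, x, build_one_cst (type));
     tree neg = negate_expr (sum);
     // neg is MINUS_EXPR <INTEGER_CST -1, x> when negation is safe  */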
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
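/* An illustrative decomposition (added for exposition): splitting
   IN = "x - 4" with CODE == PLUS_EXPR and NEGATE_P == 0 gives

     var == x, *minus_litp == 4, *litp == *conp == *minus_conp == 0

   i.e. the subtracted literal is reported through *MINUS_LITP rather
   than being rebuilt as a negated constant.  */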
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
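/* A hypothetical invocation (added for exposition), dividing 7 by 2 as
   signed values:

     wide_int res;
     wi::overflow_type ovf;
     bool ok = wide_int_binop (res, TRUNC_DIV_EXPR, w7, w2, SIGNED, &ovf);
     // ok == true, res == 3, ovf == wi::OVF_NONE

   where w7 and w2 are wide_ints of equal precision holding 7 and 2.  */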
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
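/* Concretely (an illustrative identity, not from the original sources):
   LSHIFT_EXPR distributes in operand 1 because

     (a + b) << c == (a << c) + (b << c)   // modulo overflow

   while operand 2 does not distribute, since in general
   c << (a + b) != (c << a) + (c << b).  This predicate is what lets
   const_binop below operate directly on stepped VECTOR_CST
   encodings.  */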
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform the operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
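/* Example behavior (a sketch added for exposition): with default flags,
   const_binop (PLUS_EXPR, 2.5, 0.25) folds to the REAL_CST 2.75, while
   const_binop (RDIV_EXPR, 1.0, 0.0) returns NULL_TREE under
   flag_trapping_math because folding would hide the division-by-zero
   exception.  */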
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
1978 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1979 is a tree code. The type of the result is taken from the operands.
1980 Both must be equivalent integer types, ala int_binop_types_match_p.
1981 If the operands are constant, so is the result. */
1983 tree
1984 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1986 tree type = TREE_TYPE (arg0);
1988 if (arg0 == error_mark_node || arg1 == error_mark_node)
1989 return error_mark_node;
1991 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1992 TREE_TYPE (arg1)));
1994 /* Handle the special case of two poly_int constants faster. */
1995 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1997 /* And some specific cases even faster than that. */
1998 if (code == PLUS_EXPR)
2000 if (integer_zerop (arg0)
2001 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2002 return arg1;
2003 if (integer_zerop (arg1)
2004 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2005 return arg0;
2007 else if (code == MINUS_EXPR)
2009 if (integer_zerop (arg1)
2010 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2011 return arg0;
2013 else if (code == MULT_EXPR)
2015 if (integer_onep (arg0)
2016 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2017 return arg1;
2020 /* Handle general case of two integer constants. For sizetype
2021 constant calculations we always want to know about overflow,
2022 even in the unsigned case. */
2023 tree res = int_const_binop (code, arg0, arg1, -1);
2024 if (res != NULL_TREE)
2025 return res;
2028 return fold_build2_loc (loc, code, type, arg0, arg1);
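/* Hedged usage sketch: callers normally reach size_binop_loc through
   the size_binop macro from fold-const.h, e.g.

     tree total = size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (a),
			      TYPE_SIZE_UNIT (b));

   Both operands must already have equivalent integer types in the
   sense of int_binop_types_match_p; mixing sizetype with an ordinary
   integer type trips the assertion above.  */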
2031 /* Given two values, either both of sizetype or both of bitsizetype,
2032 compute the difference between the two values. Return the value
2033 in signed type corresponding to the type of the operands. */
2035 tree
2036 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2038 tree type = TREE_TYPE (arg0);
2039 tree ctype;
2041 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2042 TREE_TYPE (arg1)));
2044 /* If the type is already signed, just do the simple thing. */
2045 if (!TYPE_UNSIGNED (type))
2046 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2048 if (type == sizetype)
2049 ctype = ssizetype;
2050 else if (type == bitsizetype)
2051 ctype = sbitsizetype;
2052 else
2053 ctype = signed_type_for (type);
2055 /* If either operand is not a constant, do the conversions to the signed
2056 type and subtract. The hardware will do the right thing with any
2057 overflow in the subtraction. */
2058 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2059 return size_binop_loc (loc, MINUS_EXPR,
2060 fold_convert_loc (loc, ctype, arg0),
2061 fold_convert_loc (loc, ctype, arg1));
2063 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2064 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2065 overflow) and negate (which can't either). Special-case a result
2066 of zero while we're here. */
2067 if (tree_int_cst_equal (arg0, arg1))
2068 return build_int_cst (ctype, 0);
2069 else if (tree_int_cst_lt (arg1, arg0))
2070 return fold_convert_loc (loc, ctype,
2071 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2072 else
2073 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2074 fold_convert_loc (loc, ctype,
2075 size_binop_loc (loc,
2076 MINUS_EXPR,
2077 arg1, arg0)));
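/* Standalone sketch (editor's illustration, not GCC code) of the
   larger-minus-smaller trick above, using fixed-width types and
   assuming the true difference fits in the signed type, as size
   differences do in practice.  */
#include <assert.h>
#include <stdint.h>

static int64_t
size_diff (uint64_t a, uint64_t b)
{
  if (a >= b)
    return (int64_t) (a - b);	/* Difference assumed to fit.  */
  return -(int64_t) (b - a);	/* Subtract the other way and negate.  */
}

int
main (void)
{
  assert (size_diff (10, 3) == 7);
  assert (size_diff (3, 10) == -7);
  return 0;
}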
2080 /* A subroutine of fold_convert_const handling conversions of an
2081 INTEGER_CST to another integer type. */
2083 static tree
2084 fold_convert_const_int_from_int (tree type, const_tree arg1)
2086 /* Given an integer constant, make new constant with new type,
2087 appropriately sign-extended or truncated. Use widest_int
2088 so that any extension is done according to ARG1's type. */
2089 return force_fit_type (type, wi::to_widest (arg1),
2090 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2091 TREE_OVERFLOW (arg1));
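/* Editor's sketch of "appropriately sign-extended or truncated":
   narrowing keeps the low-order bits, and the result is then read
   back according to the target type's signedness (values shown
   assume the usual two's complement behavior).  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t wide = 0x1234ABCD;
  int8_t narrow_s = (int8_t) wide;	/* low byte 0xCD read signed: -51 */
  uint8_t narrow_u = (uint8_t) wide;	/* low byte 0xCD read unsigned: 205 */
  assert (narrow_s == -51 && narrow_u == 205);
  return 0;
}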
2094 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2095 to an integer type. */
2097 static tree
2098 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2100 bool overflow = false;
2101 tree t;
2103 /* The following code implements the floating point to integer
2104 conversion rules required by the Java Language Specification,
2105 that IEEE NaNs are mapped to zero and values that overflow
2106 the target precision saturate, i.e. values greater than
2107 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2108 are mapped to INT_MIN. These semantics are allowed by the
2109 C and C++ standards, which state that the behavior of
2110 FP-to-integer conversion is undefined on overflow. */
2112 wide_int val;
2113 REAL_VALUE_TYPE r;
2114 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2116 switch (code)
2118 case FIX_TRUNC_EXPR:
2119 real_trunc (&r, VOIDmode, &x);
2120 break;
2122 default:
2123 gcc_unreachable ();
2126 /* If R is NaN, return zero and show we have an overflow. */
2127 if (REAL_VALUE_ISNAN (r))
2129 overflow = true;
2130 val = wi::zero (TYPE_PRECISION (type));
2133 /* See if R is less than the lower bound or greater than the
2134 upper bound. */
2136 if (! overflow)
2138 tree lt = TYPE_MIN_VALUE (type);
2139 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2140 if (real_less (&r, &l))
2142 overflow = true;
2143 val = wi::to_wide (lt);
2147 if (! overflow)
2149 tree ut = TYPE_MAX_VALUE (type);
2150 if (ut)
2152 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2153 if (real_less (&u, &r))
2155 overflow = true;
2156 val = wi::to_wide (ut);
2161 if (! overflow)
2162 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2164 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2165 return t;
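/* Standalone sketch (editor's illustration, not GCC code) of the
   saturating semantics described above: NaN -> 0, below INT_MIN ->
   INT_MIN, above INT_MAX -> INT_MAX, everything else truncated
   toward zero.  */
#include <assert.h>
#include <limits.h>
#include <math.h>

static int
sat_fp_to_int (double r)
{
  if (isnan (r))
    return 0;
  if (r < (double) INT_MIN)
    return INT_MIN;
  if (r > (double) INT_MAX)
    return INT_MAX;
  return (int) r;		/* C truncates toward zero.  */
}

int
main (void)
{
  assert (sat_fp_to_int (nan ("")) == 0);
  assert (sat_fp_to_int (1e30) == INT_MAX);
  assert (sat_fp_to_int (-1e30) == INT_MIN);
  assert (sat_fp_to_int (-2.9) == -2);
  return 0;
}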
2168 /* A subroutine of fold_convert_const handling conversions of a
2169 FIXED_CST to an integer type. */
2171 static tree
2172 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2174 tree t;
2175 double_int temp, temp_trunc;
2176 scalar_mode mode;
2178 /* Right shift FIXED_CST to temp by fbit. */
2179 temp = TREE_FIXED_CST (arg1).data;
2180 mode = TREE_FIXED_CST (arg1).mode;
2181 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2183 temp = temp.rshift (GET_MODE_FBIT (mode),
2184 HOST_BITS_PER_DOUBLE_INT,
2185 SIGNED_FIXED_POINT_MODE_P (mode));
2187 /* Left shift temp to temp_trunc by fbit. */
2188 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2189 HOST_BITS_PER_DOUBLE_INT,
2190 SIGNED_FIXED_POINT_MODE_P (mode));
2192 else
2194 temp = double_int_zero;
2195 temp_trunc = double_int_zero;
2198 /* If FIXED_CST is negative, we need to round the value toward 0:
2199 if any fractional bits are nonzero, add 1 to TEMP. */
2200 if (SIGNED_FIXED_POINT_MODE_P (mode)
2201 && temp_trunc.is_negative ()
2202 && TREE_FIXED_CST (arg1).data != temp_trunc)
2203 temp += double_int_one;
2205 /* Given a fixed-point constant, make new constant with new type,
2206 appropriately sign-extended or truncated. */
2207 t = force_fit_type (type, temp, -1,
2208 (temp.is_negative ()
2209 && (TYPE_UNSIGNED (type)
2210 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2211 | TREE_OVERFLOW (arg1));
2213 return t;
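/* Editor's sketch of the rounding step above, with a hypothetical
   Q24.8 layout and assuming an arithmetic right shift of negative
   values (implementation-defined in C, but what GCC provides): the
   shift rounds toward -inf, so when any fractional bit was set we
   add 1 to bring a negative quotient back toward zero.  */
#include <assert.h>
#include <stdint.h>

#define FBITS 8			/* hypothetical Q24.8 layout */

static int32_t
fixed_to_int (int32_t fx)
{
  int32_t q = fx >> FBITS;	/* rounds toward -infinity */
  if (fx < 0 && (fx & ((1 << FBITS) - 1)) != 0)
    q += 1;			/* fractional bits set: round toward zero */
  return q;
}

int
main (void)
{
  assert (fixed_to_int (2 * 256 + 128) == 2);	  /*  2.5 ->  2 */
  assert (fixed_to_int (-(2 * 256 + 128)) == -2); /* -2.5 -> -2 */
  assert (fixed_to_int (-3 * 256) == -3);	  /* exact, no adjustment */
  return 0;
}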
2216 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2217 to another floating point type. */
2219 static tree
2220 fold_convert_const_real_from_real (tree type, const_tree arg1)
2222 REAL_VALUE_TYPE value;
2223 tree t;
2225 /* If the underlying modes are the same, simply treat it as a
2226 copy and rebuild with the TREE_REAL_CST information and the
2227 given type. */
2228 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2230 t = build_real (type, TREE_REAL_CST (arg1));
2231 return t;
2234 /* Don't perform the operation if flag_signaling_nans is on
2235 and the operand is a signaling NaN. */
2236 if (HONOR_SNANS (arg1)
2237 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2238 return NULL_TREE;
2240 /* With flag_rounding_math we should respect the current rounding mode
2241 unless the conversion is exact. */
2242 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2243 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2244 return NULL_TREE;
2246 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2247 t = build_real (type, value);
2249 /* If converting an infinity or NAN to a representation that doesn't
2250 have one, set the overflow bit so that we can produce some kind of
2251 error message at the appropriate point if necessary. It's not the
2252 most user-friendly message, but it's better than nothing. */
2253 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2254 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2255 TREE_OVERFLOW (t) = 1;
2256 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2257 && !MODE_HAS_NANS (TYPE_MODE (type)))
2258 TREE_OVERFLOW (t) = 1;
2259 /* Regular overflow: the conversion produced an infinity in a mode
2260 that can't represent infinities. */
2261 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2262 && REAL_VALUE_ISINF (value)
2263 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2264 TREE_OVERFLOW (t) = 1;
2265 else
2266 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2267 return t;
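/* Standalone sketch (editor's illustration, assuming IEEE 754
   behavior): narrowing a finite double above FLT_MAX produces an
   infinity -- the "regular overflow" case the code above flags via
   TREE_OVERFLOW.  */
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double big = DBL_MAX;		/* finite as a double... */
  float f = (float) big;	/* ...overflows float to +inf */
  assert (isinf (f));
  return 0;
}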
2270 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2271 to a floating point type. */
2273 static tree
2274 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2276 REAL_VALUE_TYPE value;
2277 tree t;
2279 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2280 &TREE_FIXED_CST (arg1));
2281 t = build_real (type, value);
2283 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2284 return t;
2287 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2288 to another fixed-point type. */
2290 static tree
2291 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2293 FIXED_VALUE_TYPE value;
2294 tree t;
2295 bool overflow_p;
2297 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2298 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2299 t = build_fixed (type, value);
2301 /* Propagate overflow flags. */
2302 if (overflow_p | TREE_OVERFLOW (arg1))
2303 TREE_OVERFLOW (t) = 1;
2304 return t;
2307 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2308 to a fixed-point type. */
2310 static tree
2311 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2313 FIXED_VALUE_TYPE value;
2314 tree t;
2315 bool overflow_p;
2316 double_int di;
2318 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2320 di.low = TREE_INT_CST_ELT (arg1, 0);
2321 if (TREE_INT_CST_NUNITS (arg1) == 1)
2322 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2323 else
2324 di.high = TREE_INT_CST_ELT (arg1, 1);
2326 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2327 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2328 TYPE_SATURATING (type));
2329 t = build_fixed (type, value);
2331 /* Propagate overflow flags. */
2332 if (overflow_p | TREE_OVERFLOW (arg1))
2333 TREE_OVERFLOW (t) = 1;
2334 return t;
2337 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2338 to a fixed-point type. */
2340 static tree
2341 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2344 tree t;
2345 bool overflow_p;
2347 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2348 &TREE_REAL_CST (arg1),
2349 TYPE_SATURATING (type));
2350 t = build_fixed (type, value);
2352 /* Propagate overflow flags. */
2353 if (overflow_p | TREE_OVERFLOW (arg1))
2354 TREE_OVERFLOW (t) = 1;
2355 return t;
2358 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2359 type TYPE. If no simplification can be done return NULL_TREE. */
2361 static tree
2362 fold_convert_const (enum tree_code code, tree type, tree arg1)
2364 tree arg_type = TREE_TYPE (arg1);
2365 if (arg_type == type)
2366 return arg1;
2368 /* We can't widen types, since the runtime value could overflow the
2369 original type before being extended to the new type. */
2370 if (POLY_INT_CST_P (arg1)
2371 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2372 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2373 return build_poly_int_cst (type,
2374 poly_wide_int::from (poly_int_cst_value (arg1),
2375 TYPE_PRECISION (type),
2376 TYPE_SIGN (arg_type)));
2378 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2379 || TREE_CODE (type) == OFFSET_TYPE)
2381 if (TREE_CODE (arg1) == INTEGER_CST)
2382 return fold_convert_const_int_from_int (type, arg1);
2383 else if (TREE_CODE (arg1) == REAL_CST)
2384 return fold_convert_const_int_from_real (code, type, arg1);
2385 else if (TREE_CODE (arg1) == FIXED_CST)
2386 return fold_convert_const_int_from_fixed (type, arg1);
2388 else if (TREE_CODE (type) == REAL_TYPE)
2390 if (TREE_CODE (arg1) == INTEGER_CST)
2392 tree res = build_real_from_int_cst (type, arg1);
2393 /* Avoid the folding if flag_rounding_math is on and the
2394 conversion is not exact. */
2395 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2397 bool fail = false;
2398 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2399 TYPE_PRECISION (TREE_TYPE (arg1)));
2400 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2401 return NULL_TREE;
2403 return res;
2405 else if (TREE_CODE (arg1) == REAL_CST)
2406 return fold_convert_const_real_from_real (type, arg1);
2407 else if (TREE_CODE (arg1) == FIXED_CST)
2408 return fold_convert_const_real_from_fixed (type, arg1);
2410 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2412 if (TREE_CODE (arg1) == FIXED_CST)
2413 return fold_convert_const_fixed_from_fixed (type, arg1);
2414 else if (TREE_CODE (arg1) == INTEGER_CST)
2415 return fold_convert_const_fixed_from_int (type, arg1);
2416 else if (TREE_CODE (arg1) == REAL_CST)
2417 return fold_convert_const_fixed_from_real (type, arg1);
2419 else if (TREE_CODE (type) == VECTOR_TYPE)
2421 if (TREE_CODE (arg1) == VECTOR_CST
2422 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2424 tree elttype = TREE_TYPE (type);
2425 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2426 /* We can't handle steps directly when extending, since the
2427 values need to wrap at the original precision first. */
2428 bool step_ok_p
2429 = (INTEGRAL_TYPE_P (elttype)
2430 && INTEGRAL_TYPE_P (arg1_elttype)
2431 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2432 tree_vector_builder v;
2433 if (!v.new_unary_operation (type, arg1, step_ok_p))
2434 return NULL_TREE;
2435 unsigned int len = v.encoded_nelts ();
2436 for (unsigned int i = 0; i < len; ++i)
2438 tree elt = VECTOR_CST_ELT (arg1, i);
2439 tree cvt = fold_convert_const (code, elttype, elt);
2440 if (cvt == NULL_TREE)
2441 return NULL_TREE;
2442 v.quick_push (cvt);
2444 return v.build ();
2447 return NULL_TREE;
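/* Editor's sketch (standalone, not GCC code) of why the VECTOR_TYPE
   case above refuses stepped encodings when widening: the narrow
   elements wrap at their own precision first, so the widened
   sequence need not be linear anymore.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint8_t step = 3, base = 250;
  uint8_t e0 = base, e1 = base + step, e2 = base + 2 * step; /* 250, 253, 0 */
  uint16_t w0 = e0, w1 = e1, w2 = e2;
  assert (w1 - w0 == 3 && w2 - w1 != 3);	/* no common step survives */
  return 0;
}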
2450 /* Construct a vector of zero elements of vector type TYPE. */
2452 static tree
2453 build_zero_vector (tree type)
2455 tree t;
2457 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2458 return build_vector_from_val (type, t);
2461 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2463 bool
2464 fold_convertible_p (const_tree type, const_tree arg)
2466 const_tree orig = TREE_TYPE (arg);
2468 if (type == orig)
2469 return true;
2471 if (TREE_CODE (arg) == ERROR_MARK
2472 || TREE_CODE (type) == ERROR_MARK
2473 || TREE_CODE (orig) == ERROR_MARK)
2474 return false;
2476 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2477 return true;
2479 switch (TREE_CODE (type))
2481 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2482 case POINTER_TYPE: case REFERENCE_TYPE:
2483 case OFFSET_TYPE:
2484 return (INTEGRAL_TYPE_P (orig)
2485 || (POINTER_TYPE_P (orig)
2486 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2487 || TREE_CODE (orig) == OFFSET_TYPE);
2489 case REAL_TYPE:
2490 case FIXED_POINT_TYPE:
2491 case VOID_TYPE:
2492 return TREE_CODE (type) == TREE_CODE (orig);
2494 case VECTOR_TYPE:
2495 return (VECTOR_TYPE_P (orig)
2496 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2497 TYPE_VECTOR_SUBPARTS (orig))
2498 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2500 default:
2501 return false;
2505 /* Convert expression ARG to type TYPE. Used by the middle-end for
2506 simple conversions in preference to calling the front-end's convert. */
2508 tree
2509 fold_convert_loc (location_t loc, tree type, tree arg)
2511 tree orig = TREE_TYPE (arg);
2512 tree tem;
2514 if (type == orig)
2515 return arg;
2517 if (TREE_CODE (arg) == ERROR_MARK
2518 || TREE_CODE (type) == ERROR_MARK
2519 || TREE_CODE (orig) == ERROR_MARK)
2520 return error_mark_node;
2522 switch (TREE_CODE (type))
2524 case POINTER_TYPE:
2525 case REFERENCE_TYPE:
2526 /* Handle conversions between pointers to different address spaces. */
2527 if (POINTER_TYPE_P (orig)
2528 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2529 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2530 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2531 /* fall through */
2533 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2534 case OFFSET_TYPE:
2535 if (TREE_CODE (arg) == INTEGER_CST)
2537 tem = fold_convert_const (NOP_EXPR, type, arg);
2538 if (tem != NULL_TREE)
2539 return tem;
2541 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2542 || TREE_CODE (orig) == OFFSET_TYPE)
2543 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2544 if (TREE_CODE (orig) == COMPLEX_TYPE)
2545 return fold_convert_loc (loc, type,
2546 fold_build1_loc (loc, REALPART_EXPR,
2547 TREE_TYPE (orig), arg));
2548 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2549 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2550 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2552 case REAL_TYPE:
2553 if (TREE_CODE (arg) == INTEGER_CST)
2555 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2556 if (tem != NULL_TREE)
2557 return tem;
2559 else if (TREE_CODE (arg) == REAL_CST)
2561 tem = fold_convert_const (NOP_EXPR, type, arg);
2562 if (tem != NULL_TREE)
2563 return tem;
2565 else if (TREE_CODE (arg) == FIXED_CST)
2567 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2568 if (tem != NULL_TREE)
2569 return tem;
2572 switch (TREE_CODE (orig))
2574 case INTEGER_TYPE:
2575 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2576 case POINTER_TYPE: case REFERENCE_TYPE:
2577 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2579 case REAL_TYPE:
2580 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2582 case FIXED_POINT_TYPE:
2583 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2585 case COMPLEX_TYPE:
2586 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2587 return fold_convert_loc (loc, type, tem);
2589 default:
2590 gcc_unreachable ();
2593 case FIXED_POINT_TYPE:
2594 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2595 || TREE_CODE (arg) == REAL_CST)
2597 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2598 if (tem != NULL_TREE)
2599 goto fold_convert_exit;
2602 switch (TREE_CODE (orig))
2604 case FIXED_POINT_TYPE:
2605 case INTEGER_TYPE:
2606 case ENUMERAL_TYPE:
2607 case BOOLEAN_TYPE:
2608 case REAL_TYPE:
2609 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2611 case COMPLEX_TYPE:
2612 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2613 return fold_convert_loc (loc, type, tem);
2615 default:
2616 gcc_unreachable ();
2619 case COMPLEX_TYPE:
2620 switch (TREE_CODE (orig))
2622 case INTEGER_TYPE:
2623 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2624 case POINTER_TYPE: case REFERENCE_TYPE:
2625 case REAL_TYPE:
2626 case FIXED_POINT_TYPE:
2627 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2628 fold_convert_loc (loc, TREE_TYPE (type), arg),
2629 fold_convert_loc (loc, TREE_TYPE (type),
2630 integer_zero_node));
2631 case COMPLEX_TYPE:
2633 tree rpart, ipart;
2635 if (TREE_CODE (arg) == COMPLEX_EXPR)
2637 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2638 TREE_OPERAND (arg, 0));
2639 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2640 TREE_OPERAND (arg, 1));
2641 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2644 arg = save_expr (arg);
2645 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2646 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2647 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2648 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2649 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2652 default:
2653 gcc_unreachable ();
2656 case VECTOR_TYPE:
2657 if (integer_zerop (arg))
2658 return build_zero_vector (type);
2659 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2660 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2661 || TREE_CODE (orig) == VECTOR_TYPE);
2662 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2664 case VOID_TYPE:
2665 tem = fold_ignored_result (arg);
2666 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2668 default:
2669 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2670 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2671 gcc_unreachable ();
2673 fold_convert_exit:
2674 tem = protected_set_expr_location_unshare (tem, loc);
2675 return tem;
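/* Hedged usage sketch: middle-end code normally calls this through
   the fold_convert macro from fold-const.h, e.g.

     tree idx = fold_convert (sizetype, offset);

   which performs only the simple conversions handled above, in
   preference to the front ends' convert.  */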
2678 /* Return false if expr can be assumed not to be an lvalue, true
2679 otherwise. */
2681 static bool
2682 maybe_lvalue_p (const_tree x)
2684 /* We only need to wrap lvalue tree codes. */
2685 switch (TREE_CODE (x))
2687 case VAR_DECL:
2688 case PARM_DECL:
2689 case RESULT_DECL:
2690 case LABEL_DECL:
2691 case FUNCTION_DECL:
2692 case SSA_NAME:
2693 case COMPOUND_LITERAL_EXPR:
2695 case COMPONENT_REF:
2696 case MEM_REF:
2697 case INDIRECT_REF:
2698 case ARRAY_REF:
2699 case ARRAY_RANGE_REF:
2700 case BIT_FIELD_REF:
2701 case OBJ_TYPE_REF:
2703 case REALPART_EXPR:
2704 case IMAGPART_EXPR:
2705 case PREINCREMENT_EXPR:
2706 case PREDECREMENT_EXPR:
2707 case SAVE_EXPR:
2708 case TRY_CATCH_EXPR:
2709 case WITH_CLEANUP_EXPR:
2710 case COMPOUND_EXPR:
2711 case MODIFY_EXPR:
2712 case TARGET_EXPR:
2713 case COND_EXPR:
2714 case BIND_EXPR:
2715 case VIEW_CONVERT_EXPR:
2716 break;
2718 default:
2719 /* Assume the worst for front-end tree codes. */
2720 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2721 break;
2722 return false;
2725 return true;
2728 /* Return an expr equal to X but certainly not valid as an lvalue. */
2730 tree
2731 non_lvalue_loc (location_t loc, tree x)
2733 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2734 us. */
2735 if (in_gimple_form)
2736 return x;
2738 if (! maybe_lvalue_p (x))
2739 return x;
2740 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2743 /* Given a tree comparison code, return the code that is the logical inverse.
2744 It is generally not safe to do this for floating-point comparisons, except
2745 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2746 ERROR_MARK in this case. */
2748 enum tree_code
2749 invert_tree_comparison (enum tree_code code, bool honor_nans)
2751 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2752 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2753 return ERROR_MARK;
2755 switch (code)
2757 case EQ_EXPR:
2758 return NE_EXPR;
2759 case NE_EXPR:
2760 return EQ_EXPR;
2761 case GT_EXPR:
2762 return honor_nans ? UNLE_EXPR : LE_EXPR;
2763 case GE_EXPR:
2764 return honor_nans ? UNLT_EXPR : LT_EXPR;
2765 case LT_EXPR:
2766 return honor_nans ? UNGE_EXPR : GE_EXPR;
2767 case LE_EXPR:
2768 return honor_nans ? UNGT_EXPR : GT_EXPR;
2769 case LTGT_EXPR:
2770 return UNEQ_EXPR;
2771 case UNEQ_EXPR:
2772 return LTGT_EXPR;
2773 case UNGT_EXPR:
2774 return LE_EXPR;
2775 case UNGE_EXPR:
2776 return LT_EXPR;
2777 case UNLT_EXPR:
2778 return GE_EXPR;
2779 case UNLE_EXPR:
2780 return GT_EXPR;
2781 case ORDERED_EXPR:
2782 return UNORDERED_EXPR;
2783 case UNORDERED_EXPR:
2784 return ORDERED_EXPR;
2785 default:
2786 gcc_unreachable ();
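/* Standalone sketch (editor's illustration) of why plain inversion
   is unsafe under NaNs: !(a < b) is not (a >= b) but the
   unordered-or-greater-or-equal test that UNGE_EXPR denotes.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double a = nan (""), b = 1.0;
  assert (!(a < b));		/* NaN compares false... */
  assert (!(a >= b));		/* ...and so does the naive "inverse" */
  return 0;
}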
2790 /* Similar, but return the comparison that results if the operands are
2791 swapped. This is safe for floating-point. */
2793 enum tree_code
2794 swap_tree_comparison (enum tree_code code)
2796 switch (code)
2798 case EQ_EXPR:
2799 case NE_EXPR:
2800 case ORDERED_EXPR:
2801 case UNORDERED_EXPR:
2802 case LTGT_EXPR:
2803 case UNEQ_EXPR:
2804 return code;
2805 case GT_EXPR:
2806 return LT_EXPR;
2807 case GE_EXPR:
2808 return LE_EXPR;
2809 case LT_EXPR:
2810 return GT_EXPR;
2811 case LE_EXPR:
2812 return GE_EXPR;
2813 case UNGT_EXPR:
2814 return UNLT_EXPR;
2815 case UNGE_EXPR:
2816 return UNLE_EXPR;
2817 case UNLT_EXPR:
2818 return UNGT_EXPR;
2819 case UNLE_EXPR:
2820 return UNGE_EXPR;
2821 default:
2822 gcc_unreachable ();
2827 /* Convert a comparison tree code from an enum tree_code representation
2828 into a compcode bit-based encoding. This function is the inverse of
2829 compcode_to_comparison. */
2831 static enum comparison_code
2832 comparison_to_compcode (enum tree_code code)
2834 switch (code)
2836 case LT_EXPR:
2837 return COMPCODE_LT;
2838 case EQ_EXPR:
2839 return COMPCODE_EQ;
2840 case LE_EXPR:
2841 return COMPCODE_LE;
2842 case GT_EXPR:
2843 return COMPCODE_GT;
2844 case NE_EXPR:
2845 return COMPCODE_NE;
2846 case GE_EXPR:
2847 return COMPCODE_GE;
2848 case ORDERED_EXPR:
2849 return COMPCODE_ORD;
2850 case UNORDERED_EXPR:
2851 return COMPCODE_UNORD;
2852 case UNLT_EXPR:
2853 return COMPCODE_UNLT;
2854 case UNEQ_EXPR:
2855 return COMPCODE_UNEQ;
2856 case UNLE_EXPR:
2857 return COMPCODE_UNLE;
2858 case UNGT_EXPR:
2859 return COMPCODE_UNGT;
2860 case LTGT_EXPR:
2861 return COMPCODE_LTGT;
2862 case UNGE_EXPR:
2863 return COMPCODE_UNGE;
2864 default:
2865 gcc_unreachable ();
2869 /* Convert a compcode bit-based encoding of a comparison operator back
2870 to GCC's enum tree_code representation. This function is the
2871 inverse of comparison_to_compcode. */
2873 static enum tree_code
2874 compcode_to_comparison (enum comparison_code code)
2876 switch (code)
2878 case COMPCODE_LT:
2879 return LT_EXPR;
2880 case COMPCODE_EQ:
2881 return EQ_EXPR;
2882 case COMPCODE_LE:
2883 return LE_EXPR;
2884 case COMPCODE_GT:
2885 return GT_EXPR;
2886 case COMPCODE_NE:
2887 return NE_EXPR;
2888 case COMPCODE_GE:
2889 return GE_EXPR;
2890 case COMPCODE_ORD:
2891 return ORDERED_EXPR;
2892 case COMPCODE_UNORD:
2893 return UNORDERED_EXPR;
2894 case COMPCODE_UNLT:
2895 return UNLT_EXPR;
2896 case COMPCODE_UNEQ:
2897 return UNEQ_EXPR;
2898 case COMPCODE_UNLE:
2899 return UNLE_EXPR;
2900 case COMPCODE_UNGT:
2901 return UNGT_EXPR;
2902 case COMPCODE_LTGT:
2903 return LTGT_EXPR;
2904 case COMPCODE_UNGE:
2905 return UNGE_EXPR;
2906 default:
2907 gcc_unreachable ();
2911 /* Return true if COND1 tests the opposite condition of COND2. */
2913 bool
2914 inverse_conditions_p (const_tree cond1, const_tree cond2)
2916 return (COMPARISON_CLASS_P (cond1)
2917 && COMPARISON_CLASS_P (cond2)
2918 && (invert_tree_comparison
2919 (TREE_CODE (cond1),
2920 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2921 && operand_equal_p (TREE_OPERAND (cond1, 0),
2922 TREE_OPERAND (cond2, 0), 0)
2923 && operand_equal_p (TREE_OPERAND (cond1, 1),
2924 TREE_OPERAND (cond2, 1), 0));
2927 /* Return a tree for the comparison which is the combination of
2928 doing the AND or OR (depending on CODE) of the two operations LCODE
2929 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2930 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2931 if this makes the transformation invalid. */
2933 tree
2934 combine_comparisons (location_t loc,
2935 enum tree_code code, enum tree_code lcode,
2936 enum tree_code rcode, tree truth_type,
2937 tree ll_arg, tree lr_arg)
2939 bool honor_nans = HONOR_NANS (ll_arg);
2940 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2941 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2942 int compcode;
2944 switch (code)
2946 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2947 compcode = lcompcode & rcompcode;
2948 break;
2950 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2951 compcode = lcompcode | rcompcode;
2952 break;
2954 default:
2955 return NULL_TREE;
2958 if (!honor_nans)
2960 /* Eliminate unordered comparisons, as well as LTGT and ORD
2961 which are not used unless the mode has NaNs. */
2962 compcode &= ~COMPCODE_UNORD;
2963 if (compcode == COMPCODE_LTGT)
2964 compcode = COMPCODE_NE;
2965 else if (compcode == COMPCODE_ORD)
2966 compcode = COMPCODE_TRUE;
2968 else if (flag_trapping_math)
2970 /* Check that the original operation and the optimized ones will trap
2971 under the same condition. */
2972 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2973 && (lcompcode != COMPCODE_EQ)
2974 && (lcompcode != COMPCODE_ORD);
2975 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2976 && (rcompcode != COMPCODE_EQ)
2977 && (rcompcode != COMPCODE_ORD);
2978 bool trap = (compcode & COMPCODE_UNORD) == 0
2979 && (compcode != COMPCODE_EQ)
2980 && (compcode != COMPCODE_ORD);
2982 /* In a short-circuited boolean expression the LHS might be
2983 such that the RHS, if evaluated, will never trap. For
2984 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2985 if neither x nor y is NaN. (This is a mixed blessing: for
2986 example, the expression above will never trap, hence
2987 optimizing it to x < y would be invalid). */
2988 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2989 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2990 rtrap = false;
2992 /* If the comparison was short-circuited, and only the RHS
2993 trapped, we may now generate a spurious trap. */
2994 if (rtrap && !ltrap
2995 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2996 return NULL_TREE;
2998 /* If we changed the conditions that cause a trap, we lose. */
2999 if ((ltrap || rtrap) != trap)
3000 return NULL_TREE;
3003 if (compcode == COMPCODE_TRUE)
3004 return constant_boolean_node (true, truth_type);
3005 else if (compcode == COMPCODE_FALSE)
3006 return constant_boolean_node (false, truth_type);
3007 else
3009 enum tree_code tcode;
3011 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3012 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
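/* Editor's sketch of the bit encoding that makes the AND/OR above
   work: one bit each for "less", "equal", "greater" and "unordered"
   (the values below mirror, as an assumption, the comparison_code
   enum defined near the top of this file), so combining two
   comparisons of the same operands is plain bitwise arithmetic.  */
#include <assert.h>

enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4, CC_UNORD = 8 };

int
main (void)
{
  int le = CC_LT | CC_EQ, ge = CC_GT | CC_EQ;
  assert ((le & ge) == CC_EQ);		/* a <= b && a >= b  <=>  a == b */
  int ltgt = CC_LT | CC_GT;		/* a < b || a > b */
  int ne = CC_LT | CC_GT | CC_UNORD;	/* a != b, also true for NaN */
  assert ((ltgt | CC_UNORD) == ne);	/* LTGT equals NE once NaNs are impossible */
  return 0;
}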
3016 /* Return nonzero if two operands (typically of the same tree node)
3017 are necessarily equal. FLAGS modifies behavior as follows:
3019 If OEP_ONLY_CONST is set, only return nonzero for constants.
3020 This function tests whether the operands are indistinguishable;
3021 it does not test whether they are equal using C's == operation.
3022 The distinction is important for IEEE floating point, because
3023 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3024 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3026 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3027 even though it may hold multiple values during a function.
3028 This is because a GCC tree node guarantees that nothing else is
3029 executed between the evaluation of its "operands" (which may often
3030 be evaluated in arbitrary order). Hence if the operands themselves
3031 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3032 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3033 unset means assuming isochronic (or instantaneous) tree equivalence.
3034 Unless comparing arbitrary expression trees, such as from different
3035 statements, this flag can usually be left unset.
3037 If OEP_PURE_SAME is set, then pure functions with identical arguments
3038 are considered the same. It is used when the caller has other ways
3039 to ensure that global memory is unchanged in between.
3041 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3042 not values of expressions.
3044 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3045 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3047 If OEP_BITWISE is set, then require the values to be bitwise identical
3048 rather than simply numerically equal. Do not take advantage of things
3049 like math-related flags or undefined behavior; only return true for
3050 values that are provably bitwise identical in all circumstances.
3052 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3053 any operand with side effects. This is unnecessarily conservative in the
3054 case we know that arg0 and arg1 are in disjoint code paths (such as in
3055 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3056 addresses with TREE_CONSTANT flag set so we know that &var == &var
3057 even if var is volatile. */
3059 bool
3060 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3061 unsigned int flags)
3063 bool r;
3064 if (verify_hash_value (arg0, arg1, flags, &r))
3065 return r;
3067 STRIP_ANY_LOCATION_WRAPPER (arg0);
3068 STRIP_ANY_LOCATION_WRAPPER (arg1);
3070 /* If either is ERROR_MARK, they aren't equal. */
3071 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3072 || TREE_TYPE (arg0) == error_mark_node
3073 || TREE_TYPE (arg1) == error_mark_node)
3074 return false;
3076 /* Similar, if either does not have a type (like a template id),
3077 they aren't equal. */
3078 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3079 return false;
3081 /* Bitwise identity makes no sense if the values have different layouts. */
3082 if ((flags & OEP_BITWISE)
3083 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3084 return false;
3086 /* We cannot consider pointers to different address space equal. */
3087 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3088 && POINTER_TYPE_P (TREE_TYPE (arg1))
3089 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3090 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3091 return false;
3093 /* Check equality of integer constants before bailing out due to
3094 precision differences. */
3095 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3097 /* Address of INTEGER_CST is not defined; check that we did not forget
3098 to drop the OEP_ADDRESS_OF flags. */
3099 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3100 return tree_int_cst_equal (arg0, arg1);
3103 if (!(flags & OEP_ADDRESS_OF))
3105 /* If both types don't have the same signedness, then we can't consider
3106 them equal. We must check this before the STRIP_NOPS calls
3107 because they may change the signedness of the arguments. As pointers
3108 strictly don't have a signedness, require either two pointers or
3109 two non-pointers as well. */
3110 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3111 || POINTER_TYPE_P (TREE_TYPE (arg0))
3112 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3113 return false;
3115 /* If both types don't have the same precision, then it is not safe
3116 to strip NOPs. */
3117 if (element_precision (TREE_TYPE (arg0))
3118 != element_precision (TREE_TYPE (arg1)))
3119 return false;
3121 STRIP_NOPS (arg0);
3122 STRIP_NOPS (arg1);
3124 #if 0
3125 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3126 sanity check once the issue is solved. */
3127 else
3128 /* Addresses of conversions and SSA_NAMEs (and many other things)
3129 are not defined. Check that we did not forget to drop the
3130 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3131 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3132 && TREE_CODE (arg0) != SSA_NAME);
3133 #endif
3135 /* In case both args are comparisons but with different comparison
3136 code, try to swap the comparison operands of one arg to produce
3137 a match and compare that variant. */
3138 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3139 && COMPARISON_CLASS_P (arg0)
3140 && COMPARISON_CLASS_P (arg1))
3142 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3144 if (TREE_CODE (arg0) == swap_code)
3145 return operand_equal_p (TREE_OPERAND (arg0, 0),
3146 TREE_OPERAND (arg1, 1), flags)
3147 && operand_equal_p (TREE_OPERAND (arg0, 1),
3148 TREE_OPERAND (arg1, 0), flags);
3151 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3153 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3154 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3156 else if (flags & OEP_ADDRESS_OF)
3158 /* If we are interested in comparing addresses ignore
3159 MEM_REF wrappings of the base that can appear just for
3160 TBAA reasons. */
3161 if (TREE_CODE (arg0) == MEM_REF
3162 && DECL_P (arg1)
3163 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3164 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3165 && integer_zerop (TREE_OPERAND (arg0, 1)))
3166 return true;
3167 else if (TREE_CODE (arg1) == MEM_REF
3168 && DECL_P (arg0)
3169 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3170 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3171 && integer_zerop (TREE_OPERAND (arg1, 1)))
3172 return true;
3173 return false;
3175 else
3176 return false;
3179 /* When not checking addresses, this is needed for conversions and for
3180 COMPONENT_REF. Might as well play it safe and always test this. */
3181 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3182 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3183 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3184 && !(flags & OEP_ADDRESS_OF)))
3185 return false;
3187 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3188 We don't care about side effects in that case because the SAVE_EXPR
3189 takes care of that for us. In all other cases, two expressions are
3190 equal if they have no side effects. If we have two identical
3191 expressions with side effects that should be treated the same due
3192 to the only side effects being identical SAVE_EXPR's, that will
3193 be detected in the recursive calls below.
3194 If we are taking an invariant address of two identical objects
3195 they are necessarily equal as well. */
3196 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3197 && (TREE_CODE (arg0) == SAVE_EXPR
3198 || (flags & OEP_MATCH_SIDE_EFFECTS)
3199 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3200 return true;
3202 /* Next handle constant cases, those for which we can return 1 even
3203 if ONLY_CONST is set. */
3204 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3205 switch (TREE_CODE (arg0))
3207 case INTEGER_CST:
3208 return tree_int_cst_equal (arg0, arg1);
3210 case FIXED_CST:
3211 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3212 TREE_FIXED_CST (arg1));
3214 case REAL_CST:
3215 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3216 return true;
3218 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3220 /* If we do not distinguish between signed and unsigned zero,
3221 consider them equal. */
3222 if (real_zerop (arg0) && real_zerop (arg1))
3223 return true;
3225 return false;
3227 case VECTOR_CST:
3229 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3230 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3231 return false;
3233 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3234 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3235 return false;
3237 unsigned int count = vector_cst_encoded_nelts (arg0);
3238 for (unsigned int i = 0; i < count; ++i)
3239 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3240 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3241 return false;
3242 return true;
3245 case COMPLEX_CST:
3246 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3247 flags)
3248 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3249 flags));
3251 case STRING_CST:
3252 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3253 && ! memcmp (TREE_STRING_POINTER (arg0),
3254 TREE_STRING_POINTER (arg1),
3255 TREE_STRING_LENGTH (arg0)));
3257 case ADDR_EXPR:
3258 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3259 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3260 flags | OEP_ADDRESS_OF
3261 | OEP_MATCH_SIDE_EFFECTS);
3262 case CONSTRUCTOR:
3263 /* In GIMPLE empty constructors are allowed in initializers of
3264 aggregates. */
3265 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3266 default:
3267 break;
3270 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3271 two instances of undefined behavior will give identical results. */
3272 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3273 return false;
3275 /* Define macros to test an operand from arg0 and arg1 for equality and a
3276 variant that allows null and views null as being different from any
3277 non-null value. In the latter case, if either is null, then both
3278 must be; otherwise, do the normal comparison. */
3279 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3280 TREE_OPERAND (arg1, N), flags)
3282 #define OP_SAME_WITH_NULL(N) \
3283 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3284 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3286 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3288 case tcc_unary:
3289 /* Two conversions are equal only if signedness and modes match. */
3290 switch (TREE_CODE (arg0))
3292 CASE_CONVERT:
3293 case FIX_TRUNC_EXPR:
3294 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3295 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3296 return false;
3297 break;
3298 default:
3299 break;
3302 return OP_SAME (0);
3305 case tcc_comparison:
3306 case tcc_binary:
3307 if (OP_SAME (0) && OP_SAME (1))
3308 return true;
3310 /* For commutative ops, allow the other order. */
3311 return (commutative_tree_code (TREE_CODE (arg0))
3312 && operand_equal_p (TREE_OPERAND (arg0, 0),
3313 TREE_OPERAND (arg1, 1), flags)
3314 && operand_equal_p (TREE_OPERAND (arg0, 1),
3315 TREE_OPERAND (arg1, 0), flags));
3317 case tcc_reference:
3318 /* If either of the pointer (or reference) expressions we are
3319 dereferencing contain a side effect, these cannot be equal,
3320 but their addresses can be. */
3321 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3322 && (TREE_SIDE_EFFECTS (arg0)
3323 || TREE_SIDE_EFFECTS (arg1)))
3324 return false;
3326 switch (TREE_CODE (arg0))
3328 case INDIRECT_REF:
3329 if (!(flags & OEP_ADDRESS_OF))
3331 if (TYPE_ALIGN (TREE_TYPE (arg0))
3332 != TYPE_ALIGN (TREE_TYPE (arg1)))
3333 return false;
3334 /* Verify that the access types are compatible. */
3335 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3336 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3337 return false;
3339 flags &= ~OEP_ADDRESS_OF;
3340 return OP_SAME (0);
3342 case IMAGPART_EXPR:
3343 /* Require the same offset. */
3344 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3345 TYPE_SIZE (TREE_TYPE (arg1)),
3346 flags & ~OEP_ADDRESS_OF))
3347 return false;
3349 /* Fallthru. */
3350 case REALPART_EXPR:
3351 case VIEW_CONVERT_EXPR:
3352 return OP_SAME (0);
3354 case TARGET_MEM_REF:
3355 case MEM_REF:
3356 if (!(flags & OEP_ADDRESS_OF))
3358 /* Require equal access sizes */
3359 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3360 && (!TYPE_SIZE (TREE_TYPE (arg0))
3361 || !TYPE_SIZE (TREE_TYPE (arg1))
3362 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3363 TYPE_SIZE (TREE_TYPE (arg1)),
3364 flags)))
3365 return false;
3366 /* Verify that access happens in similar types. */
3367 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3368 return false;
3369 /* Verify that accesses are TBAA compatible. */
3370 if (!alias_ptr_types_compatible_p
3371 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3372 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3373 || (MR_DEPENDENCE_CLIQUE (arg0)
3374 != MR_DEPENDENCE_CLIQUE (arg1))
3375 || (MR_DEPENDENCE_BASE (arg0)
3376 != MR_DEPENDENCE_BASE (arg1)))
3377 return false;
3378 /* Verify that alignment is compatible. */
3379 if (TYPE_ALIGN (TREE_TYPE (arg0))
3380 != TYPE_ALIGN (TREE_TYPE (arg1)))
3381 return false;
3383 flags &= ~OEP_ADDRESS_OF;
3384 return (OP_SAME (0) && OP_SAME (1)
3385 /* TARGET_MEM_REF require equal extra operands. */
3386 && (TREE_CODE (arg0) != TARGET_MEM_REF
3387 || (OP_SAME_WITH_NULL (2)
3388 && OP_SAME_WITH_NULL (3)
3389 && OP_SAME_WITH_NULL (4))));
3391 case ARRAY_REF:
3392 case ARRAY_RANGE_REF:
3393 if (!OP_SAME (0))
3394 return false;
3395 flags &= ~OEP_ADDRESS_OF;
3396 /* First compare the array index by value if it is constant, as the
3397 indices may have different types but the same value here. */
3398 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3399 TREE_OPERAND (arg1, 1))
3400 || OP_SAME (1))
3401 && OP_SAME_WITH_NULL (2)
3402 && OP_SAME_WITH_NULL (3)
3403 /* Compare the low bound and element size, since with OEP_ADDRESS_OF
3404 we have to account for the offset of the ref. */
3405 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3406 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3407 || (operand_equal_p (array_ref_low_bound
3408 (CONST_CAST_TREE (arg0)),
3409 array_ref_low_bound
3410 (CONST_CAST_TREE (arg1)), flags)
3411 && operand_equal_p (array_ref_element_size
3412 (CONST_CAST_TREE (arg0)),
3413 array_ref_element_size
3414 (CONST_CAST_TREE (arg1)),
3415 flags))));
3417 case COMPONENT_REF:
3418 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3419 may be NULL when we're called to compare MEM_EXPRs. */
3420 if (!OP_SAME_WITH_NULL (0))
3421 return false;
3423 bool compare_address = flags & OEP_ADDRESS_OF;
3425 /* Most of the time we only need to compare FIELD_DECLs for equality.
3426 However, when determining the address, look at the actual offsets:
3427 these may match for unions and unshared record types. */
3428 flags &= ~OEP_ADDRESS_OF;
3429 if (!OP_SAME (1))
3431 if (compare_address
3432 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3434 tree field0 = TREE_OPERAND (arg0, 1);
3435 tree field1 = TREE_OPERAND (arg1, 1);
3437 /* Non-FIELD_DECL operands can appear in C++ templates. */
3438 if (TREE_CODE (field0) != FIELD_DECL
3439 || TREE_CODE (field1) != FIELD_DECL
3440 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3441 DECL_FIELD_OFFSET (field1), flags)
3442 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3443 DECL_FIELD_BIT_OFFSET (field1),
3444 flags))
3445 return false;
3447 else
3448 return false;
3451 return OP_SAME_WITH_NULL (2);
3453 case BIT_FIELD_REF:
3454 if (!OP_SAME (0))
3455 return false;
3456 flags &= ~OEP_ADDRESS_OF;
3457 return OP_SAME (1) && OP_SAME (2);
3459 default:
3460 return false;
3463 case tcc_expression:
3464 switch (TREE_CODE (arg0))
3466 case ADDR_EXPR:
3467 /* Be sure we pass right ADDRESS_OF flag. */
3468 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3469 return operand_equal_p (TREE_OPERAND (arg0, 0),
3470 TREE_OPERAND (arg1, 0),
3471 flags | OEP_ADDRESS_OF);
3473 case TRUTH_NOT_EXPR:
3474 return OP_SAME (0);
3476 case TRUTH_ANDIF_EXPR:
3477 case TRUTH_ORIF_EXPR:
3478 return OP_SAME (0) && OP_SAME (1);
3480 case WIDEN_MULT_PLUS_EXPR:
3481 case WIDEN_MULT_MINUS_EXPR:
3482 if (!OP_SAME (2))
3483 return false;
3484 /* The multiplication operands are commutative. */
3485 /* FALLTHRU */
3487 case TRUTH_AND_EXPR:
3488 case TRUTH_OR_EXPR:
3489 case TRUTH_XOR_EXPR:
3490 if (OP_SAME (0) && OP_SAME (1))
3491 return true;
3493 /* Otherwise take into account this is a commutative operation. */
3494 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3495 TREE_OPERAND (arg1, 1), flags)
3496 && operand_equal_p (TREE_OPERAND (arg0, 1),
3497 TREE_OPERAND (arg1, 0), flags));
3499 case COND_EXPR:
3500 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3501 return false;
3502 flags &= ~OEP_ADDRESS_OF;
3503 return OP_SAME (0);
3505 case BIT_INSERT_EXPR:
3506 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3507 of op1; check that it is the same for both. */
3508 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3509 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3510 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3511 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3512 return false;
3513 /* FALLTHRU */
3515 case VEC_COND_EXPR:
3516 case DOT_PROD_EXPR:
3517 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3519 case MODIFY_EXPR:
3520 case INIT_EXPR:
3521 case COMPOUND_EXPR:
3522 case PREDECREMENT_EXPR:
3523 case PREINCREMENT_EXPR:
3524 case POSTDECREMENT_EXPR:
3525 case POSTINCREMENT_EXPR:
3526 if (flags & OEP_LEXICOGRAPHIC)
3527 return OP_SAME (0) && OP_SAME (1);
3528 return false;
3530 case CLEANUP_POINT_EXPR:
3531 case EXPR_STMT:
3532 case SAVE_EXPR:
3533 if (flags & OEP_LEXICOGRAPHIC)
3534 return OP_SAME (0);
3535 return false;
3537 case OBJ_TYPE_REF:
3538 /* Virtual table reference. */
3539 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3540 OBJ_TYPE_REF_EXPR (arg1), flags))
3541 return false;
3542 flags &= ~OEP_ADDRESS_OF;
3543 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3544 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3545 return false;
3546 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3547 OBJ_TYPE_REF_OBJECT (arg1), flags))
3548 return false;
3549 if (virtual_method_call_p (arg0))
3551 if (!virtual_method_call_p (arg1))
3552 return false;
3553 return types_same_for_odr (obj_type_ref_class (arg0),
3554 obj_type_ref_class (arg1));
3556 return false;
3558 default:
3559 return false;
3562 case tcc_vl_exp:
3563 switch (TREE_CODE (arg0))
3565 case CALL_EXPR:
3566 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3567 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3568 /* If the two CALL_EXPRs are not both internal or both normal
3569 function calls, then they are not equal. */
3570 return false;
3571 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3573 /* If the CALL_EXPRs call different internal functions, then they
3574 are not equal. */
3575 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3576 return false;
3578 else
3580 /* If the CALL_EXPRs call different functions, then they are not
3581 equal. */
3582 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3583 flags))
3584 return false;
3587 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3589 unsigned int cef = call_expr_flags (arg0);
3590 if (flags & OEP_PURE_SAME)
3591 cef &= ECF_CONST | ECF_PURE;
3592 else
3593 cef &= ECF_CONST;
3594 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3595 return false;
3598 /* Now see if all the arguments are the same. */
3600 const_call_expr_arg_iterator iter0, iter1;
3601 const_tree a0, a1;
3602 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3603 a1 = first_const_call_expr_arg (arg1, &iter1);
3604 a0 && a1;
3605 a0 = next_const_call_expr_arg (&iter0),
3606 a1 = next_const_call_expr_arg (&iter1))
3607 if (! operand_equal_p (a0, a1, flags))
3608 return false;
3610 /* If we get here and both argument lists are exhausted
3611 then the CALL_EXPRs are equal. */
3612 return ! (a0 || a1);
3614 default:
3615 return false;
3618 case tcc_declaration:
3619 /* Consider __builtin_sqrt equal to sqrt. */
3620 if (TREE_CODE (arg0) == FUNCTION_DECL)
3621 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3622 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3623 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3624 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3626 if (DECL_P (arg0)
3627 && (flags & OEP_DECL_NAME)
3628 && (flags & OEP_LEXICOGRAPHIC))
3630 /* Consider decls with the same name equal. The caller needs
3631 to make sure they refer to the same entity (such as a function
3632 formal parameter). */
3633 tree a0name = DECL_NAME (arg0);
3634 tree a1name = DECL_NAME (arg1);
3635 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3636 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3637 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3639 return false;
3641 case tcc_exceptional:
3642 if (TREE_CODE (arg0) == CONSTRUCTOR)
3644 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3645 return false;
3647 /* In GIMPLE constructors are used only to build vectors from
3648 elements. Individual elements in the constructor must be
3649 indexed in increasing order and form an initial sequence.
3651 We make no effort to compare constructors in GENERIC.
3652 (see sem_variable::equals in ipa-icf which can do so for
3653 constants). */
3654 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3655 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3656 return false;
3658 /* Be sure that the constructed vectors have the same representation.
3659 So far we have only tested that element precision and modes match.
3660 Vectors may be BLKmode and thus also check that the number of
3661 parts match. */
3662 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3663 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3664 return false;
3666 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3667 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3668 unsigned int len = vec_safe_length (v0);
3670 if (len != vec_safe_length (v1))
3671 return false;
3673 for (unsigned int i = 0; i < len; i++)
3675 constructor_elt *c0 = &(*v0)[i];
3676 constructor_elt *c1 = &(*v1)[i];
3678 if (!operand_equal_p (c0->value, c1->value, flags)
3679 /* In GIMPLE the indexes can be either NULL or matching i.
3680 Double check this so we won't get false
3681 positives for GENERIC. */
3682 || (c0->index
3683 && (TREE_CODE (c0->index) != INTEGER_CST
3684 || compare_tree_int (c0->index, i)))
3685 || (c1->index
3686 && (TREE_CODE (c1->index) != INTEGER_CST
3687 || compare_tree_int (c1->index, i))))
3688 return false;
3690 return true;
3692 else if (TREE_CODE (arg0) == STATEMENT_LIST
3693 && (flags & OEP_LEXICOGRAPHIC))
3695 /* Compare the STATEMENT_LISTs. */
3696 tree_stmt_iterator tsi1, tsi2;
3697 tree body1 = CONST_CAST_TREE (arg0);
3698 tree body2 = CONST_CAST_TREE (arg1);
3699 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3700 tsi_next (&tsi1), tsi_next (&tsi2))
3702 /* The lists don't have the same number of statements. */
3703 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3704 return false;
3705 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3706 return true;
3707 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3708 flags & (OEP_LEXICOGRAPHIC
3709 | OEP_NO_HASH_CHECK)))
3710 return false;
3713 return false;
3715 case tcc_statement:
3716 switch (TREE_CODE (arg0))
3718 case RETURN_EXPR:
3719 if (flags & OEP_LEXICOGRAPHIC)
3720 return OP_SAME_WITH_NULL (0);
3721 return false;
3722 case DEBUG_BEGIN_STMT:
3723 if (flags & OEP_LEXICOGRAPHIC)
3724 return true;
3725 return false;
3726 default:
3727 return false;
3730 default:
3731 return false;
3734 #undef OP_SAME
3735 #undef OP_SAME_WITH_NULL
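/* Standalone sketch (editor's illustration) of the IEEE subtleties
   called out in the comment above operand_equal_p: -0.0 and 0.0
   compare equal with == yet are distinguishable, and a NaN is never
   == to anything, including itself.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0;
  assert (pz == nz);			  /* == cannot tell them apart */
  assert (signbit (nz) && !signbit (pz)); /* but the bits differ */

  double qnan = nan ("");
  assert (qnan != qnan);		  /* NaN != NaN, even the same NaN */
  return 0;
}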
3738 /* Generate a hash value for an expression. This can be used iteratively
3739 by passing a previous result as the HSTATE argument. */
3741 void
3742 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3743 unsigned int flags)
3745 int i;
3746 enum tree_code code;
3747 enum tree_code_class tclass;
3749 if (t == NULL_TREE || t == error_mark_node)
3751 hstate.merge_hash (0);
3752 return;
3755 STRIP_ANY_LOCATION_WRAPPER (t);
3757 if (!(flags & OEP_ADDRESS_OF))
3758 STRIP_NOPS (t);
3760 code = TREE_CODE (t);
3762 switch (code)
3764 /* Alas, constants aren't shared, so we can't rely on pointer
3765 identity. */
3766 case VOID_CST:
3767 hstate.merge_hash (0);
3768 return;
3769 case INTEGER_CST:
3770 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3771 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3772 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3773 return;
3774 case REAL_CST:
3776 unsigned int val2;
3777 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3778 val2 = rvc_zero;
3779 else
3780 val2 = real_hash (TREE_REAL_CST_PTR (t));
3781 hstate.merge_hash (val2);
3782 return;
3784 case FIXED_CST:
3786 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3787 hstate.merge_hash (val2);
3788 return;
3790 case STRING_CST:
3791 hstate.add ((const void *) TREE_STRING_POINTER (t),
3792 TREE_STRING_LENGTH (t));
3793 return;
3794 case COMPLEX_CST:
3795 hash_operand (TREE_REALPART (t), hstate, flags);
3796 hash_operand (TREE_IMAGPART (t), hstate, flags);
3797 return;
3798 case VECTOR_CST:
3800 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3801 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3802 unsigned int count = vector_cst_encoded_nelts (t);
3803 for (unsigned int i = 0; i < count; ++i)
3804 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3805 return;
3807 case SSA_NAME:
3808 /* We can just compare by pointer. */
3809 hstate.add_hwi (SSA_NAME_VERSION (t));
3810 return;
3811 case PLACEHOLDER_EXPR:
3812 /* The node itself doesn't matter. */
3813 return;
3814 case BLOCK:
3815 case OMP_CLAUSE:
3816 /* Ignore. */
3817 return;
3818 case TREE_LIST:
3819 /* A list of expressions, for a CALL_EXPR or as the elements of a
3820 VECTOR_CST. */
3821 for (; t; t = TREE_CHAIN (t))
3822 hash_operand (TREE_VALUE (t), hstate, flags);
3823 return;
3824 case CONSTRUCTOR:
3826 unsigned HOST_WIDE_INT idx;
3827 tree field, value;
3828 flags &= ~OEP_ADDRESS_OF;
3829 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3830 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3832 /* In GIMPLE the indexes can be either NULL or matching i. */
3833 if (field == NULL_TREE)
3834 field = bitsize_int (idx);
3835 hash_operand (field, hstate, flags);
3836 hash_operand (value, hstate, flags);
3838 return;
3840 case STATEMENT_LIST:
3842 tree_stmt_iterator i;
3843 for (i = tsi_start (CONST_CAST_TREE (t));
3844 !tsi_end_p (i); tsi_next (&i))
3845 hash_operand (tsi_stmt (i), hstate, flags);
3846 return;
3848 case TREE_VEC:
3849 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3850 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3851 return;
3852 case IDENTIFIER_NODE:
3853 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3854 return;
3855 case FUNCTION_DECL:
3856 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3857 Otherwise nodes that compare equal according to operand_equal_p might
3858 get different hash codes. However, don't do this for machine specific
3859 or front end builtins, since the function code is overloaded in those
3860 cases. */
3861 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3862 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3864 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3865 code = TREE_CODE (t);
3867 /* FALL THROUGH */
3868 default:
3869 if (POLY_INT_CST_P (t))
3871 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3872 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3873 return;
3875 tclass = TREE_CODE_CLASS (code);
3877 if (tclass == tcc_declaration)
3879 /* DECLs have a unique ID. */
3880 hstate.add_hwi (DECL_UID (t));
3882 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3884 /* For comparisons that can be swapped, use the lower
3885 tree code. */
3886 enum tree_code ccode = swap_tree_comparison (code);
3887 if (code < ccode)
3888 ccode = code;
3889 hstate.add_object (ccode);
3890 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3891 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3893 else if (CONVERT_EXPR_CODE_P (code))
3895 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3896 operand_equal_p. */
3897 enum tree_code ccode = NOP_EXPR;
3898 hstate.add_object (ccode);
3900 /* Don't hash the type, since that can lead to having nodes which
3901 compare equal according to operand_equal_p, but which
3902 have different hash codes. Make sure to include signedness
3903 in the hash computation. */
3904 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3905 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3907 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3908 else if (code == MEM_REF
3909 && (flags & OEP_ADDRESS_OF) != 0
3910 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3911 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3912 && integer_zerop (TREE_OPERAND (t, 1)))
3913 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3914 hstate, flags);
3915 /* Don't ICE on FE specific trees, or their arguments etc.
3916 during operand_equal_p hash verification. */
3917 else if (!IS_EXPR_CODE_CLASS (tclass))
3918 gcc_assert (flags & OEP_HASH_CHECK);
3919 else
3921 unsigned int sflags = flags;
3923 hstate.add_object (code);
3925 switch (code)
3927 case ADDR_EXPR:
3928 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3929 flags |= OEP_ADDRESS_OF;
3930 sflags = flags;
3931 break;
3933 case INDIRECT_REF:
3934 case MEM_REF:
3935 case TARGET_MEM_REF:
3936 flags &= ~OEP_ADDRESS_OF;
3937 sflags = flags;
3938 break;
3940 case COMPONENT_REF:
3941 if (sflags & OEP_ADDRESS_OF)
3943 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3944 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3945 hstate, flags & ~OEP_ADDRESS_OF);
3946 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3947 hstate, flags & ~OEP_ADDRESS_OF);
3948 return;
3950 break;
3951 case ARRAY_REF:
3952 case ARRAY_RANGE_REF:
3953 case BIT_FIELD_REF:
3954 sflags &= ~OEP_ADDRESS_OF;
3955 break;
3957 case COND_EXPR:
3958 flags &= ~OEP_ADDRESS_OF;
3959 break;
3961 case WIDEN_MULT_PLUS_EXPR:
3962 case WIDEN_MULT_MINUS_EXPR:
3964 /* The multiplication operands are commutative. */
3965 inchash::hash one, two;
3966 hash_operand (TREE_OPERAND (t, 0), one, flags);
3967 hash_operand (TREE_OPERAND (t, 1), two, flags);
3968 hstate.add_commutative (one, two);
3969 hash_operand (TREE_OPERAND (t, 2), two, flags);
3970 return;
3973 case CALL_EXPR:
3974 if (CALL_EXPR_FN (t) == NULL_TREE)
3975 hstate.add_int (CALL_EXPR_IFN (t));
3976 break;
3978 case TARGET_EXPR:
3979 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3980 Usually different TARGET_EXPRs should just use
3981 different temporaries in their slots. */
3982 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3983 return;
3985 case OBJ_TYPE_REF:
3986 /* Virtual table reference. */
3987 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3988 flags &= ~OEP_ADDRESS_OF;
3989 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3990 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3991 if (!virtual_method_call_p (t))
3992 return;
3993 if (tree c = obj_type_ref_class (t))
3995 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3996 /* We compute mangled names only when free_lang_data is run.
3997 In that case we can hash precisely. */
3998 if (TREE_CODE (c) == TYPE_DECL
3999 && DECL_ASSEMBLER_NAME_SET_P (c))
4000 hstate.add_object
4001 (IDENTIFIER_HASH_VALUE
4002 (DECL_ASSEMBLER_NAME (c)));
4004 return;
4005 default:
4006 break;
4009 /* Don't hash the type, since that can lead to having nodes which
4010 compare equal according to operand_equal_p, but which
4011 have different hash codes. */
4012 if (code == NON_LVALUE_EXPR)
4015 /* Make sure to include signedness in the hash computation. */
4015 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4016 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4019 else if (commutative_tree_code (code))
4021 /* It's a commutative expression. We want to hash it the same
4022 however it appears. We do this by first hashing both operands
4023 and then rehashing based on the order of their independent
4024 hashes. */
4025 inchash::hash one, two;
4026 hash_operand (TREE_OPERAND (t, 0), one, flags);
4027 hash_operand (TREE_OPERAND (t, 1), two, flags);
4028 hstate.add_commutative (one, two);
4030 else
4031 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4032 hash_operand (TREE_OPERAND (t, i), hstate,
4033 i == 0 ? flags : sflags);
4035 return;
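/* Illustrative sketch, not part of the GCC sources: the commutative
   hashing above makes operand order irrelevant, so A + B and B + A
   hash identically.  Assuming trees A and B and the inchash API used
   in this file:

     operand_compare cmp;
     inchash::hash ha, hb;
     cmp.hash_operand (a, ha, 0);
     cmp.hash_operand (b, hb, 0);

     inchash::hash h1, h2;
     h1.add_commutative (ha, hb);	// hash of A + B
     h2.add_commutative (hb, ha);	// hash of B + A
     gcc_checking_assert (h1.end () == h2.end ());

   This is one instance of the invariant that verify_hash_value below
   checks for all pairs that operand_equal_p considers equal.  */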
4039 bool
4040 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4041 unsigned int flags, bool *ret)
4043 /* When checking and unless comparing DECL names, verify that if
4044 the outermost operand_equal_p call returns non-zero then ARG0
4045 and ARG1 have the same hash value. */
4046 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4048 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4050 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4052 inchash::hash hstate0 (0), hstate1 (0);
4053 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4054 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4055 hashval_t h0 = hstate0.end ();
4056 hashval_t h1 = hstate1.end ();
4057 gcc_assert (h0 == h1);
4059 *ret = true;
4061 else
4062 *ret = false;
4064 return true;
4067 return false;
4071 static operand_compare default_compare_instance;
4073 /* Convenience wrapper around the operand_compare class, because usually we do
4074 not need to play with the valueizer. */
4076 bool
4077 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4079 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
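/* Illustrative sketch, not part of the GCC sources: typical calls to
   the wrapper, with REF0/REF1 and ADDR0/ADDR1 as hypothetical trees:

     if (operand_equal_p (ref0, ref1, 0))
       ...				// equal as values
     if (operand_equal_p (addr0, addr1, OEP_ADDRESS_OF))
       ...				// equal as addresses
*/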
4082 namespace inchash
4085 /* Generate a hash value for an expression. This can be used iteratively
4086 by passing a previous result as the HSTATE argument.
4088 This function is intended to produce the same hash for expressions which
4089 would compare equal using operand_equal_p. */
4090 void
4091 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4093 default_compare_instance.hash_operand (t, hstate, flags);
4098 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4099 with a different signedness or a narrower precision. */
4101 static bool
4102 operand_equal_for_comparison_p (tree arg0, tree arg1)
4104 if (operand_equal_p (arg0, arg1, 0))
4105 return true;
4107 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4108 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4109 return false;
4111 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4112 and see if the inner values are the same. This removes any
4113 signedness comparison, which doesn't matter here. */
4114 tree op0 = arg0;
4115 tree op1 = arg1;
4116 STRIP_NOPS (op0);
4117 STRIP_NOPS (op1);
4118 if (operand_equal_p (op0, op1, 0))
4119 return true;
4121 /* Discard a single widening conversion from ARG1 and see if the inner
4122 value is the same as ARG0. */
4123 if (CONVERT_EXPR_P (arg1)
4124 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4125 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4126 < TYPE_PRECISION (TREE_TYPE (arg1))
4127 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4128 return true;
4130 return false;
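/* Illustrative example, not part of the GCC sources: with a 32-bit
   int X, the function above treats X and (long) X as variants of the
   same value, since the conversion is a single widening one; X and
   (long) Y for a distinct Y still compare unequal.  */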
4133 /* See if ARG is an expression that is either a comparison or is performing
4134 arithmetic on comparisons. The comparisons must only be comparing
4135 two different values, which will be stored in *CVAL1 and *CVAL2; if
4136 they are nonzero it means that some operands have already been found.
4137 No variables may be used anywhere else in the expression except in the
4138 comparisons.
4140 If this is true, return true. Otherwise, return false. */
4142 static bool
4143 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4145 enum tree_code code = TREE_CODE (arg);
4146 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4148 /* We can handle some of the tcc_expression cases here. */
4149 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4150 tclass = tcc_unary;
4151 else if (tclass == tcc_expression
4152 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4153 || code == COMPOUND_EXPR))
4154 tclass = tcc_binary;
4156 switch (tclass)
4158 case tcc_unary:
4159 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4161 case tcc_binary:
4162 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4163 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4165 case tcc_constant:
4166 return true;
4168 case tcc_expression:
4169 if (code == COND_EXPR)
4170 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4171 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4172 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4173 return false;
4175 case tcc_comparison:
4176 /* First see if we can handle the first operand, then the second. For
4177 the second operand, we know *CVAL1 can't be zero. It must be that
4178 one side of the comparison is each of the values; test for the
4179 case where this isn't true by failing if the two operands
4180 are the same. */
4182 if (operand_equal_p (TREE_OPERAND (arg, 0),
4183 TREE_OPERAND (arg, 1), 0))
4184 return false;
4186 if (*cval1 == 0)
4187 *cval1 = TREE_OPERAND (arg, 0);
4188 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4190 else if (*cval2 == 0)
4191 *cval2 = TREE_OPERAND (arg, 0);
4192 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4194 else
4195 return false;
4197 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4199 else if (*cval2 == 0)
4200 *cval2 = TREE_OPERAND (arg, 1);
4201 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4203 else
4204 return false;
4206 return true;
4208 default:
4209 return false;
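/* Illustrative example, not part of the GCC sources: for an
   expression such as

     a == b || a < b

   the walk above records *CVAL1 = A and *CVAL2 = B and returns true,
   while a == b || a < c fails because a third value C appears in the
   comparisons.  */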
4213 /* ARG is a tree that is known to contain just arithmetic operations and
4214 comparisons. Evaluate the operations in the tree substituting NEW0 for
4215 any occurrence of OLD0 as an operand of a comparison and likewise for
4216 NEW1 and OLD1. */
4218 static tree
4219 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4220 tree old1, tree new1)
4222 tree type = TREE_TYPE (arg);
4223 enum tree_code code = TREE_CODE (arg);
4224 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4226 /* We can handle some of the tcc_expression cases here. */
4227 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4228 tclass = tcc_unary;
4229 else if (tclass == tcc_expression
4230 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4231 tclass = tcc_binary;
4233 switch (tclass)
4235 case tcc_unary:
4236 return fold_build1_loc (loc, code, type,
4237 eval_subst (loc, TREE_OPERAND (arg, 0),
4238 old0, new0, old1, new1));
4240 case tcc_binary:
4241 return fold_build2_loc (loc, code, type,
4242 eval_subst (loc, TREE_OPERAND (arg, 0),
4243 old0, new0, old1, new1),
4244 eval_subst (loc, TREE_OPERAND (arg, 1),
4245 old0, new0, old1, new1));
4247 case tcc_expression:
4248 switch (code)
4250 case SAVE_EXPR:
4251 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4252 old1, new1);
4254 case COMPOUND_EXPR:
4255 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4256 old1, new1);
4258 case COND_EXPR:
4259 return fold_build3_loc (loc, code, type,
4260 eval_subst (loc, TREE_OPERAND (arg, 0),
4261 old0, new0, old1, new1),
4262 eval_subst (loc, TREE_OPERAND (arg, 1),
4263 old0, new0, old1, new1),
4264 eval_subst (loc, TREE_OPERAND (arg, 2),
4265 old0, new0, old1, new1));
4266 default:
4267 break;
4269 /* Fall through - ??? */
4271 case tcc_comparison:
4273 tree arg0 = TREE_OPERAND (arg, 0);
4274 tree arg1 = TREE_OPERAND (arg, 1);
4276 /* We need to check both for exact equality and tree equality. The
4277 former will be true if the operand has a side-effect. In that
4278 case, we know the operand occurred exactly once. */
4280 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4281 arg0 = new0;
4282 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4283 arg0 = new1;
4285 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4286 arg1 = new0;
4287 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4288 arg1 = new1;
4290 return fold_build2_loc (loc, code, type, arg0, arg1);
4293 default:
4294 return arg;
4298 /* Return a tree for the case when the result of an expression is RESULT
4299 converted to TYPE and OMITTED was previously an operand of the expression
4300 but is now not needed (e.g., we folded OMITTED * 0).
4302 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4303 the conversion of RESULT to TYPE. */
4305 tree
4306 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4308 tree t = fold_convert_loc (loc, type, result);
4310 /* If the resulting operand is an empty statement, just return the omitted
4311 statement cast to void. */
4312 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4313 return build1_loc (loc, NOP_EXPR, void_type_node,
4314 fold_ignored_result (omitted));
4316 if (TREE_SIDE_EFFECTS (omitted))
4317 return build2_loc (loc, COMPOUND_EXPR, type,
4318 fold_ignored_result (omitted), t);
4320 return non_lvalue_loc (loc, t);
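/* Illustrative sketch, not part of the GCC sources: when folding
   f () * 0 the call still has to be evaluated for its side effects,
   so with RESULT = 0 and OMITTED = f () the function above yields
   the equivalent of

     (void) f (), 0

   i.e. a COMPOUND_EXPR that keeps the call.  */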
4323 /* Return a tree for the case when the result of an expression is RESULT
4324 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4325 of the expression but are now not needed.
4327 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4328 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4329 evaluated before OMITTED2. Otherwise, if neither has side effects,
4330 just do the conversion of RESULT to TYPE. */
4332 tree
4333 omit_two_operands_loc (location_t loc, tree type, tree result,
4334 tree omitted1, tree omitted2)
4336 tree t = fold_convert_loc (loc, type, result);
4338 if (TREE_SIDE_EFFECTS (omitted2))
4339 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4340 if (TREE_SIDE_EFFECTS (omitted1))
4341 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4343 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4347 /* Return a simplified tree node for the truth-negation of ARG. This
4348 never alters ARG itself. We assume that ARG is an operation that
4349 returns a truth value (0 or 1).
4351 FIXME: one would think we would fold the result, but it causes
4352 problems with the dominator optimizer. */
4354 static tree
4355 fold_truth_not_expr (location_t loc, tree arg)
4357 tree type = TREE_TYPE (arg);
4358 enum tree_code code = TREE_CODE (arg);
4359 location_t loc1, loc2;
4361 /* If this is a comparison, we can simply invert it, except for
4362 floating-point non-equality comparisons, in which case we just
4363 enclose a TRUTH_NOT_EXPR around what we have. */
4365 if (TREE_CODE_CLASS (code) == tcc_comparison)
4367 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4368 if (FLOAT_TYPE_P (op_type)
4369 && flag_trapping_math
4370 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4371 && code != NE_EXPR && code != EQ_EXPR)
4372 return NULL_TREE;
4374 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4375 if (code == ERROR_MARK)
4376 return NULL_TREE;
4378 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4379 TREE_OPERAND (arg, 1));
4380 copy_warning (ret, arg);
4381 return ret;
4384 switch (code)
4386 case INTEGER_CST:
4387 return constant_boolean_node (integer_zerop (arg), type);
4389 case TRUTH_AND_EXPR:
4390 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4391 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4392 return build2_loc (loc, TRUTH_OR_EXPR, type,
4393 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4394 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4396 case TRUTH_OR_EXPR:
4397 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4398 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4399 return build2_loc (loc, TRUTH_AND_EXPR, type,
4400 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4401 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4403 case TRUTH_XOR_EXPR:
4404 /* Here we can invert either operand. We invert the first operand
4405 unless the second operand is a TRUTH_NOT_EXPR in which case our
4406 result is the XOR of the first operand with the inside of the
4407 negation of the second operand. */
4409 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4410 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4411 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4412 else
4413 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4414 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4415 TREE_OPERAND (arg, 1));
4417 case TRUTH_ANDIF_EXPR:
4418 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4419 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4420 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4421 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4422 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4424 case TRUTH_ORIF_EXPR:
4425 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4426 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4427 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4428 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4429 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4431 case TRUTH_NOT_EXPR:
4432 return TREE_OPERAND (arg, 0);
4434 case COND_EXPR:
4436 tree arg1 = TREE_OPERAND (arg, 1);
4437 tree arg2 = TREE_OPERAND (arg, 2);
4439 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4440 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4442 /* A COND_EXPR may have a throw as one operand, which
4443 then has void type. Just leave void operands
4444 as they are. */
4445 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4446 VOID_TYPE_P (TREE_TYPE (arg1))
4447 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4448 VOID_TYPE_P (TREE_TYPE (arg2))
4449 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4452 case COMPOUND_EXPR:
4453 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4454 return build2_loc (loc, COMPOUND_EXPR, type,
4455 TREE_OPERAND (arg, 0),
4456 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4458 case NON_LVALUE_EXPR:
4459 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4460 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4462 CASE_CONVERT:
4463 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4464 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4466 /* fall through */
4468 case FLOAT_EXPR:
4469 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4470 return build1_loc (loc, TREE_CODE (arg), type,
4471 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4473 case BIT_AND_EXPR:
4474 if (!integer_onep (TREE_OPERAND (arg, 1)))
4475 return NULL_TREE;
4476 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4478 case SAVE_EXPR:
4479 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4481 case CLEANUP_POINT_EXPR:
4482 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4483 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4484 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4486 default:
4487 return NULL_TREE;
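/* Illustrative examples, not part of the GCC sources, of inversions
   performed above:

     ! (a && b)  ==>  !a || !b		(TRUTH_ANDIF_EXPR case)
     ! (x < y)   ==>  x >= y		(integer comparison)

   whereas for a floating-point x < y with -ftrapping-math the
   function returns NULL_TREE, since x >= y would behave differently
   on NaN operands.  */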
4491 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4492 assume that ARG is an operation that returns a truth value (0 or 1
4493 for scalars, 0 or -1 for vectors). Return the folded expression if
4494 folding is successful. Otherwise, return NULL_TREE. */
4496 static tree
4497 fold_invert_truthvalue (location_t loc, tree arg)
4499 tree type = TREE_TYPE (arg);
4500 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4501 ? BIT_NOT_EXPR
4502 : TRUTH_NOT_EXPR,
4503 type, arg);
4506 /* Return a simplified tree node for the truth-negation of ARG. This
4507 never alters ARG itself. We assume that ARG is an operation that
4508 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4510 tree
4511 invert_truthvalue_loc (location_t loc, tree arg)
4513 if (TREE_CODE (arg) == ERROR_MARK)
4514 return arg;
4516 tree type = TREE_TYPE (arg);
4517 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4518 ? BIT_NOT_EXPR
4519 : TRUTH_NOT_EXPR,
4520 type, arg);
4523 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4524 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4525 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4526 is the original memory reference used to preserve the alias set of
4527 the access. */
4529 static tree
4530 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4531 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4532 int unsignedp, int reversep)
4534 tree result, bftype;
4536 /* Attempt not to lose the access path if possible. */
4537 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4539 tree ninner = TREE_OPERAND (orig_inner, 0);
4540 machine_mode nmode;
4541 poly_int64 nbitsize, nbitpos;
4542 tree noffset;
4543 int nunsignedp, nreversep, nvolatilep = 0;
4544 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4545 &noffset, &nmode, &nunsignedp,
4546 &nreversep, &nvolatilep);
4547 if (base == inner
4548 && noffset == NULL_TREE
4549 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4550 && !reversep
4551 && !nreversep
4552 && !nvolatilep)
4554 inner = ninner;
4555 bitpos -= nbitpos;
4559 alias_set_type iset = get_alias_set (orig_inner);
4560 if (iset == 0 && get_alias_set (inner) != iset)
4561 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4562 build_fold_addr_expr (inner),
4563 build_int_cst (ptr_type_node, 0));
4565 if (known_eq (bitpos, 0) && !reversep)
4567 tree size = TYPE_SIZE (TREE_TYPE (inner));
4568 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4569 || POINTER_TYPE_P (TREE_TYPE (inner)))
4570 && tree_fits_shwi_p (size)
4571 && tree_to_shwi (size) == bitsize)
4572 return fold_convert_loc (loc, type, inner);
4575 bftype = type;
4576 if (TYPE_PRECISION (bftype) != bitsize
4577 || TYPE_UNSIGNED (bftype) == !unsignedp)
4578 bftype = build_nonstandard_integer_type (bitsize, 0);
4580 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4581 bitsize_int (bitsize), bitsize_int (bitpos));
4582 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4584 if (bftype != type)
4585 result = fold_convert_loc (loc, type, result);
4587 return result;
4590 /* Optimize a bit-field compare.
4592 There are two cases: First is a compare against a constant and the
4593 second is a comparison of two items where the fields are at the same
4594 bit position relative to the start of a chunk (byte, halfword, word)
4595 large enough to contain it. In these cases we can avoid the shift
4596 implicit in bitfield extractions.
4598 For constants, we emit a compare of the shifted constant with the
4599 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4600 compared. For two fields at the same position, we do the ANDs with the
4601 similar mask and compare the result of the ANDs.
4603 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4604 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4605 are the left and right operands of the comparison, respectively.
4607 If the optimization described above can be done, we return the resulting
4608 tree. Otherwise we return zero. */
4610 static tree
4611 optimize_bit_field_compare (location_t loc, enum tree_code code,
4612 tree compare_type, tree lhs, tree rhs)
4614 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4615 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4616 tree type = TREE_TYPE (lhs);
4617 tree unsigned_type;
4618 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4619 machine_mode lmode, rmode;
4620 scalar_int_mode nmode;
4621 int lunsignedp, runsignedp;
4622 int lreversep, rreversep;
4623 int lvolatilep = 0, rvolatilep = 0;
4624 tree linner, rinner = NULL_TREE;
4625 tree mask;
4626 tree offset;
4628 /* Get all the information about the extractions being done. If the bit size
4629 is the same as the size of the underlying object, we aren't doing an
4630 extraction at all and so can do nothing. We also don't want to
4631 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4632 then will no longer be able to replace it. */
4633 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4634 &lunsignedp, &lreversep, &lvolatilep);
4635 if (linner == lhs
4636 || !known_size_p (plbitsize)
4637 || !plbitsize.is_constant (&lbitsize)
4638 || !plbitpos.is_constant (&lbitpos)
4639 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4640 || offset != 0
4641 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4642 || lvolatilep)
4643 return 0;
4645 if (const_p)
4646 rreversep = lreversep;
4647 else
4649 /* If this is not a constant, we can only do something if bit positions,
4650 sizes, signedness and storage order are the same. */
4651 rinner
4652 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4653 &runsignedp, &rreversep, &rvolatilep);
4655 if (rinner == rhs
4656 || maybe_ne (lbitpos, rbitpos)
4657 || maybe_ne (lbitsize, rbitsize)
4658 || lunsignedp != runsignedp
4659 || lreversep != rreversep
4660 || offset != 0
4661 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4662 || rvolatilep)
4663 return 0;
4666 /* Honor the C++ memory model and mimic what RTL expansion does. */
4667 poly_uint64 bitstart = 0;
4668 poly_uint64 bitend = 0;
4669 if (TREE_CODE (lhs) == COMPONENT_REF)
4671 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4672 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4673 return 0;
4676 /* See if we can find a mode to refer to this field. We should be able to,
4677 but fail if we can't. */
4678 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4679 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4680 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4681 TYPE_ALIGN (TREE_TYPE (rinner))),
4682 BITS_PER_WORD, false, &nmode))
4683 return 0;
4685 /* Set signed and unsigned types of the precision of this mode for the
4686 shifts below. */
4687 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4689 /* Compute the bit position and size for the new reference and our offset
4690 within it. If the new reference is the same size as the original, we
4691 won't optimize anything, so return zero. */
4692 nbitsize = GET_MODE_BITSIZE (nmode);
4693 nbitpos = lbitpos & ~ (nbitsize - 1);
4694 lbitpos -= nbitpos;
4695 if (nbitsize == lbitsize)
4696 return 0;
4698 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4699 lbitpos = nbitsize - lbitsize - lbitpos;
4701 /* Make the mask to be used against the extracted field. */
4702 mask = build_int_cst_type (unsigned_type, -1);
4703 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4704 mask = const_binop (RSHIFT_EXPR, mask,
4705 size_int (nbitsize - lbitsize - lbitpos));
4707 if (! const_p)
4709 if (nbitpos < 0)
4710 return 0;
4712 /* If not comparing with constant, just rework the comparison
4713 and return. */
4714 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4715 nbitsize, nbitpos, 1, lreversep);
4716 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4717 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4718 nbitsize, nbitpos, 1, rreversep);
4719 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4720 return fold_build2_loc (loc, code, compare_type, t1, t2);
4723 /* Otherwise, we are handling the constant case. See if the constant is too
4724 big for the field. Warn and return a tree for 0 (false) if so. We do
4725 this not only for its own sake, but to avoid having to test for this
4726 error case below. If we didn't, we might generate wrong code.
4728 For unsigned fields, the constant shifted right by the field length should
4729 be all zero. For signed fields, the high-order bits should agree with
4730 the sign bit. */
4732 if (lunsignedp)
4734 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4736 warning (0, "comparison is always %d due to width of bit-field",
4737 code == NE_EXPR);
4738 return constant_boolean_node (code == NE_EXPR, compare_type);
4741 else
4743 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4744 if (tem != 0 && tem != -1)
4746 warning (0, "comparison is always %d due to width of bit-field",
4747 code == NE_EXPR);
4748 return constant_boolean_node (code == NE_EXPR, compare_type);
4752 if (nbitpos < 0)
4753 return 0;
4755 /* Single-bit compares should always be against zero. */
4756 if (lbitsize == 1 && ! integer_zerop (rhs))
4758 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4759 rhs = build_int_cst (type, 0);
4762 /* Make a new bitfield reference, shift the constant over the
4763 appropriate number of bits and mask it with the computed mask
4764 (in case this was a signed field). If we changed it, make a new one. */
4765 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4766 nbitsize, nbitpos, 1, lreversep);
4768 rhs = const_binop (BIT_AND_EXPR,
4769 const_binop (LSHIFT_EXPR,
4770 fold_convert_loc (loc, unsigned_type, rhs),
4771 size_int (lbitpos)),
4772 mask);
4774 lhs = build2_loc (loc, code, compare_type,
4775 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4776 return lhs;
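/* Illustrative sketch, not part of the GCC sources: given

     struct S { unsigned a : 3; unsigned b : 5; } s;

   the comparison s.b == 7 does not need the shifts implied by
   extracting the bit-field; on a little-endian target with A in the
   low bits it becomes, conceptually,

     (CONTAINING_BYTE (s) & 0xf8) == (7 << 3)

   i.e. one load, one mask and one compare against the shifted
   constant, which is what the code above constructs.  */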
4779 /* Subroutine for fold_truth_andor_1: decode a field reference.
4781 If EXP is a comparison reference, we return the innermost reference.
4783 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4784 set to the starting bit number.
4786 If the innermost field can be completely contained in a mode-sized
4787 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4789 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4790 otherwise it is not changed.
4792 *PUNSIGNEDP is set to the signedness of the field.
4794 *PREVERSEP is set to the storage order of the field.
4796 *PMASK is set to the mask used. This is either contained in a
4797 BIT_AND_EXPR or derived from the width of the field.
4799 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4801 Return 0 if this is not a component reference or is one that we can't
4802 do anything with. */
4804 static tree
4805 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4806 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4807 int *punsignedp, int *preversep, int *pvolatilep,
4808 tree *pmask, tree *pand_mask)
4810 tree exp = *exp_;
4811 tree outer_type = 0;
4812 tree and_mask = 0;
4813 tree mask, inner, offset;
4814 tree unsigned_type;
4815 unsigned int precision;
4817 /* All the optimizations using this function assume integer fields.
4818 There are problems with FP fields since the type_for_size call
4819 below can fail for, e.g., XFmode. */
4820 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4821 return NULL_TREE;
4823 /* We are interested in the bare arrangement of bits, so strip everything
4824 that doesn't affect the machine mode. However, record the type of the
4825 outermost expression if it may matter below. */
4826 if (CONVERT_EXPR_P (exp)
4827 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4828 outer_type = TREE_TYPE (exp);
4829 STRIP_NOPS (exp);
4831 if (TREE_CODE (exp) == BIT_AND_EXPR)
4833 and_mask = TREE_OPERAND (exp, 1);
4834 exp = TREE_OPERAND (exp, 0);
4835 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4836 if (TREE_CODE (and_mask) != INTEGER_CST)
4837 return NULL_TREE;
4840 poly_int64 poly_bitsize, poly_bitpos;
4841 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4842 pmode, punsignedp, preversep, pvolatilep);
4843 if ((inner == exp && and_mask == 0)
4844 || !poly_bitsize.is_constant (pbitsize)
4845 || !poly_bitpos.is_constant (pbitpos)
4846 || *pbitsize < 0
4847 || offset != 0
4848 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4849 /* Reject out-of-bound accesses (PR79731). */
4850 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4851 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4852 *pbitpos + *pbitsize) < 0))
4853 return NULL_TREE;
4855 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4856 if (unsigned_type == NULL_TREE)
4857 return NULL_TREE;
4859 *exp_ = exp;
4861 /* If the number of bits in the reference is the same as the bitsize of
4862 the outer type, then the outer type gives the signedness. Otherwise
4863 (in case of a small bitfield) the signedness is unchanged. */
4864 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4865 *punsignedp = TYPE_UNSIGNED (outer_type);
4867 /* Compute the mask to access the bitfield. */
4868 precision = TYPE_PRECISION (unsigned_type);
4870 mask = build_int_cst_type (unsigned_type, -1);
4872 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4873 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4875 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4876 if (and_mask != 0)
4877 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4878 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4880 *pmask = mask;
4881 *pand_mask = and_mask;
4882 return inner;
4885 /* Return true if MASK represents a mask of SIZE ones in the low-order
4886 bit positions and the type of MASK is signed. */
4888 static bool
4889 all_ones_mask_p (const_tree mask, unsigned int size)
4891 tree type = TREE_TYPE (mask);
4892 unsigned int precision = TYPE_PRECISION (type);
4894 /* If this function returns true when the type of the mask is
4895 UNSIGNED, then there will be errors. In particular see
4896 gcc.c-torture/execute/990326-1.c. There does not appear to be
4897 any documentation paper trail as to why this is so. But the pre
4898 wide-int worked with that restriction and it has been preserved
4899 here. */
4900 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4901 return false;
4903 return wi::mask (size, false, precision) == wi::to_wide (mask);
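/* Illustrative example, not part of the GCC sources: for a mask of
   signed 32-bit type, all_ones_mask_p (mask, 4) holds exactly for
   the constant 0xf; any mask of unsigned type is rejected, a
   restriction kept for historical reasons as described above.  */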
4906 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4907 represents the sign bit of EXP's type. If EXP represents a sign
4908 or zero extension, also test VAL against the unextended type.
4909 The return value is the (sub)expression whose sign bit is VAL,
4910 or NULL_TREE otherwise. */
4912 tree
4913 sign_bit_p (tree exp, const_tree val)
4915 int width;
4916 tree t;
4918 /* Tree EXP must have an integral type. */
4919 t = TREE_TYPE (exp);
4920 if (! INTEGRAL_TYPE_P (t))
4921 return NULL_TREE;
4923 /* Tree VAL must be an integer constant. */
4924 if (TREE_CODE (val) != INTEGER_CST
4925 || TREE_OVERFLOW (val))
4926 return NULL_TREE;
4928 width = TYPE_PRECISION (t);
4929 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4930 return exp;
4932 /* Handle extension from a narrower type. */
4933 if (TREE_CODE (exp) == NOP_EXPR
4934 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4935 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4937 return NULL_TREE;
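/* Illustrative example, not part of the GCC sources: for a 32-bit
   int EXP, sign_bit_p (exp, val) returns EXP when VAL is the
   constant with only bit 31 set (value 0x80000000), and NULL_TREE
   for every other constant; for EXP = (int) C with a narrower C,
   VAL is also tested against C's type.  */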
4940 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4941 operand is simple enough to be evaluated unconditionally. */
4943 static bool
4944 simple_operand_p (const_tree exp)
4946 /* Strip any conversions that don't change the machine mode. */
4947 STRIP_NOPS (exp);
4949 return (CONSTANT_CLASS_P (exp)
4950 || TREE_CODE (exp) == SSA_NAME
4951 || (DECL_P (exp)
4952 && ! TREE_ADDRESSABLE (exp)
4953 && ! TREE_THIS_VOLATILE (exp)
4954 && ! DECL_NONLOCAL (exp)
4955 /* Don't regard global variables as simple. They may be
4956 allocated in ways unknown to the compiler (shared memory,
4957 #pragma weak, etc). */
4958 && ! TREE_PUBLIC (exp)
4959 && ! DECL_EXTERNAL (exp)
4960 /* Weakrefs are not safe to read, since they can be NULL.
4961 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4962 have DECL_WEAK flag set. */
4963 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4964 /* Loading a static variable is unduly expensive, but global
4965 registers aren't expensive. */
4966 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4969 /* Determine if an operand is simple enough to be evaluated unconditionally.
4970 In addition to simple_operand_p, we assume that comparisons, conversions,
4971 and logic-not operations are simple, if their operands are simple, too. */
4973 bool
4974 simple_condition_p (tree exp)
4976 enum tree_code code;
4978 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4979 return false;
4981 while (CONVERT_EXPR_P (exp))
4982 exp = TREE_OPERAND (exp, 0);
4984 code = TREE_CODE (exp);
4986 if (TREE_CODE_CLASS (code) == tcc_comparison)
4987 return (simple_operand_p (TREE_OPERAND (exp, 0))
4988 && simple_operand_p (TREE_OPERAND (exp, 1)));
4990 if (code == TRUTH_NOT_EXPR)
4991 return simple_condition_p (TREE_OPERAND (exp, 0));
4993 return simple_operand_p (exp);
4997 /* The following functions are subroutines to fold_range_test and allow it to
4998 try to change a logical combination of comparisons into a range test.
5000 For example, both
5001 X == 2 || X == 3 || X == 4 || X == 5
5002 and
5003 X >= 2 && X <= 5
5004 are converted to
5005 (unsigned) (X - 2) <= 3
5007 We describe each set of comparisons as being either inside or outside
5008 a range, using a variable named like IN_P, and then describe the
5009 range with a lower and upper bound. If one of the bounds is omitted,
5010 it represents either the highest or lowest value of the type.
5012 In the comments below, we represent a range by two numbers in brackets
5013 preceded by a "+" to designate being inside that range, or a "-" to
5014 designate being outside that range, so the condition can be inverted by
5015 flipping the prefix. An omitted bound is represented by a "-". For
5016 example, "- [-, 10]" means being outside the range starting at the lowest
5017 possible value and ending at 10, in other words, being greater than 10.
5018 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5019 always false.
5021 We set up things so that the missing bounds are handled in a consistent
5022 manner so neither a missing bound nor "true" and "false" need to be
5023 handled using a special case. */
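/* Illustrative sketch, not part of the GCC sources, of why the
   canonical transformation is correct:

     2 <= X && X <= 5  ==>  (unsigned) (X - 2) <= 3

   subtracting the low bound rebases the range at zero, and for
   X < 2 the unsigned subtraction wraps around to a huge value, so
   the single unsigned comparison rejects both sides at once.  */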
5025 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5026 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5027 and UPPER1_P are nonzero if the respective argument is an upper bound
5028 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5029 must be specified for a comparison. ARG1 will be converted to ARG0's
5030 type if both are specified. */
5032 static tree
5033 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5034 tree arg1, int upper1_p)
5036 tree tem;
5037 int result;
5038 int sgn0, sgn1;
5040 /* If neither arg represents infinity, do the normal operation.
5041 Else, if not a comparison, return infinity. Else handle the special
5042 comparison rules. Note that most of the cases below won't occur, but
5043 are handled for consistency. */
5045 if (arg0 != 0 && arg1 != 0)
5047 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5048 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5049 STRIP_NOPS (tem);
5050 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5053 if (TREE_CODE_CLASS (code) != tcc_comparison)
5054 return 0;
5056 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5057 for neither. In real maths, we cannot assume open ended ranges are
5058 the same. But, this is computer arithmetic, where numbers are finite.
5059 We can therefore make the transformation of any unbounded range with
5060 the value Z, Z being greater than any representable number. This permits
5061 us to treat unbounded ranges as equal. */
5062 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5063 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5064 switch (code)
5066 case EQ_EXPR:
5067 result = sgn0 == sgn1;
5068 break;
5069 case NE_EXPR:
5070 result = sgn0 != sgn1;
5071 break;
5072 case LT_EXPR:
5073 result = sgn0 < sgn1;
5074 break;
5075 case LE_EXPR:
5076 result = sgn0 <= sgn1;
5077 break;
5078 case GT_EXPR:
5079 result = sgn0 > sgn1;
5080 break;
5081 case GE_EXPR:
5082 result = sgn0 >= sgn1;
5083 break;
5084 default:
5085 gcc_unreachable ();
5088 return constant_boolean_node (result, type);
5091 /* Helper routine for make_range. Perform one step for it, return
5092 new expression if the loop should continue or NULL_TREE if it should
5093 stop. */
5095 tree
5096 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5097 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5098 bool *strict_overflow_p)
5100 tree arg0_type = TREE_TYPE (arg0);
5101 tree n_low, n_high, low = *p_low, high = *p_high;
5102 int in_p = *p_in_p, n_in_p;
5104 switch (code)
5106 case TRUTH_NOT_EXPR:
5107 /* We can only do something if the range is testing for zero. */
5108 if (low == NULL_TREE || high == NULL_TREE
5109 || ! integer_zerop (low) || ! integer_zerop (high))
5110 return NULL_TREE;
5111 *p_in_p = ! in_p;
5112 return arg0;
5114 case EQ_EXPR: case NE_EXPR:
5115 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5116 /* We can only do something if the range is testing for zero
5117 and if the second operand is an integer constant. Note that
5118 saying something is "in" the range we make is done by
5119 complementing IN_P since it will set in the initial case of
5120 being not equal to zero; "out" is leaving it alone. */
5121 if (low == NULL_TREE || high == NULL_TREE
5122 || ! integer_zerop (low) || ! integer_zerop (high)
5123 || TREE_CODE (arg1) != INTEGER_CST)
5124 return NULL_TREE;
5126 switch (code)
5128 case NE_EXPR: /* - [c, c] */
5129 low = high = arg1;
5130 break;
5131 case EQ_EXPR: /* + [c, c] */
5132 in_p = ! in_p, low = high = arg1;
5133 break;
5134 case GT_EXPR: /* - [-, c] */
5135 low = 0, high = arg1;
5136 break;
5137 case GE_EXPR: /* + [c, -] */
5138 in_p = ! in_p, low = arg1, high = 0;
5139 break;
5140 case LT_EXPR: /* - [c, -] */
5141 low = arg1, high = 0;
5142 break;
5143 case LE_EXPR: /* + [-, c] */
5144 in_p = ! in_p, low = 0, high = arg1;
5145 break;
5146 default:
5147 gcc_unreachable ();
5150 /* If this is an unsigned comparison, we also know that EXP is
5151 greater than or equal to zero. We base the range tests we make
5152 on that fact, so we record it here so we can parse existing
5153 range tests. We test arg0_type since often the return type
5154 of, e.g. EQ_EXPR, is boolean. */
5155 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5157 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5158 in_p, low, high, 1,
5159 build_int_cst (arg0_type, 0),
5160 NULL_TREE))
5161 return NULL_TREE;
5163 in_p = n_in_p, low = n_low, high = n_high;
5165 /* If the high bound is missing, but we have a nonzero low
5166 bound, reverse the range so it goes from zero to the low bound
5167 minus 1. */
5168 if (high == 0 && low && ! integer_zerop (low))
5170 in_p = ! in_p;
5171 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5172 build_int_cst (TREE_TYPE (low), 1), 0);
5173 low = build_int_cst (arg0_type, 0);
5177 *p_low = low;
5178 *p_high = high;
5179 *p_in_p = in_p;
5180 return arg0;
5182 case NEGATE_EXPR:
5183 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5184 low and high are non-NULL, then normalize will DTRT. */
5185 if (!TYPE_UNSIGNED (arg0_type)
5186 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5188 if (low == NULL_TREE)
5189 low = TYPE_MIN_VALUE (arg0_type);
5190 if (high == NULL_TREE)
5191 high = TYPE_MAX_VALUE (arg0_type);
5194 /* (-x) IN [a,b] -> x in [-b, -a] */
5195 n_low = range_binop (MINUS_EXPR, exp_type,
5196 build_int_cst (exp_type, 0),
5197 0, high, 1);
5198 n_high = range_binop (MINUS_EXPR, exp_type,
5199 build_int_cst (exp_type, 0),
5200 0, low, 0);
5201 if (n_high != 0 && TREE_OVERFLOW (n_high))
5202 return NULL_TREE;
5203 goto normalize;
5205 case BIT_NOT_EXPR:
5206 /* ~ X -> -X - 1 */
5207 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5208 build_int_cst (exp_type, 1));
5210 case PLUS_EXPR:
5211 case MINUS_EXPR:
5212 if (TREE_CODE (arg1) != INTEGER_CST)
5213 return NULL_TREE;
5215 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5216 move a constant to the other side. */
5217 if (!TYPE_UNSIGNED (arg0_type)
5218 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5219 return NULL_TREE;
5221 /* If EXP is signed, any overflow in the computation is undefined,
5222 so we don't worry about it so long as our computations on
5223 the bounds don't overflow. For unsigned, overflow is defined
5224 and this is exactly the right thing. */
5225 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5226 arg0_type, low, 0, arg1, 0);
5227 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5228 arg0_type, high, 1, arg1, 0);
5229 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5230 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5231 return NULL_TREE;
5233 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5234 *strict_overflow_p = true;
5236 normalize:
5237 /* Check for an unsigned range which has wrapped around the maximum
5238 value thus making n_high < n_low, and normalize it. */
5239 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5241 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5242 build_int_cst (TREE_TYPE (n_high), 1), 0);
5243 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5244 build_int_cst (TREE_TYPE (n_low), 1), 0);
5246 /* If the range is of the form +/- [ x+1, x ], we won't
5247 be able to normalize it. But then, it represents the
5248 whole range or the empty set, so make it
5249 +/- [ -, - ]. */
5250 if (tree_int_cst_equal (n_low, low)
5251 && tree_int_cst_equal (n_high, high))
5252 low = high = 0;
5253 else
5254 in_p = ! in_p;
5256 else
5257 low = n_low, high = n_high;
5259 *p_low = low;
5260 *p_high = high;
5261 *p_in_p = in_p;
5262 return arg0;
5264 CASE_CONVERT:
5265 case NON_LVALUE_EXPR:
5266 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5267 return NULL_TREE;
5269 if (! INTEGRAL_TYPE_P (arg0_type)
5270 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5271 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5272 return NULL_TREE;
5274 n_low = low, n_high = high;
5276 if (n_low != 0)
5277 n_low = fold_convert_loc (loc, arg0_type, n_low);
5279 if (n_high != 0)
5280 n_high = fold_convert_loc (loc, arg0_type, n_high);
5282 /* If we're converting arg0 from an unsigned type, to exp,
5283 a signed type, we will be doing the comparison as unsigned.
5284 The tests above have already verified that LOW and HIGH
5285 are both positive.
5287 So we have to ensure that we will handle large unsigned
5288 values the same way that the current signed bounds treat
5289 negative values. */
5291 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5293 tree high_positive;
5294 tree equiv_type;
5295 /* For fixed-point modes, we need to pass the saturating flag
5296 as the 2nd parameter. */
5297 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5298 equiv_type
5299 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5300 TYPE_SATURATING (arg0_type));
5301 else
5302 equiv_type
5303 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5305 /* A range without an upper bound is, naturally, unbounded.
5306 Since convert would have cropped a very large value, use
5307 the max value for the destination type. */
5308 high_positive
5309 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5310 : TYPE_MAX_VALUE (arg0_type);
5312 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5313 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5314 fold_convert_loc (loc, arg0_type,
5315 high_positive),
5316 build_int_cst (arg0_type, 1));
5318 /* If the low bound is specified, "and" the range with the
5319 range for which the original unsigned value will be
5320 positive. */
5321 if (low != 0)
5323 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5324 1, fold_convert_loc (loc, arg0_type,
5325 integer_zero_node),
5326 high_positive))
5327 return NULL_TREE;
5329 in_p = (n_in_p == in_p);
5331 else
5333 /* Otherwise, "or" the range with the range of the input
5334 that will be interpreted as negative. */
5335 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5336 1, fold_convert_loc (loc, arg0_type,
5337 integer_zero_node),
5338 high_positive))
5339 return NULL_TREE;
5341 in_p = (in_p != n_in_p);
5345 /* Otherwise, if we are converting arg0 from signed type, to exp,
5346 an unsigned type, we will do the comparison as signed. If
5347 high is non-NULL, we punt above if it doesn't fit in the signed
5348 type, so if we get through here, +[-, high] or +[low, high] are
5349 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5350 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5351 high is not, the +[low, -] range is equivalent to union of
5352 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5353 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5354 low being 0, which should be treated as [-, -]. */
5355 else if (TYPE_UNSIGNED (exp_type)
5356 && !TYPE_UNSIGNED (arg0_type)
5357 && low
5358 && !high)
5360 if (integer_zerop (low))
5361 n_low = NULL_TREE;
5362 else
5364 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5365 n_low, build_int_cst (arg0_type, -1));
5366 n_low = build_zero_cst (arg0_type);
5367 in_p = !in_p;
5371 *p_low = n_low;
5372 *p_high = n_high;
5373 *p_in_p = in_p;
5374 return arg0;
5376 default:
5377 return NULL_TREE;
5381 /* Given EXP, a logical expression, set the range it is testing into
5382 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5383 actually being tested. *PLOW and *PHIGH will be made of the same
5384 type as the returned expression. If EXP is not a comparison, we
5385 will most likely not be returning a useful value and range. Set
5386 *STRICT_OVERFLOW_P to true if the return value is only valid
5387 because signed overflow is undefined; otherwise, do not change
5388 *STRICT_OVERFLOW_P. */
5390 tree
5391 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5392 bool *strict_overflow_p)
5394 enum tree_code code;
5395 tree arg0, arg1 = NULL_TREE;
5396 tree exp_type, nexp;
5397 int in_p;
5398 tree low, high;
5399 location_t loc = EXPR_LOCATION (exp);
5401 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5402 and see if we can refine the range. Some of the cases below may not
5403 happen, but it doesn't seem worth worrying about this. We "continue"
5404 the outer loop when we've changed something; otherwise we "break"
5405 the switch, which will "break" the while. */
5407 in_p = 0;
5408 low = high = build_int_cst (TREE_TYPE (exp), 0);
5410 while (1)
5412 code = TREE_CODE (exp);
5413 exp_type = TREE_TYPE (exp);
5414 arg0 = NULL_TREE;
5416 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5418 if (TREE_OPERAND_LENGTH (exp) > 0)
5419 arg0 = TREE_OPERAND (exp, 0);
5420 if (TREE_CODE_CLASS (code) == tcc_binary
5421 || TREE_CODE_CLASS (code) == tcc_comparison
5422 || (TREE_CODE_CLASS (code) == tcc_expression
5423 && TREE_OPERAND_LENGTH (exp) > 1))
5424 arg1 = TREE_OPERAND (exp, 1);
5426 if (arg0 == NULL_TREE)
5427 break;
5429 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5430 &high, &in_p, strict_overflow_p);
5431 if (nexp == NULL_TREE)
5432 break;
5433 exp = nexp;
5436 /* If EXP is a constant, we can evaluate whether this is true or false. */
5437 if (TREE_CODE (exp) == INTEGER_CST)
5439 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5440 exp, 0, low, 0))
5441 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5442 exp, 1, high, 1)));
5443 low = high = 0;
5444 exp = 0;
5447 *pin_p = in_p, *plow = low, *phigh = high;
5448 return exp;
5451 /* Returns TRUE if a [LOW, HIGH] range check can be optimized to
5452 a bitwise check, i.e. when
5453 LOW == 0xXX...X00...0
5454 HIGH == 0xXX...X11...1
5455 Return corresponding mask in MASK and stem in VALUE. */
5457 static bool
5458 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5459 tree *value)
5461 if (TREE_CODE (low) != INTEGER_CST
5462 || TREE_CODE (high) != INTEGER_CST)
5463 return false;
5465 unsigned prec = TYPE_PRECISION (type);
5466 wide_int lo = wi::to_wide (low, prec);
5467 wide_int hi = wi::to_wide (high, prec);
5469 wide_int end_mask = lo ^ hi;
5470 if ((end_mask & (end_mask + 1)) != 0
5471 || (lo & end_mask) != 0)
5472 return false;
5474 wide_int stem_mask = ~end_mask;
5475 wide_int stem = lo & stem_mask;
5476 if (stem != (hi & stem_mask))
5477 return false;
5479 *mask = wide_int_to_tree (type, stem_mask);
5480 *value = wide_int_to_tree (type, stem);
5482 return true;
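/* Illustrative example, not part of the GCC sources: for LOW = 0x20
   and HIGH = 0x3f, END_MASK = LOW ^ HIGH = 0x1f covers exactly the
   varying low bits, so the function succeeds with *MASK = ~0x1f and
   *VALUE = 0x20, turning the range check X in [0x20, 0x3f] into

     (X & ~0x1f) == 0x20  */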
5485 /* Helper routine for build_range_check and match.pd. Return the type to
5486 perform the check or NULL if it shouldn't be optimized. */
5488 tree
5489 range_check_type (tree etype)
5491 /* First make sure that arithmetic in this type is valid, then make sure
5492 that it wraps around. */
5493 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5494 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5496 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5498 tree utype, minv, maxv;
5500 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5501 for the type in question, as we rely on this here. */
5502 utype = unsigned_type_for (etype);
5503 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5504 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5505 build_int_cst (TREE_TYPE (maxv), 1), 1);
5506 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5508 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5509 minv, 1, maxv, 1)))
5510 etype = utype;
5511 else
5512 return NULL_TREE;
5514 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5515 etype = unsigned_type_for (etype);
5516 return etype;
5519 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5520 type, TYPE, return an expression to test if EXP is in (or out of, depending
5521 on IN_P) the range. Return 0 if the test couldn't be created. */
5523 tree
5524 build_range_check (location_t loc, tree type, tree exp, int in_p,
5525 tree low, tree high)
5527 tree etype = TREE_TYPE (exp), mask, value;
5529 /* Disable this optimization for function pointer expressions
5530 on targets that require function pointer canonicalization. */
5531 if (targetm.have_canonicalize_funcptr_for_compare ()
5532 && POINTER_TYPE_P (etype)
5533 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5534 return NULL_TREE;
5536 if (! in_p)
5538 value = build_range_check (loc, type, exp, 1, low, high);
5539 if (value != 0)
5540 return invert_truthvalue_loc (loc, value);
5542 return 0;
5545 if (low == 0 && high == 0)
5546 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5548 if (low == 0)
5549 return fold_build2_loc (loc, LE_EXPR, type, exp,
5550 fold_convert_loc (loc, etype, high));
5552 if (high == 0)
5553 return fold_build2_loc (loc, GE_EXPR, type, exp,
5554 fold_convert_loc (loc, etype, low));
5556 if (operand_equal_p (low, high, 0))
5557 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5558 fold_convert_loc (loc, etype, low));
5560 if (TREE_CODE (exp) == BIT_AND_EXPR
5561 && maskable_range_p (low, high, etype, &mask, &value))
5562 return fold_build2_loc (loc, EQ_EXPR, type,
5563 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5564 exp, mask),
5565 value);
5567 if (integer_zerop (low))
5569 if (! TYPE_UNSIGNED (etype))
5571 etype = unsigned_type_for (etype);
5572 high = fold_convert_loc (loc, etype, high);
5573 exp = fold_convert_loc (loc, etype, exp);
5575 return build_range_check (loc, type, exp, 1, 0, high);
5578 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5579 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5581 int prec = TYPE_PRECISION (etype);
5583 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5585 if (TYPE_UNSIGNED (etype))
5587 tree signed_etype = signed_type_for (etype);
5588 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5589 etype
5590 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5591 else
5592 etype = signed_etype;
5593 exp = fold_convert_loc (loc, etype, exp);
5595 return fold_build2_loc (loc, GT_EXPR, type, exp,
5596 build_int_cst (etype, 0));
5600 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5601 This requires wrap-around arithmetic for the type of the expression. */
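/* E.g. 'c >= 'a' && c <= 'z'' can become the single unsigned test
   '(c - 'a') <= ('z' - 'a')': wrap-around maps every value below 'a'
   to a large number that fails the comparison. */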
5602 etype = range_check_type (etype);
5603 if (etype == NULL_TREE)
5604 return NULL_TREE;
5606 high = fold_convert_loc (loc, etype, high);
5607 low = fold_convert_loc (loc, etype, low);
5608 exp = fold_convert_loc (loc, etype, exp);
5610 value = const_binop (MINUS_EXPR, high, low);
5612 if (value != 0 && !TREE_OVERFLOW (value))
5613 return build_range_check (loc, type,
5614 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5615 1, build_int_cst (etype, 0), value);
5617 return 0;
5620 /* Return the predecessor of VAL in its type, handling the infinite case. */
5622 static tree
5623 range_predecessor (tree val)
5625 tree type = TREE_TYPE (val);
5627 if (INTEGRAL_TYPE_P (type)
5628 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5629 return 0;
5630 else
5631 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5632 build_int_cst (TREE_TYPE (val), 1), 0);
5635 /* Return the successor of VAL in its type, handling the infinite case. */
5637 static tree
5638 range_successor (tree val)
5640 tree type = TREE_TYPE (val);
5642 if (INTEGRAL_TYPE_P (type)
5643 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5644 return 0;
5645 else
5646 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5647 build_int_cst (TREE_TYPE (val), 1), 0);
5650 /* Given two ranges, see if we can merge them into one. Return true if we
5651 can, false if we can't. Set the output range into the specified parameters. */
5653 bool
5654 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5655 tree high0, int in1_p, tree low1, tree high1)
5657 int no_overlap;
5658 int subset;
5659 int temp;
5660 tree tem;
5661 int in_p;
5662 tree low, high;
5663 int lowequal = ((low0 == 0 && low1 == 0)
5664 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5665 low0, 0, low1, 0)));
5666 int highequal = ((high0 == 0 && high1 == 0)
5667 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5668 high0, 1, high1, 1)));
5670 /* Make range 0 be the range that starts first, or ends last if they
5671 start at the same value. Swap them if that is not the case. */
5672 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5673 low0, 0, low1, 0))
5674 || (lowequal
5675 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5676 high1, 1, high0, 1))))
5678 temp = in0_p, in0_p = in1_p, in1_p = temp;
5679 tem = low0, low0 = low1, low1 = tem;
5680 tem = high0, high0 = high1, high1 = tem;
5683 /* If the second range is != high1, where high1 is the maximum value of
5684 its type, first try merging with the < high1 range instead. */
5685 if (low1
5686 && high1
5687 && TREE_CODE (low1) == INTEGER_CST
5688 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5689 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5690 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5691 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5692 && operand_equal_p (low1, high1, 0))
5694 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5695 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5696 !in1_p, NULL_TREE, range_predecessor (low1)))
5697 return true;
5698 /* Similarly, if the second range is != low1, where low1 is the minimum
5699 value of its type, first try merging with the > low1 range instead. */
5700 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5701 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5702 !in1_p, range_successor (low1), NULL_TREE))
5703 return true;
5706 /* Now flag two cases, whether the ranges are disjoint or whether the
5707 second range is totally subsumed in the first. Note that the tests
5708 below are simplified by the ones above. */
5709 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5710 high0, 1, low1, 0));
5711 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5712 high1, 1, high0, 1));
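/* For instance, after the ordering above, +[1,5] and +[3,4] give
   SUBSET, while +[1,2] and +[5,9] give NO_OVERLAP. */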
5714 /* We now have four cases, depending on whether we are including or
5715 excluding the two ranges. */
5716 if (in0_p && in1_p)
5718 /* If they don't overlap, the result is false. If the second range
5719 is a subset, it is the result. Otherwise, the range is from the start
5720 of the second to the end of the first. */
5721 if (no_overlap)
5722 in_p = 0, low = high = 0;
5723 else if (subset)
5724 in_p = 1, low = low1, high = high1;
5725 else
5726 in_p = 1, low = low1, high = high0;
5729 else if (in0_p && ! in1_p)
5731 /* If they don't overlap, the result is the first range. If they are
5732 equal, the result is false. If the second range is a subset of the
5733 first, and the ranges begin at the same place, we go from just after
5734 the end of the second range to the end of the first. If the second
5735 range is not a subset of the first, or if it is a subset and both
5736 ranges end at the same place, the range starts at the start of the
5737 first range and ends just before the second range.
5738 Otherwise, we can't describe this as a single range. */
5739 if (no_overlap)
5740 in_p = 1, low = low0, high = high0;
5741 else if (lowequal && highequal)
5742 in_p = 0, low = high = 0;
5743 else if (subset && lowequal)
5745 low = range_successor (high1);
5746 high = high0;
5747 in_p = 1;
5748 if (low == 0)
5750 /* We are in the weird situation where high0 > high1 but
5751 high1 has no successor. Punt. */
5752 return 0;
5755 else if (! subset || highequal)
5757 low = low0;
5758 high = range_predecessor (low1);
5759 in_p = 1;
5760 if (high == 0)
5762 /* low0 < low1 but low1 has no predecessor. Punt. */
5763 return 0;
5766 else
5767 return 0;
5770 else if (! in0_p && in1_p)
5772 /* If they don't overlap, the result is the second range. If the second
5773 is a subset of the first, the result is false. Otherwise,
5774 the range starts just after the first range and ends at the
5775 end of the second. */
5776 if (no_overlap)
5777 in_p = 1, low = low1, high = high1;
5778 else if (subset || highequal)
5779 in_p = 0, low = high = 0;
5780 else
5782 low = range_successor (high0);
5783 high = high1;
5784 in_p = 1;
5785 if (low == 0)
5787 /* high1 > high0 but high0 has no successor. Punt. */
5788 return 0;
5793 else
5795 /* The case where we are excluding both ranges. Here the complex case
5796 is if they don't overlap. In that case, the only time we have a
5797 range is if they are adjacent. If the second is a subset of the
5798 first, the result is the first. Otherwise, the range to exclude
5799 starts at the beginning of the first range and ends at the end of the
5800 second. */
5801 if (no_overlap)
5803 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5804 range_successor (high0),
5805 1, low1, 0)))
5806 in_p = 0, low = low0, high = high1;
5807 else
5809 /* Canonicalize - [min, x] into - [-, x]. */
5810 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5811 switch (TREE_CODE (TREE_TYPE (low0)))
5813 case ENUMERAL_TYPE:
5814 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5815 GET_MODE_BITSIZE
5816 (TYPE_MODE (TREE_TYPE (low0)))))
5817 break;
5818 /* FALLTHROUGH */
5819 case INTEGER_TYPE:
5820 if (tree_int_cst_equal (low0,
5821 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5822 low0 = 0;
5823 break;
5824 case POINTER_TYPE:
5825 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5826 && integer_zerop (low0))
5827 low0 = 0;
5828 break;
5829 default:
5830 break;
5833 /* Canonicalize - [x, max] into - [x, -]. */
5834 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5835 switch (TREE_CODE (TREE_TYPE (high1)))
5837 case ENUMERAL_TYPE:
5838 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5839 GET_MODE_BITSIZE
5840 (TYPE_MODE (TREE_TYPE (high1)))))
5841 break;
5842 /* FALLTHROUGH */
5843 case INTEGER_TYPE:
5844 if (tree_int_cst_equal (high1,
5845 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5846 high1 = 0;
5847 break;
5848 case POINTER_TYPE:
5849 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5850 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5851 high1, 1,
5852 build_int_cst (TREE_TYPE (high1), 1),
5853 1)))
5854 high1 = 0;
5855 break;
5856 default:
5857 break;
5860 /* The ranges might also be adjacent between the maximum and
5861 minimum values of the given type. For
5862 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5863 return + [x + 1, y - 1]. */
5864 if (low0 == 0 && high1 == 0)
5866 low = range_successor (high0);
5867 high = range_predecessor (low1);
5868 if (low == 0 || high == 0)
5869 return 0;
5871 in_p = 1;
5873 else
5874 return 0;
5877 else if (subset)
5878 in_p = 0, low = low0, high = high0;
5879 else
5880 in_p = 0, low = low0, high = high1;
5883 *pin_p = in_p, *plow = low, *phigh = high;
5884 return 1;
5888 /* Subroutine of fold, looking inside expressions of the form
5889 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5890 are the three operands of the COND_EXPR. This function is
5891 being used also to optimize A op B ? C : A, by reversing the
5892 comparison first.
5894 Return a folded expression whose code is not a COND_EXPR
5895 anymore, or NULL_TREE if no folding opportunity is found. */
5897 static tree
5898 fold_cond_expr_with_comparison (location_t loc, tree type,
5899 enum tree_code comp_code,
5900 tree arg00, tree arg01, tree arg1, tree arg2)
5902 tree arg1_type = TREE_TYPE (arg1);
5903 tree tem;
5905 STRIP_NOPS (arg1);
5906 STRIP_NOPS (arg2);
5908 /* If we have A op 0 ? A : -A, consider applying the following
5909 transformations:
5911 A == 0? A : -A same as -A
5912 A != 0? A : -A same as A
5913 A >= 0? A : -A same as abs (A)
5914 A > 0? A : -A same as abs (A)
5915 A <= 0? A : -A same as -abs (A)
5916 A < 0? A : -A same as -abs (A)
5918 None of these transformations work for modes with signed
5919 zeros. If A is +/-0, the first two transformations will
5920 change the sign of the result (from +0 to -0, or vice
5921 versa). The last four will fix the sign of the result,
5922 even though the original expressions could be positive or
5923 negative, depending on the sign of A.
5925 Note that all these transformations are correct if A is
5926 NaN, since the two alternatives (A and -A) are also NaNs. */
5927 if (!HONOR_SIGNED_ZEROS (type)
5928 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5929 ? real_zerop (arg01)
5930 : integer_zerop (arg01))
5931 && ((TREE_CODE (arg2) == NEGATE_EXPR
5932 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5933 /* In the case that A is of the form X-Y, '-A' (arg2) may
5934 have already been folded to Y-X, check for that. */
5935 || (TREE_CODE (arg1) == MINUS_EXPR
5936 && TREE_CODE (arg2) == MINUS_EXPR
5937 && operand_equal_p (TREE_OPERAND (arg1, 0),
5938 TREE_OPERAND (arg2, 1), 0)
5939 && operand_equal_p (TREE_OPERAND (arg1, 1),
5940 TREE_OPERAND (arg2, 0), 0))))
5941 switch (comp_code)
5943 case EQ_EXPR:
5944 case UNEQ_EXPR:
5945 tem = fold_convert_loc (loc, arg1_type, arg1);
5946 return fold_convert_loc (loc, type, negate_expr (tem));
5947 case NE_EXPR:
5948 case LTGT_EXPR:
5949 return fold_convert_loc (loc, type, arg1);
5950 case UNGE_EXPR:
5951 case UNGT_EXPR:
5952 if (flag_trapping_math)
5953 break;
5954 /* Fall through. */
5955 case GE_EXPR:
5956 case GT_EXPR:
5957 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5958 break;
5959 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5960 return fold_convert_loc (loc, type, tem);
5961 case UNLE_EXPR:
5962 case UNLT_EXPR:
5963 if (flag_trapping_math)
5964 break;
5965 /* FALLTHRU */
5966 case LE_EXPR:
5967 case LT_EXPR:
5968 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5969 break;
5970 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5971 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5973 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5974 is not: it invokes UB both in abs and in the negation of its result.
5975 So, use ABSU_EXPR instead. */
5976 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5977 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5978 tem = negate_expr (tem);
5979 return fold_convert_loc (loc, type, tem);
5981 else
5983 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5984 return negate_expr (fold_convert_loc (loc, type, tem));
5986 default:
5987 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5988 break;
5991 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5992 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5993 both transformations are correct when A is NaN: A != 0
5994 is then true, and A == 0 is false. */
5996 if (!HONOR_SIGNED_ZEROS (type)
5997 && integer_zerop (arg01) && integer_zerop (arg2))
5999 if (comp_code == NE_EXPR)
6000 return fold_convert_loc (loc, type, arg1);
6001 else if (comp_code == EQ_EXPR)
6002 return build_zero_cst (type);
6005 /* Try some transformations of A op B ? A : B.
6007 A == B? A : B same as B
6008 A != B? A : B same as A
6009 A >= B? A : B same as max (A, B)
6010 A > B? A : B same as max (B, A)
6011 A <= B? A : B same as min (A, B)
6012 A < B? A : B same as min (B, A)
6014 As above, these transformations don't work in the presence
6015 of signed zeros. For example, if A and B are zeros of
6016 opposite sign, the first two transformations will change
6017 the sign of the result. In the last four, the original
6018 expressions give different results for (A=+0, B=-0) and
6019 (A=-0, B=+0), but the transformed expressions do not.
6021 The first two transformations are correct if either A or B
6022 is a NaN. In the first transformation, the condition will
6023 be false, and B will indeed be chosen. In the case of the
6024 second transformation, the condition A != B will be true,
6025 and A will be chosen.
6027 The conversions to max() and min() are not correct if B is
6028 a number and A is not. The conditions in the original
6029 expressions will be false, so all four give B. The min()
6030 and max() versions would give a NaN instead. */
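/* For example, 'a >= b ? a : b' becomes MAX_EXPR <a, b> while
   'a > b ? a : b' becomes MAX_EXPR <b, a>; the operand order records
   which operand is selected when the two compare equal. */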
6031 if (!HONOR_SIGNED_ZEROS (type)
6032 && operand_equal_for_comparison_p (arg01, arg2)
6033 /* Avoid these transformations if the COND_EXPR may be used
6034 as an lvalue in the C++ front-end. PR c++/19199. */
6035 && (in_gimple_form
6036 || VECTOR_TYPE_P (type)
6037 || (! lang_GNU_CXX ()
6038 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6039 || ! maybe_lvalue_p (arg1)
6040 || ! maybe_lvalue_p (arg2)))
6042 tree comp_op0 = arg00;
6043 tree comp_op1 = arg01;
6044 tree comp_type = TREE_TYPE (comp_op0);
6046 switch (comp_code)
6048 case EQ_EXPR:
6049 return fold_convert_loc (loc, type, arg2);
6050 case NE_EXPR:
6051 return fold_convert_loc (loc, type, arg1);
6052 case LE_EXPR:
6053 case LT_EXPR:
6054 case UNLE_EXPR:
6055 case UNLT_EXPR:
6056 /* In C++ a ?: expression can be an lvalue, so put the
6057 operand which will be used if they are equal first
6058 so that we can convert this back to the
6059 corresponding COND_EXPR. */
6060 if (!HONOR_NANS (arg1))
6062 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6063 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6064 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6065 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6066 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6067 comp_op1, comp_op0);
6068 return fold_convert_loc (loc, type, tem);
6070 break;
6071 case GE_EXPR:
6072 case GT_EXPR:
6073 case UNGE_EXPR:
6074 case UNGT_EXPR:
6075 if (!HONOR_NANS (arg1))
6077 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6078 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6079 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6080 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6081 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6082 comp_op1, comp_op0);
6083 return fold_convert_loc (loc, type, tem);
6085 break;
6086 case UNEQ_EXPR:
6087 if (!HONOR_NANS (arg1))
6088 return fold_convert_loc (loc, type, arg2);
6089 break;
6090 case LTGT_EXPR:
6091 if (!HONOR_NANS (arg1))
6092 return fold_convert_loc (loc, type, arg1);
6093 break;
6094 default:
6095 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6096 break;
6100 return NULL_TREE;
6105 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6106 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6107 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6108 false) >= 2)
6109 #endif
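/* That is, by default the non-short-circuit forms below are used
   whenever a conditional branch is judged relatively expensive
   (BRANCH_COST of 2 or more). */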
6111 /* EXP is some logical combination of boolean tests. See if we can
6112 merge it into some range test. Return the new tree if so. */
6114 static tree
6115 fold_range_test (location_t loc, enum tree_code code, tree type,
6116 tree op0, tree op1)
6118 int or_op = (code == TRUTH_ORIF_EXPR
6119 || code == TRUTH_OR_EXPR);
6120 int in0_p, in1_p, in_p;
6121 tree low0, low1, low, high0, high1, high;
6122 bool strict_overflow_p = false;
6123 tree tem, lhs, rhs;
6124 const char * const warnmsg = G_("assuming signed overflow does not occur "
6125 "when simplifying range test");
6127 if (!INTEGRAL_TYPE_P (type))
6128 return 0;
6130 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6131 /* If op0 is known true or false and this is a short-circuiting
6132 operation we must not merge with op1 since that makes side-effects
6133 unconditional. So special-case this. */
6134 if (!lhs
6135 && ((code == TRUTH_ORIF_EXPR && in0_p)
6136 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6137 return op0;
6138 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6140 /* If this is an OR operation, invert both sides; we will invert
6141 again at the end. */
6142 if (or_op)
6143 in0_p = ! in0_p, in1_p = ! in1_p;
6145 /* If both expressions are the same, if we can merge the ranges, and we
6146 can build the range test, return it or its inverse. If one of the
6147 ranges is always true or always false, consider it to be the same
6148 expression as the other. */
6149 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6150 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6151 in1_p, low1, high1)
6152 && (tem = (build_range_check (loc, type,
6153 lhs != 0 ? lhs
6154 : rhs != 0 ? rhs : integer_zero_node,
6155 in_p, low, high))) != 0)
6157 if (strict_overflow_p)
6158 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6159 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6162 /* On machines where the branch cost is expensive, if this is a
6163 short-circuited branch and the underlying object on both sides
6164 is the same, make a non-short-circuit operation. */
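/* E.g. a TRUTH_ORIF_EXPR of two tests of the same object can become a
   TRUTH_OR_EXPR, evaluating both tests unconditionally and removing
   the branch. */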
6165 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6166 if (param_logical_op_non_short_circuit != -1)
6167 logical_op_non_short_circuit
6168 = param_logical_op_non_short_circuit;
6169 if (logical_op_non_short_circuit
6170 && !sanitize_coverage_p ()
6171 && lhs != 0 && rhs != 0
6172 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6173 && operand_equal_p (lhs, rhs, 0))
6175 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6176 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6177 which cases we can't do this. */
6178 if (simple_operand_p (lhs))
6179 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6180 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6181 type, op0, op1);
6183 else if (!lang_hooks.decls.global_bindings_p ()
6184 && !CONTAINS_PLACEHOLDER_P (lhs))
6186 tree common = save_expr (lhs);
6188 if ((lhs = build_range_check (loc, type, common,
6189 or_op ? ! in0_p : in0_p,
6190 low0, high0)) != 0
6191 && (rhs = build_range_check (loc, type, common,
6192 or_op ? ! in1_p : in1_p,
6193 low1, high1)) != 0)
6195 if (strict_overflow_p)
6196 fold_overflow_warning (warnmsg,
6197 WARN_STRICT_OVERFLOW_COMPARISON);
6198 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6199 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6200 type, lhs, rhs);
6205 return 0;
6208 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6209 bit value. Arrange things so the extra bits will be set to zero if and
6210 only if C is sign-extended to its full width. If MASK is nonzero,
6211 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6213 static tree
6214 unextend (tree c, int p, int unsignedp, tree mask)
6216 tree type = TREE_TYPE (c);
6217 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6218 tree temp;
6220 if (p == modesize || unsignedp)
6221 return c;
6223 /* We work by getting just the sign bit into the low-order bit, then
6224 into the high-order bit, then sign-extend. We then XOR that value
6225 with C. */
6226 temp = build_int_cst (TREE_TYPE (c),
6227 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6229 /* We must use a signed type in order to get an arithmetic right shift.
6230 However, we must also avoid introducing accidental overflows, so that
6231 a subsequent call to integer_zerop will work. Hence we must
6232 do the type conversion here. At this point, the constant is either
6233 zero or one, and the conversion to a signed type can never overflow.
6234 We could get an overflow if this conversion is done anywhere else. */
6235 if (TYPE_UNSIGNED (type))
6236 temp = fold_convert (signed_type_for (type), temp);
6238 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6239 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6240 if (mask != 0)
6241 temp = const_binop (BIT_AND_EXPR, temp,
6242 fold_convert (TREE_TYPE (c), mask));
6243 /* If necessary, convert the type back to match the type of C. */
6244 if (TYPE_UNSIGNED (type))
6245 temp = fold_convert (type, temp);
6247 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6250 /* For an expression that has the form
6251 (A && B) || ~B
6252 or
6253 (A || B) && ~B,
6254 we can drop one of the inner expressions and simplify to
6255 A || ~B
6256 or
6257 A && ~B
6258 LOC is the location of the resulting expression. OP is the inner
6259 logical operation; the left-hand side in the examples above, while CMPOP
6260 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6261 removing a condition that guards another, as in
6262 (A != NULL && A->...) || A == NULL
6263 which we must not transform. If RHS_ONLY is true, only eliminate the
6264 right-most operand of the inner logical operation. */
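/* For instance, '(a < 5 && b) || a >= 5' simplifies to 'b || a >= 5':
   the 'a < 5' test is the inverse of the guard on the other arm and is
   therefore redundant. */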
6266 static tree
6267 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6268 bool rhs_only)
6270 tree type = TREE_TYPE (cmpop);
6271 enum tree_code code = TREE_CODE (cmpop);
6272 enum tree_code truthop_code = TREE_CODE (op);
6273 tree lhs = TREE_OPERAND (op, 0);
6274 tree rhs = TREE_OPERAND (op, 1);
6275 tree orig_lhs = lhs, orig_rhs = rhs;
6276 enum tree_code rhs_code = TREE_CODE (rhs);
6277 enum tree_code lhs_code = TREE_CODE (lhs);
6278 enum tree_code inv_code;
6280 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6281 return NULL_TREE;
6283 if (TREE_CODE_CLASS (code) != tcc_comparison)
6284 return NULL_TREE;
6286 if (rhs_code == truthop_code)
6288 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6289 if (newrhs != NULL_TREE)
6291 rhs = newrhs;
6292 rhs_code = TREE_CODE (rhs);
6295 if (lhs_code == truthop_code && !rhs_only)
6297 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6298 if (newlhs != NULL_TREE)
6300 lhs = newlhs;
6301 lhs_code = TREE_CODE (lhs);
6305 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6306 if (inv_code == rhs_code
6307 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6308 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6309 return lhs;
6310 if (!rhs_only && inv_code == lhs_code
6311 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6312 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6313 return rhs;
6314 if (rhs != orig_rhs || lhs != orig_lhs)
6315 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6316 lhs, rhs);
6317 return NULL_TREE;
6320 /* Find ways of folding logical expressions of LHS and RHS:
6321 Try to merge two comparisons to the same innermost item.
6322 Look for range tests like "ch >= '0' && ch <= '9'".
6323 Look for combinations of simple terms on machines with expensive branches
6324 and evaluate the RHS unconditionally.
6326 For example, if we have p->a == 2 && p->b == 4 and we can make an
6327 object large enough to span both A and B, we can do this with a comparison
6328 against the object ANDed with the a mask.
6330 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6331 operations to do this with one comparison.
6333 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6334 function and the one above.
6336 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6337 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6339 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6340 two operands.
6342 We return the simplified tree or 0 if no optimization is possible. */
6344 static tree
6345 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6346 tree lhs, tree rhs)
6348 /* If this is the "or" of two comparisons, we can do something if
6349 the comparisons are NE_EXPR. If this is the "and", we can do something
6350 if the comparisons are EQ_EXPR. I.e.,
6351 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6353 WANTED_CODE is this operation code. For single bit fields, we can
6354 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6355 comparison for one-bit fields. */
6357 enum tree_code wanted_code;
6358 enum tree_code lcode, rcode;
6359 tree ll_arg, lr_arg, rl_arg, rr_arg;
6360 tree ll_inner, lr_inner, rl_inner, rr_inner;
6361 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6362 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6363 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6364 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6365 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6366 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6367 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6368 scalar_int_mode lnmode, rnmode;
6369 tree ll_mask, lr_mask, rl_mask, rr_mask;
6370 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6371 tree l_const, r_const;
6372 tree lntype, rntype, result;
6373 HOST_WIDE_INT first_bit, end_bit;
6374 int volatilep;
6376 /* Start by getting the comparison codes. Fail if anything is volatile.
6377 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6378 it were surrounded with a NE_EXPR. */
6380 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6381 return 0;
6383 lcode = TREE_CODE (lhs);
6384 rcode = TREE_CODE (rhs);
6386 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6388 lhs = build2 (NE_EXPR, truth_type, lhs,
6389 build_int_cst (TREE_TYPE (lhs), 0));
6390 lcode = NE_EXPR;
6393 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6395 rhs = build2 (NE_EXPR, truth_type, rhs,
6396 build_int_cst (TREE_TYPE (rhs), 0));
6397 rcode = NE_EXPR;
6400 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6401 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6402 return 0;
6404 ll_arg = TREE_OPERAND (lhs, 0);
6405 lr_arg = TREE_OPERAND (lhs, 1);
6406 rl_arg = TREE_OPERAND (rhs, 0);
6407 rr_arg = TREE_OPERAND (rhs, 1);
6409 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6410 if (simple_operand_p (ll_arg)
6411 && simple_operand_p (lr_arg))
6413 if (operand_equal_p (ll_arg, rl_arg, 0)
6414 && operand_equal_p (lr_arg, rr_arg, 0))
6416 result = combine_comparisons (loc, code, lcode, rcode,
6417 truth_type, ll_arg, lr_arg);
6418 if (result)
6419 return result;
6421 else if (operand_equal_p (ll_arg, rr_arg, 0)
6422 && operand_equal_p (lr_arg, rl_arg, 0))
6424 result = combine_comparisons (loc, code, lcode,
6425 swap_tree_comparison (rcode),
6426 truth_type, ll_arg, lr_arg);
6427 if (result)
6428 return result;
6432 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6433 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6435 /* If the RHS can be evaluated unconditionally and its operands are
6436 simple, it wins to evaluate the RHS unconditionally on machines
6437 with expensive branches. In this case, this isn't a comparison
6438 that can be merged. */
6440 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6441 false) >= 2
6442 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6443 && simple_operand_p (rl_arg)
6444 && simple_operand_p (rr_arg))
6446 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6447 if (code == TRUTH_OR_EXPR
6448 && lcode == NE_EXPR && integer_zerop (lr_arg)
6449 && rcode == NE_EXPR && integer_zerop (rr_arg)
6450 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6451 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6452 return build2_loc (loc, NE_EXPR, truth_type,
6453 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6454 ll_arg, rl_arg),
6455 build_int_cst (TREE_TYPE (ll_arg), 0));
6457 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6458 if (code == TRUTH_AND_EXPR
6459 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6460 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6461 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6462 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6463 return build2_loc (loc, EQ_EXPR, truth_type,
6464 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6465 ll_arg, rl_arg),
6466 build_int_cst (TREE_TYPE (ll_arg), 0));
6469 /* See if the comparisons can be merged. Then get all the parameters for
6470 each side. */
6472 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6473 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6474 return 0;
6476 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6477 volatilep = 0;
6478 ll_inner = decode_field_reference (loc, &ll_arg,
6479 &ll_bitsize, &ll_bitpos, &ll_mode,
6480 &ll_unsignedp, &ll_reversep, &volatilep,
6481 &ll_mask, &ll_and_mask);
6482 lr_inner = decode_field_reference (loc, &lr_arg,
6483 &lr_bitsize, &lr_bitpos, &lr_mode,
6484 &lr_unsignedp, &lr_reversep, &volatilep,
6485 &lr_mask, &lr_and_mask);
6486 rl_inner = decode_field_reference (loc, &rl_arg,
6487 &rl_bitsize, &rl_bitpos, &rl_mode,
6488 &rl_unsignedp, &rl_reversep, &volatilep,
6489 &rl_mask, &rl_and_mask);
6490 rr_inner = decode_field_reference (loc, &rr_arg,
6491 &rr_bitsize, &rr_bitpos, &rr_mode,
6492 &rr_unsignedp, &rr_reversep, &volatilep,
6493 &rr_mask, &rr_and_mask);
6495 /* The inner operation on the lhs of each comparison must be the
6496 same if we are to be able to do anything.
6497 Then see if we have constants. If not, the same must be true for
6498 the rhs's. */
6499 if (volatilep
6500 || ll_reversep != rl_reversep
6501 || ll_inner == 0 || rl_inner == 0
6502 || ! operand_equal_p (ll_inner, rl_inner, 0))
6503 return 0;
6505 if (TREE_CODE (lr_arg) == INTEGER_CST
6506 && TREE_CODE (rr_arg) == INTEGER_CST)
6508 l_const = lr_arg, r_const = rr_arg;
6509 lr_reversep = ll_reversep;
6511 else if (lr_reversep != rr_reversep
6512 || lr_inner == 0 || rr_inner == 0
6513 || ! operand_equal_p (lr_inner, rr_inner, 0))
6514 return 0;
6515 else
6516 l_const = r_const = 0;
6518 /* If either comparison code is not correct for our logical operation,
6519 fail. However, we can convert a one-bit comparison against zero into
6520 the opposite comparison against that bit being set in the field. */
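/* E.g. '(x & 8) != 0' tests the same thing as '(x & 8) == 8', so it
   can still take part in an EQ_EXPR-style merge. */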
6522 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6523 if (lcode != wanted_code)
6525 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6527 /* Make the left operand unsigned, since we are only interested
6528 in the value of one bit. Otherwise we are doing the wrong
6529 thing below. */
6530 ll_unsignedp = 1;
6531 l_const = ll_mask;
6533 else
6534 return 0;
6537 /* This is analogous to the code for l_const above. */
6538 if (rcode != wanted_code)
6540 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6542 rl_unsignedp = 1;
6543 r_const = rl_mask;
6545 else
6546 return 0;
6549 /* See if we can find a mode that contains both fields being compared on
6550 the left. If we can't, fail. Otherwise, update all constants and masks
6551 to be relative to a field of that size. */
6552 first_bit = MIN (ll_bitpos, rl_bitpos);
6553 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6554 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6555 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6556 volatilep, &lnmode))
6557 return 0;
6559 lnbitsize = GET_MODE_BITSIZE (lnmode);
6560 lnbitpos = first_bit & ~ (lnbitsize - 1);
6561 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6562 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6564 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6566 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6567 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6570 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6571 size_int (xll_bitpos));
6572 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6573 size_int (xrl_bitpos));
6574 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6575 return 0;
6577 if (l_const)
6579 l_const = fold_convert_loc (loc, lntype, l_const);
6580 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6581 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6582 if (l_const == NULL_TREE)
6583 return 0;
6584 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6585 fold_build1_loc (loc, BIT_NOT_EXPR,
6586 lntype, ll_mask))))
6588 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6590 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6593 if (r_const)
6595 r_const = fold_convert_loc (loc, lntype, r_const);
6596 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6597 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6598 if (r_const == NULL_TREE)
6599 return 0;
6600 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6601 fold_build1_loc (loc, BIT_NOT_EXPR,
6602 lntype, rl_mask))))
6604 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6606 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6610 /* If the right sides are not constant, do the same for them. Also,
6611 disallow this optimization if a size, signedness or storage order
6612 mismatch occurs between the left and right sides. */
6613 if (l_const == 0)
6615 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6616 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6617 || ll_reversep != lr_reversep
6618 /* Make sure the two fields on the right
6619 correspond to the left without being swapped. */
6620 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6621 return 0;
6623 first_bit = MIN (lr_bitpos, rr_bitpos);
6624 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6625 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6626 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6627 volatilep, &rnmode))
6628 return 0;
6630 rnbitsize = GET_MODE_BITSIZE (rnmode);
6631 rnbitpos = first_bit & ~ (rnbitsize - 1);
6632 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6633 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6635 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6637 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6638 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6641 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6642 rntype, lr_mask),
6643 size_int (xlr_bitpos));
6644 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6645 rntype, rr_mask),
6646 size_int (xrr_bitpos));
6647 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6648 return 0;
6650 /* Make a mask that corresponds to both fields being compared.
6651 Do this for both items being compared. If the operands are the
6652 same size and the bits being compared are in the same position
6653 then we can do this by masking both and comparing the masked
6654 results. */
6655 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6656 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6657 if (lnbitsize == rnbitsize
6658 && xll_bitpos == xlr_bitpos
6659 && lnbitpos >= 0
6660 && rnbitpos >= 0)
6662 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6663 lntype, lnbitsize, lnbitpos,
6664 ll_unsignedp || rl_unsignedp, ll_reversep);
6665 if (! all_ones_mask_p (ll_mask, lnbitsize))
6666 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6668 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6669 rntype, rnbitsize, rnbitpos,
6670 lr_unsignedp || rr_unsignedp, lr_reversep);
6671 if (! all_ones_mask_p (lr_mask, rnbitsize))
6672 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6674 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6677 /* There is still another way we can do something: If both pairs of
6678 fields being compared are adjacent, we may be able to make a wider
6679 field containing them both.
6681 Note that we still must mask the lhs/rhs expressions. Furthermore,
6682 the mask must be shifted to account for the shift done by
6683 make_bit_field_ref. */
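/* E.g. two adjacent 8-bit fields at bit positions 0 and 8 can be
   fetched as a single 16-bit field at position 0 and compared in one
   operation. */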
6684 if (((ll_bitsize + ll_bitpos == rl_bitpos
6685 && lr_bitsize + lr_bitpos == rr_bitpos)
6686 || (ll_bitpos == rl_bitpos + rl_bitsize
6687 && lr_bitpos == rr_bitpos + rr_bitsize))
6688 && ll_bitpos >= 0
6689 && rl_bitpos >= 0
6690 && lr_bitpos >= 0
6691 && rr_bitpos >= 0)
6693 tree type;
6695 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6696 ll_bitsize + rl_bitsize,
6697 MIN (ll_bitpos, rl_bitpos),
6698 ll_unsignedp, ll_reversep);
6699 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6700 lr_bitsize + rr_bitsize,
6701 MIN (lr_bitpos, rr_bitpos),
6702 lr_unsignedp, lr_reversep);
6704 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6705 size_int (MIN (xll_bitpos, xrl_bitpos)));
6706 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6707 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6708 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6709 return 0;
6711 /* Convert to the smaller type before masking out unwanted bits. */
6712 type = lntype;
6713 if (lntype != rntype)
6715 if (lnbitsize > rnbitsize)
6717 lhs = fold_convert_loc (loc, rntype, lhs);
6718 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6719 type = rntype;
6721 else if (lnbitsize < rnbitsize)
6723 rhs = fold_convert_loc (loc, lntype, rhs);
6724 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6725 type = lntype;
6729 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6730 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6732 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6733 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6735 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6738 return 0;
6741 /* Handle the case of comparisons with constants. If there is something in
6742 common between the masks, those bits of the constants must be the same.
6743 If not, the condition is always false. Test for this to avoid generating
6744 incorrect code below. */
6745 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6746 if (! integer_zerop (result)
6747 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6748 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6750 if (wanted_code == NE_EXPR)
6752 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6753 return constant_boolean_node (true, truth_type);
6755 else
6757 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6758 return constant_boolean_node (false, truth_type);
6762 if (lnbitpos < 0)
6763 return 0;
6765 /* Construct the expression we will return. First get the component
6766 reference we will make. Unless the mask is all ones the width of
6767 that field, perform the mask operation. Then compare with the
6768 merged constant. */
6769 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6770 lntype, lnbitsize, lnbitpos,
6771 ll_unsignedp || rl_unsignedp, ll_reversep);
6773 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6774 if (! all_ones_mask_p (ll_mask, lnbitsize))
6775 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6777 return build2_loc (loc, wanted_code, truth_type, result,
6778 const_binop (BIT_IOR_EXPR, l_const, r_const));
6781 /* T is an integer expression that is being multiplied, divided, or reduced
6782 modulo a constant C (CODE says which operation and what kind of division
6783 or modulus). See if we can eliminate that operation by folding it with
6784 other operations already in T. WIDE_TYPE, if non-null, is a type that
6785 should be used for the computation if wider than our type.
6787 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6788 (X * 2) + (Y * 4). We must, however, be assured that either the original
6789 expression would not overflow or that overflow is undefined for the type
6790 in the language in question.
6792 If we return a non-null expression, it is an equivalent form of the
6793 original computation, but need not be in the original type.
6795 We set *STRICT_OVERFLOW_P to true if the return value depends on
6796 signed overflow being undefined. Otherwise we do not change
6797 *STRICT_OVERFLOW_P. */
6799 static tree
6800 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6801 bool *strict_overflow_p)
6803 /* To avoid exponential search depth, refuse to allow recursion past
6804 three levels. Beyond that (1) it's highly unlikely that we'll find
6805 something interesting and (2) we've probably processed it before
6806 when we built the inner expression. */
6808 static int depth;
6809 tree ret;
6811 if (depth > 3)
6812 return NULL;
6814 depth++;
6815 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6816 depth--;
6818 return ret;
6821 static tree
6822 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6823 bool *strict_overflow_p)
6825 tree type = TREE_TYPE (t);
6826 enum tree_code tcode = TREE_CODE (t);
6827 tree ctype = (wide_type != 0
6828 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6829 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6830 ? wide_type : type);
6831 tree t1, t2;
6832 int same_p = tcode == code;
6833 tree op0 = NULL_TREE, op1 = NULL_TREE;
6834 bool sub_strict_overflow_p;
6836 /* Don't deal with constants of zero here; they confuse the code below. */
6837 if (integer_zerop (c))
6838 return NULL_TREE;
6840 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6841 op0 = TREE_OPERAND (t, 0);
6843 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6844 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6846 /* Note that we need not handle conditional operations here since fold
6847 already handles those cases. So just do arithmetic here. */
6848 switch (tcode)
6850 case INTEGER_CST:
6851 /* For a constant, we can always simplify if we are a multiply
6852 or (for divide and modulus) if it is a multiple of our constant. */
6853 if (code == MULT_EXPR
6854 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6855 TYPE_SIGN (type)))
6857 tree tem = const_binop (code, fold_convert (ctype, t),
6858 fold_convert (ctype, c));
6859 /* If the multiplication overflowed, we lost information on it.
6860 See PR68142 and PR69845. */
6861 if (TREE_OVERFLOW (tem))
6862 return NULL_TREE;
6863 return tem;
6865 break;
6867 CASE_CONVERT: case NON_LVALUE_EXPR:
6868 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6869 break;
6870 /* If op0 is an expression ... */
6871 if ((COMPARISON_CLASS_P (op0)
6872 || UNARY_CLASS_P (op0)
6873 || BINARY_CLASS_P (op0)
6874 || VL_EXP_CLASS_P (op0)
6875 || EXPRESSION_CLASS_P (op0))
6876 /* ... and has wrapping overflow, and its type is smaller
6877 than ctype, then we cannot pass through as widening. */
6878 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6879 && (TYPE_PRECISION (ctype)
6880 > TYPE_PRECISION (TREE_TYPE (op0))))
6881 /* ... or this is a truncation (t is narrower than op0),
6882 then we cannot pass through this narrowing. */
6883 || (TYPE_PRECISION (type)
6884 < TYPE_PRECISION (TREE_TYPE (op0)))
6885 /* ... or signedness changes for division or modulus,
6886 then we cannot pass through this conversion. */
6887 || (code != MULT_EXPR
6888 && (TYPE_UNSIGNED (ctype)
6889 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6890 /* ... or has undefined overflow while the converted to
6891 type has not, we cannot do the operation in the inner type
6892 as that would introduce undefined overflow. */
6893 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6894 && !TYPE_OVERFLOW_UNDEFINED (type))))
6895 break;
6897 /* Pass the constant down and see if we can make a simplification. If
6898 we can, replace this expression with the inner simplification for
6899 possible later conversion to our or some other type. */
6900 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6901 && TREE_CODE (t2) == INTEGER_CST
6902 && !TREE_OVERFLOW (t2)
6903 && (t1 = extract_muldiv (op0, t2, code,
6904 code == MULT_EXPR ? ctype : NULL_TREE,
6905 strict_overflow_p)) != 0)
6906 return t1;
6907 break;
6909 case ABS_EXPR:
6910 /* If widening the type changes it from signed to unsigned, then we
6911 must avoid building ABS_EXPR itself as unsigned. */
6912 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6914 tree cstype = (*signed_type_for) (ctype);
6915 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6916 != 0)
6918 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6919 return fold_convert (ctype, t1);
6921 break;
6923 /* If the constant is negative, we cannot simplify this. */
6924 if (tree_int_cst_sgn (c) == -1)
6925 break;
6926 /* FALLTHROUGH */
6927 case NEGATE_EXPR:
6928 /* For division and modulus, type can't be unsigned, as e.g.
6929 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6930 For signed types, even with wrapping overflow, this is fine. */
6931 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6932 break;
6933 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6934 != 0)
6935 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6936 break;
6938 case MIN_EXPR: case MAX_EXPR:
6939 /* If widening the type changes the signedness, then we can't perform
6940 this optimization as that changes the result. */
6941 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6942 break;
6944 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6945 sub_strict_overflow_p = false;
6946 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6947 &sub_strict_overflow_p)) != 0
6948 && (t2 = extract_muldiv (op1, c, code, wide_type,
6949 &sub_strict_overflow_p)) != 0)
6951 if (tree_int_cst_sgn (c) < 0)
6952 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6953 if (sub_strict_overflow_p)
6954 *strict_overflow_p = true;
6955 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6956 fold_convert (ctype, t2));
6958 break;
6960 case LSHIFT_EXPR: case RSHIFT_EXPR:
6961 /* If the second operand is constant, this is a multiplication
6962 or floor division by a power of two, so we can treat it that
6963 way unless the multiplier or divisor overflows. Signed
6964 left-shift overflow is implementation-defined rather than
6965 undefined in C90, so do not convert signed left shift into
6966 multiplication. */
6967 if (TREE_CODE (op1) == INTEGER_CST
6968 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6969 /* const_binop may not detect overflow correctly,
6970 so check for it explicitly here. */
6971 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6972 wi::to_wide (op1))
6973 && (t1 = fold_convert (ctype,
6974 const_binop (LSHIFT_EXPR, size_one_node,
6975 op1))) != 0
6976 && !TREE_OVERFLOW (t1))
6977 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6978 ? MULT_EXPR : FLOOR_DIV_EXPR,
6979 ctype,
6980 fold_convert (ctype, op0),
6981 t1),
6982 c, code, wide_type, strict_overflow_p);
6983 break;
6985 case PLUS_EXPR: case MINUS_EXPR:
6986 /* See if we can eliminate the operation on both sides. If we can, we
6987 can return a new PLUS or MINUS. If we can't, the only remaining
6988 cases where we can do anything are if the second operand is a
6989 constant. */
6990 sub_strict_overflow_p = false;
6991 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6992 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6993 if (t1 != 0 && t2 != 0
6994 && TYPE_OVERFLOW_WRAPS (ctype)
6995 && (code == MULT_EXPR
6996 /* If not multiplication, we can only do this if both operands
6997 are divisible by c. */
6998 || (multiple_of_p (ctype, op0, c)
6999 && multiple_of_p (ctype, op1, c))))
7001 if (sub_strict_overflow_p)
7002 *strict_overflow_p = true;
7003 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7004 fold_convert (ctype, t2));
7007 /* If this was a subtraction, negate OP1 and set it to be an addition.
7008 This simplifies the logic below. */
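/* E.g., assuming wrapping semantics, (X - 4) * 3 is treated as
   (X + (-4)) * 3 and distributes to X * 3 + (-12) below. */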
7009 if (tcode == MINUS_EXPR)
7011 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7012 /* If OP1 was not easily negatable, the constant may be OP0. */
7013 if (TREE_CODE (op0) == INTEGER_CST)
7015 std::swap (op0, op1);
7016 std::swap (t1, t2);
7020 if (TREE_CODE (op1) != INTEGER_CST)
7021 break;
7023 /* If either OP1 or C is negative, this optimization is not safe for
7024 some of the division and remainder types while for others we need
7025 to change the code. */
7026 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7028 if (code == CEIL_DIV_EXPR)
7029 code = FLOOR_DIV_EXPR;
7030 else if (code == FLOOR_DIV_EXPR)
7031 code = CEIL_DIV_EXPR;
7032 else if (code != MULT_EXPR
7033 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7034 break;
7037 /* If it's a multiply or a division/modulus operation of a multiple
7038 of our constant, do the operation and verify it doesn't overflow. */
7039 if (code == MULT_EXPR
7040 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7041 TYPE_SIGN (type)))
7043 op1 = const_binop (code, fold_convert (ctype, op1),
7044 fold_convert (ctype, c));
7045 /* We allow the constant to overflow with wrapping semantics. */
7046 if (op1 == 0
7047 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7048 break;
7050 else
7051 break;
7053 /* If we have an unsigned type, we cannot widen the operation since it
7054 will change the result if the original computation overflowed. */
7055 if (TYPE_UNSIGNED (ctype) && ctype != type)
7056 break;
7058 /* The last case is if this is a multiply. In that case, we can
7059 apply the distributive law to commute the multiply and addition
7060 if the multiplication of the constants doesn't overflow
7061 and overflow is defined. With undefined overflow
7062 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7063 But fold_plusminus_mult_expr would factor back any power-of-two
7064 value so do not distribute in the first place in this case. */
7065 if (code == MULT_EXPR
7066 && TYPE_OVERFLOW_WRAPS (ctype)
7067 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7068 return fold_build2 (tcode, ctype,
7069 fold_build2 (code, ctype,
7070 fold_convert (ctype, op0),
7071 fold_convert (ctype, c)),
7072 op1);
7074 break;
7076 case MULT_EXPR:
7077 /* We have a special case here if we are doing something like
7078 (C * 8) % 4 since we know that's zero. */
7079 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7080 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7081 /* If the multiplication can overflow we cannot optimize this. */
7082 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7083 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7084 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7085 TYPE_SIGN (type)))
7087 *strict_overflow_p = true;
7088 return omit_one_operand (type, integer_zero_node, op0);
7091 /* ... fall through ... */
7093 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7094 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7095 /* If we can extract our operation from the LHS, do so and return a
7096 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7097 do something only if the second operand is a constant. */
7098 if (same_p
7099 && TYPE_OVERFLOW_WRAPS (ctype)
7100 && (t1 = extract_muldiv (op0, c, code, wide_type,
7101 strict_overflow_p)) != 0)
7102 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7103 fold_convert (ctype, op1));
7104 else if (tcode == MULT_EXPR && code == MULT_EXPR
7105 && TYPE_OVERFLOW_WRAPS (ctype)
7106 && (t1 = extract_muldiv (op1, c, code, wide_type,
7107 strict_overflow_p)) != 0)
7108 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7109 fold_convert (ctype, t1));
7110 else if (TREE_CODE (op1) != INTEGER_CST)
7111 return 0;
7113 /* If these are the same operation types, we can associate them
7114 assuming no overflow. */
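/* E.g. (X * 4) * 8 becomes X * 32, provided 4 * 8 does not overflow
   the computation type. */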
7115 if (tcode == code)
7117 bool overflow_p = false;
7118 wi::overflow_type overflow_mul;
7119 signop sign = TYPE_SIGN (ctype);
7120 unsigned prec = TYPE_PRECISION (ctype);
7121 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7122 wi::to_wide (c, prec),
7123 sign, &overflow_mul);
7124 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7125 if (overflow_mul
7126 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7127 overflow_p = true;
7128 if (!overflow_p)
7129 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7130 wide_int_to_tree (ctype, mul));
7133 /* If these operations "cancel" each other, we have the main
7134 optimizations of this pass, which occur when either constant is a
7135 multiple of the other, in which case we replace this with an
7136 operation of either CODE or TCODE.
7138 If we have an unsigned type, we cannot do this since it will change
7139 the result if the original computation overflowed. */
7140 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7141 && !TYPE_OVERFLOW_SANITIZED (ctype)
7142 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7143 || (tcode == MULT_EXPR
7144 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7145 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7146 && code != MULT_EXPR)))
7148 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7149 TYPE_SIGN (type)))
7151 *strict_overflow_p = true;
7152 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7153 fold_convert (ctype,
7154 const_binop (TRUNC_DIV_EXPR,
7155 op1, c)));
7157 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7158 TYPE_SIGN (type)))
7160 *strict_overflow_p = true;
7161 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7162 fold_convert (ctype,
7163 const_binop (TRUNC_DIV_EXPR,
7164 c, op1)));
7167 break;
7169 default:
7170 break;
7173 return 0;
7176 /* Return a node which has the indicated constant VALUE (either 0 or
7177 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7178 and is of the indicated TYPE. */
7180 tree
7181 constant_boolean_node (bool value, tree type)
7183 if (type == integer_type_node)
7184 return value ? integer_one_node : integer_zero_node;
7185 else if (type == boolean_type_node)
7186 return value ? boolean_true_node : boolean_false_node;
7187 else if (TREE_CODE (type) == VECTOR_TYPE)
7188 return build_vector_from_val (type,
7189 build_int_cst (TREE_TYPE (type),
7190 value ? -1 : 0));
7191 else
7192 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7196 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7197 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7198 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7199 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7200 COND is the first argument to CODE; otherwise (as in the example
7201 given here), it is the second argument. TYPE is the type of the
7202 original expression. Return NULL_TREE if no simplification is
7203 possible. */
7205 static tree
7206 fold_binary_op_with_conditional_arg (location_t loc,
7207 enum tree_code code,
7208 tree type, tree op0, tree op1,
7209 tree cond, tree arg, int cond_first_p)
7211 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7212 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7213 tree test, true_value, false_value;
7214 tree lhs = NULL_TREE;
7215 tree rhs = NULL_TREE;
7216 enum tree_code cond_code = COND_EXPR;
7218 /* Do not move possibly trapping operations into the conditional as this
7219 pessimizes code and causes gimplification issues when applied late. */
7220 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7221 ANY_INTEGRAL_TYPE_P (type)
7222 && TYPE_OVERFLOW_TRAPS (type), op1))
7223 return NULL_TREE;
7225 if (TREE_CODE (cond) == COND_EXPR
7226 || TREE_CODE (cond) == VEC_COND_EXPR)
7228 test = TREE_OPERAND (cond, 0);
7229 true_value = TREE_OPERAND (cond, 1);
7230 false_value = TREE_OPERAND (cond, 2);
7231 /* If this arm is an expression that throws (and hence has void
7232 type), it does not make sense to try to perform a logical or
7233 arithmetic operation involving it. */
7234 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7235 lhs = true_value;
7236 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7237 rhs = false_value;
7239 else if (!(TREE_CODE (type) != VECTOR_TYPE
7240 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7242 tree testtype = TREE_TYPE (cond);
7243 test = cond;
7244 true_value = constant_boolean_node (true, testtype);
7245 false_value = constant_boolean_node (false, testtype);
7247 else
7248 /* Detect the case of mixing vector and scalar types - bail out. */
7249 return NULL_TREE;
7251 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7252 cond_code = VEC_COND_EXPR;
7254 /* This transformation is only worthwhile if we don't have to wrap ARG
7255 in a SAVE_EXPR and the operation can be simplified without recursing
7256 on at least one of the branches once it's pushed inside the COND_EXPR. */
7257 if (!TREE_CONSTANT (arg)
7258 && (TREE_SIDE_EFFECTS (arg)
7259 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7260 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7261 return NULL_TREE;
7263 arg = fold_convert_loc (loc, arg_type, arg);
7264 if (lhs == 0)
7266 true_value = fold_convert_loc (loc, cond_type, true_value);
7267 if (cond_first_p)
7268 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7269 else
7270 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7272 if (rhs == 0)
7274 false_value = fold_convert_loc (loc, cond_type, false_value);
7275 if (cond_first_p)
7276 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7277 else
7278 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7281 /* Check that we have simplified at least one of the branches. */
7282 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7283 return NULL_TREE;
7285 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
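/* A minimal instance of the transform above, with CODE == PLUS_EXPR,
   ARG == a and COND == (b ? x : y):

     a + (b ? x : y)  -->  b ? (a + x) : (a + y)

   and it is only emitted when at least one of a + x and a + y folds
   to a constant, per the TREE_CONSTANT checks above.  */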
7289 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7291 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7292 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7293 if ARG - ZERO_ARG is the same as ARG.
7295 If ARG is NULL, check for any value of type TYPE.
7297 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7298 and finite. The problematic cases are when X is zero, and its mode
7299 has signed zeros. In the case of rounding towards -infinity,
7300 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7301 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7303 bool
7304 fold_real_zero_addition_p (const_tree type, const_tree arg,
7305 const_tree zero_arg, int negate)
7307 if (!real_zerop (zero_arg))
7308 return false;
7310 /* Don't allow the fold with -fsignaling-nans. */
7311 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7312 return false;
7314 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7315 if (!HONOR_SIGNED_ZEROS (type))
7316 return true;
7318 /* There is no case that is safe for all rounding modes. */
7319 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7320 return false;
7322 /* In a vector or complex, we would need to check the sign of all zeros. */
7323 if (TREE_CODE (zero_arg) == VECTOR_CST)
7324 zero_arg = uniform_vector_p (zero_arg);
7325 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7326 return false;
7328 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7329 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7330 negate = !negate;
7332 /* The mode has signed zeros, and we have to honor their sign.
7333 In this situation, there are only two cases we can return true for.
7334 (i) X - 0 is the same as X with default rounding.
7335 (ii) X + 0 is X when X can't possibly be -0.0. */
7336 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
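/* E.g. with !HONOR_SIGNED_ZEROS (type) the fold x + 0.0 -> x is always
   allowed, whereas once signed zeros are honored only x - 0.0 -> x
   survives under default rounding (case (i) above): (-0.0) + 0.0
   rounds to +0.0 and would flip the sign of a negative-zero x.  */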
7339 /* Subroutine of match.pd that optimizes comparisons of a division by
7340 a nonzero integer constant against an integer constant, i.e.
7341 X/C1 op C2.
7343 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7344 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7346 enum tree_code
7347 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7348 tree *hi, bool *neg_overflow)
7350 tree prod, tmp, type = TREE_TYPE (c1);
7351 signop sign = TYPE_SIGN (type);
7352 wi::overflow_type overflow;
7354 /* We have to do this the hard way to detect unsigned overflow.
7355 prod = int_const_binop (MULT_EXPR, c1, c2); */
7356 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7357 prod = force_fit_type (type, val, -1, overflow);
7358 *neg_overflow = false;
7360 if (sign == UNSIGNED)
7362 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7363 *lo = prod;
7365 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7366 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7367 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7369 else if (tree_int_cst_sgn (c1) >= 0)
7371 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7372 switch (tree_int_cst_sgn (c2))
7374 case -1:
7375 *neg_overflow = true;
7376 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7377 *hi = prod;
7378 break;
7380 case 0:
7381 *lo = fold_negate_const (tmp, type);
7382 *hi = tmp;
7383 break;
7385 case 1:
7386 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7387 *lo = prod;
7388 break;
7390 default:
7391 gcc_unreachable ();
7394 else
7396 /* A negative divisor reverses the relational operators. */
7397 code = swap_tree_comparison (code);
7399 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7400 switch (tree_int_cst_sgn (c2))
7402 case -1:
7403 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7404 *lo = prod;
7405 break;
7407 case 0:
7408 *hi = fold_negate_const (tmp, type);
7409 *lo = tmp;
7410 break;
7412 case 1:
7413 *neg_overflow = true;
7414 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7415 *hi = prod;
7416 break;
7418 default:
7419 gcc_unreachable ();
7423 if (code != EQ_EXPR && code != NE_EXPR)
7424 return code;
7426 if (TREE_OVERFLOW (*lo)
7427 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7428 *lo = NULL_TREE;
7429 if (TREE_OVERFLOW (*hi)
7430 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7431 *hi = NULL_TREE;
7433 return code;
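/* Worked example for an unsigned type: X / 4 == 3 holds exactly for
   X in [12, 15], so *lo = c1 * c2 = 12 and *hi = *lo + (c1 - 1) = 15,
   letting the caller rewrite the division as the range check
   12 <= X && X <= 15.  A negative divisor is handled by first
   swapping the comparison, as above.  */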
7437 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7438 equality/inequality test, then return a simplified form of the test
7439 using a sign test. Otherwise return NULL. RESULT_TYPE is the
7440 desired result type. */
7442 static tree
7443 fold_single_bit_test_into_sign_test (location_t loc,
7444 enum tree_code code, tree arg0, tree arg1,
7445 tree result_type)
7447 /* If this is testing a single bit, we can optimize the test. */
7448 if ((code == NE_EXPR || code == EQ_EXPR)
7449 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7450 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7452 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7453 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7454 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7456 if (arg00 != NULL_TREE
7457 /* This is only a win if casting to a signed type is cheap,
7458 i.e. when arg00's type is not a partial mode. */
7459 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7461 tree stype = signed_type_for (TREE_TYPE (arg00));
7462 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7463 result_type,
7464 fold_convert_loc (loc, stype, arg00),
7465 build_int_cst (stype, 0));
7469 return NULL_TREE;
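/* E.g. for a 32-bit unsigned A, (A & 0x80000000) != 0 tests the sign
   bit and becomes (int) A < 0, while (A & 0x80000000) == 0 becomes
   (int) A >= 0.  */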
7472 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7473 equality/inequality test, then return a simplified form of
7474 the test using shifts and logical operations. Otherwise return
7475 NULL. RESULT_TYPE is the desired result type. */
7477 tree
7478 fold_single_bit_test (location_t loc, enum tree_code code,
7479 tree arg0, tree arg1, tree result_type)
7481 /* If this is testing a single bit, we can optimize the test. */
7482 if ((code == NE_EXPR || code == EQ_EXPR)
7483 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7484 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7486 tree inner = TREE_OPERAND (arg0, 0);
7487 tree type = TREE_TYPE (arg0);
7488 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7489 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7490 int ops_unsigned;
7491 tree signed_type, unsigned_type, intermediate_type;
7492 tree tem, one;
7494 /* First, see if we can fold the single bit test into a sign-bit
7495 test. */
7496 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7497 result_type);
7498 if (tem)
7499 return tem;
7501 /* Otherwise we have (A & C) != 0 where C is a single bit,
7502 convert that into ((A >> C2) & 1), where C2 = log2(C).
7503 Similarly for (A & C) == 0. */
7505 /* If INNER is a right shift of a constant and it plus BITNUM does
7506 not overflow, adjust BITNUM and INNER. */
7507 if (TREE_CODE (inner) == RSHIFT_EXPR
7508 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7509 && bitnum < TYPE_PRECISION (type)
7510 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7511 TYPE_PRECISION (type) - bitnum))
7513 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7514 inner = TREE_OPERAND (inner, 0);
7517 /* If we are going to be able to omit the AND below, we must do our
7518 operations as unsigned. If we must use the AND, we have a choice.
7519 Normally unsigned is faster, but for some machines signed is. */
7520 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7521 && !flag_syntax_only) ? 0 : 1;
7523 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7524 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7525 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7526 inner = fold_convert_loc (loc, intermediate_type, inner);
7528 if (bitnum != 0)
7529 inner = build2 (RSHIFT_EXPR, intermediate_type,
7530 inner, size_int (bitnum));
7532 one = build_int_cst (intermediate_type, 1);
7534 if (code == EQ_EXPR)
7535 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7537 /* Put the AND last so it can combine with more things. */
7538 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7540 /* Make sure to return the proper type. */
7541 inner = fold_convert_loc (loc, result_type, inner);
7543 return inner;
7545 return NULL_TREE;
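/* E.g. (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0 becomes
   (((A >> 3) ^ 1) & 1); the AND is kept outermost so later folding
   can combine it with neighboring operations.  */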
7548 /* Test whether it is preferable to swap two operands, ARG0 and
7549 ARG1, for example because ARG0 is an integer constant and ARG1
7550 isn't. */
7552 bool
7553 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7555 if (CONSTANT_CLASS_P (arg1))
7556 return 0;
7557 if (CONSTANT_CLASS_P (arg0))
7558 return 1;
7560 STRIP_NOPS (arg0);
7561 STRIP_NOPS (arg1);
7563 if (TREE_CONSTANT (arg1))
7564 return 0;
7565 if (TREE_CONSTANT (arg0))
7566 return 1;
7568 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7569 for commutative and comparison operators. Ensuring a canonical
7570 form allows the optimizers to find additional redundancies without
7571 having to explicitly check for both orderings. */
7572 if (TREE_CODE (arg0) == SSA_NAME
7573 && TREE_CODE (arg1) == SSA_NAME
7574 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7575 return 1;
7577 /* Put SSA_NAMEs last. */
7578 if (TREE_CODE (arg1) == SSA_NAME)
7579 return 0;
7580 if (TREE_CODE (arg0) == SSA_NAME)
7581 return 1;
7583 /* Put variables last. */
7584 if (DECL_P (arg1))
7585 return 0;
7586 if (DECL_P (arg0))
7587 return 1;
7589 return 0;
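/* E.g. tree_swap_operands_p (5, x) is true, so callers canonicalize
   commutative expressions like 5 + x to x + 5, keeping constants
   second and SSA names ordered by version number.  */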
7593 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7594 means A >= Y && A != MAX, but in this case we know that
7595 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7597 static tree
7598 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7600 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7602 if (TREE_CODE (bound) == LT_EXPR)
7603 a = TREE_OPERAND (bound, 0);
7604 else if (TREE_CODE (bound) == GT_EXPR)
7605 a = TREE_OPERAND (bound, 1);
7606 else
7607 return NULL_TREE;
7609 typea = TREE_TYPE (a);
7610 if (!INTEGRAL_TYPE_P (typea)
7611 && !POINTER_TYPE_P (typea))
7612 return NULL_TREE;
7614 if (TREE_CODE (ineq) == LT_EXPR)
7616 a1 = TREE_OPERAND (ineq, 1);
7617 y = TREE_OPERAND (ineq, 0);
7619 else if (TREE_CODE (ineq) == GT_EXPR)
7621 a1 = TREE_OPERAND (ineq, 0);
7622 y = TREE_OPERAND (ineq, 1);
7624 else
7625 return NULL_TREE;
7627 if (TREE_TYPE (a1) != typea)
7628 return NULL_TREE;
7630 if (POINTER_TYPE_P (typea))
7632 /* Convert the pointers to integers before taking the difference. */
7633 tree ta = fold_convert_loc (loc, ssizetype, a);
7634 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7635 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7637 else
7638 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7640 if (!diff || !integer_onep (diff))
7641 return NULL_TREE;
7643 return fold_build2_loc (loc, GE_EXPR, type, a, y);
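/* Worked instance: for BOUND == (a < x) and INEQ == (a + 1 > y) the
   difference (a + 1) - a folds to 1, so the combined test becomes
   a >= y; dropping the a != MAX part of a + 1 > y is safe here
   because a < x <= MAX already excludes a == MAX.  */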
7646 /* Fold a sum or difference of at least one multiplication.
7647 Returns the folded tree or NULL if no simplification could be made. */
7649 static tree
7650 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7651 tree arg0, tree arg1)
7653 tree arg00, arg01, arg10, arg11;
7654 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7656 /* (A * C) +- (B * C) -> (A+-B) * C.
7657 (A * C) +- A -> A * (C+-1).
7658 We are most concerned about the case where C is a constant,
7659 but other combinations show up during loop reduction. Since
7660 it is not difficult, try all four possibilities. */
7662 if (TREE_CODE (arg0) == MULT_EXPR)
7664 arg00 = TREE_OPERAND (arg0, 0);
7665 arg01 = TREE_OPERAND (arg0, 1);
7667 else if (TREE_CODE (arg0) == INTEGER_CST)
7669 arg00 = build_one_cst (type);
7670 arg01 = arg0;
7672 else
7674 /* We cannot generate constant 1 for fract. */
7675 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7676 return NULL_TREE;
7677 arg00 = arg0;
7678 arg01 = build_one_cst (type);
7680 if (TREE_CODE (arg1) == MULT_EXPR)
7682 arg10 = TREE_OPERAND (arg1, 0);
7683 arg11 = TREE_OPERAND (arg1, 1);
7685 else if (TREE_CODE (arg1) == INTEGER_CST)
7687 arg10 = build_one_cst (type);
7688 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7689 the purpose of this canonicalization. */
7690 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7691 && negate_expr_p (arg1)
7692 && code == PLUS_EXPR)
7694 arg11 = negate_expr (arg1);
7695 code = MINUS_EXPR;
7697 else
7698 arg11 = arg1;
7700 else
7702 /* We cannot generate constant 1 for fract. */
7703 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7704 return NULL_TREE;
7705 arg10 = arg1;
7706 arg11 = build_one_cst (type);
7708 same = NULL_TREE;
7710 /* Prefer factoring a common non-constant. */
7711 if (operand_equal_p (arg00, arg10, 0))
7712 same = arg00, alt0 = arg01, alt1 = arg11;
7713 else if (operand_equal_p (arg01, arg11, 0))
7714 same = arg01, alt0 = arg00, alt1 = arg10;
7715 else if (operand_equal_p (arg00, arg11, 0))
7716 same = arg00, alt0 = arg01, alt1 = arg10;
7717 else if (operand_equal_p (arg01, arg10, 0))
7718 same = arg01, alt0 = arg00, alt1 = arg11;
7720 /* No identical multiplicands; see if we can find a common
7721 power-of-two factor in non-power-of-two multiplies. This
7722 can help in multi-dimensional array access. */
7723 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7725 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7726 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7727 HOST_WIDE_INT tmp;
7728 bool swap = false;
7729 tree maybe_same;
7731 /* Move min of absolute values to int11. */
7732 if (absu_hwi (int01) < absu_hwi (int11))
7734 tmp = int01, int01 = int11, int11 = tmp;
7735 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7736 maybe_same = arg01;
7737 swap = true;
7739 else
7740 maybe_same = arg11;
7742 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7743 if (factor > 1
7744 && pow2p_hwi (factor)
7745 && (int01 & (factor - 1)) == 0
7746 /* The remainder should not be a constant, otherwise we
7747 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7748 would increase the number of multiplications needed. */
7749 && TREE_CODE (arg10) != INTEGER_CST)
7751 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7752 build_int_cst (TREE_TYPE (arg00),
7753 int01 / int11));
7754 alt1 = arg10;
7755 same = maybe_same;
7756 if (swap)
7757 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7761 if (!same)
7762 return NULL_TREE;
7764 if (! ANY_INTEGRAL_TYPE_P (type)
7765 || TYPE_OVERFLOW_WRAPS (type)
7766 /* We are neither factoring zero nor minus one. */
7767 || TREE_CODE (same) == INTEGER_CST)
7768 return fold_build2_loc (loc, MULT_EXPR, type,
7769 fold_build2_loc (loc, code, type,
7770 fold_convert_loc (loc, type, alt0),
7771 fold_convert_loc (loc, type, alt1)),
7772 fold_convert_loc (loc, type, same));
7774 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7775 same may be minus one and thus the multiplication may overflow. Perform
7776 the sum operation in an unsigned type. */
7777 tree utype = unsigned_type_for (type);
7778 tree tem = fold_build2_loc (loc, code, utype,
7779 fold_convert_loc (loc, utype, alt0),
7780 fold_convert_loc (loc, utype, alt1));
7781 /* If the sum evaluated to a constant that is not -INF, the
7782 multiplication cannot overflow. */
7783 if (TREE_CODE (tem) == INTEGER_CST
7784 && (wi::to_wide (tem)
7785 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7786 return fold_build2_loc (loc, MULT_EXPR, type,
7787 fold_convert (type, tem), same);
7789 /* Do not resort to unsigned multiplication because
7790 we lose the no-overflow property of the expression. */
7791 return NULL_TREE;
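/* Two instances of the factorings above:

     a * c + b * c  ->  (a + b) * c        (common multiplicand)
     i * 8 + j * 4  ->  (i * 2 + j) * 4    (common power-of-two factor)

   where the second form is what helps multi-dimensional array
   accesses; i * 4 + 2 is deliberately left alone, since rewriting it
   as (i * 2 + 1) * 2 would add a multiplication.  */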
7794 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7795 specified by EXPR into the buffer PTR of length LEN bytes.
7796 Return the number of bytes placed in the buffer, or zero
7797 upon failure. */
7799 static int
7800 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7802 tree type = TREE_TYPE (expr);
7803 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7804 int byte, offset, word, words;
7805 unsigned char value;
7807 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7808 return 0;
7809 if (off == -1)
7810 off = 0;
7812 if (ptr == NULL)
7813 /* Dry run. */
7814 return MIN (len, total_bytes - off);
7816 words = total_bytes / UNITS_PER_WORD;
7818 for (byte = 0; byte < total_bytes; byte++)
7820 int bitpos = byte * BITS_PER_UNIT;
7821 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7822 number of bytes. */
7823 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7825 if (total_bytes > UNITS_PER_WORD)
7827 word = byte / UNITS_PER_WORD;
7828 if (WORDS_BIG_ENDIAN)
7829 word = (words - 1) - word;
7830 offset = word * UNITS_PER_WORD;
7831 if (BYTES_BIG_ENDIAN)
7832 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7833 else
7834 offset += byte % UNITS_PER_WORD;
7836 else
7837 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7838 if (offset >= off && offset - off < len)
7839 ptr[offset - off] = value;
7841 return MIN (len, total_bytes - off);
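/* E.g. encoding the 32-bit INTEGER_CST 0x01020304 stores the bytes
   { 0x04, 0x03, 0x02, 0x01 } at PTR on a little-endian target and
   { 0x01, 0x02, 0x03, 0x04 } on a big-endian one; OFF selects a
   sub-run of those target-order bytes.  */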
7845 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7846 specified by EXPR into the buffer PTR of length LEN bytes.
7847 Return the number of bytes placed in the buffer, or zero
7848 upon failure. */
7850 static int
7851 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7853 tree type = TREE_TYPE (expr);
7854 scalar_mode mode = SCALAR_TYPE_MODE (type);
7855 int total_bytes = GET_MODE_SIZE (mode);
7856 FIXED_VALUE_TYPE value;
7857 tree i_value, i_type;
7859 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7860 return 0;
7862 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7864 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7865 return 0;
7867 value = TREE_FIXED_CST (expr);
7868 i_value = double_int_to_tree (i_type, value.data);
7870 return native_encode_int (i_value, ptr, len, off);
7874 /* Subroutine of native_encode_expr. Encode the REAL_CST
7875 specified by EXPR into the buffer PTR of length LEN bytes.
7876 Return the number of bytes placed in the buffer, or zero
7877 upon failure. */
7879 static int
7880 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7882 tree type = TREE_TYPE (expr);
7883 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7884 int byte, offset, word, words, bitpos;
7885 unsigned char value;
7887 /* There are always 32 bits in each long, no matter the size of
7888 the host's long. We handle floating point representations with
7889 up to 192 bits. */
7890 long tmp[6];
7892 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7893 return 0;
7894 if (off == -1)
7895 off = 0;
7897 if (ptr == NULL)
7898 /* Dry run. */
7899 return MIN (len, total_bytes - off);
7901 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7903 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7905 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7906 bitpos += BITS_PER_UNIT)
7908 byte = (bitpos / BITS_PER_UNIT) & 3;
7909 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7911 if (UNITS_PER_WORD < 4)
7913 word = byte / UNITS_PER_WORD;
7914 if (WORDS_BIG_ENDIAN)
7915 word = (words - 1) - word;
7916 offset = word * UNITS_PER_WORD;
7917 if (BYTES_BIG_ENDIAN)
7918 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7919 else
7920 offset += byte % UNITS_PER_WORD;
7922 else
7924 offset = byte;
7925 if (BYTES_BIG_ENDIAN)
7927 /* Reverse bytes within each long, or within the entire float
7928 if it's smaller than a long (for HFmode). */
7929 offset = MIN (3, total_bytes - 1) - offset;
7930 gcc_assert (offset >= 0);
7933 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7934 if (offset >= off
7935 && offset - off < len)
7936 ptr[offset - off] = value;
7938 return MIN (len, total_bytes - off);
7941 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7942 specified by EXPR into the buffer PTR of length LEN bytes.
7943 Return the number of bytes placed in the buffer, or zero
7944 upon failure. */
7946 static int
7947 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7949 int rsize, isize;
7950 tree part;
7952 part = TREE_REALPART (expr);
7953 rsize = native_encode_expr (part, ptr, len, off);
7954 if (off == -1 && rsize == 0)
7955 return 0;
7956 part = TREE_IMAGPART (expr);
7957 if (off != -1)
7958 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7959 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7960 len - rsize, off);
7961 if (off == -1 && isize != rsize)
7962 return 0;
7963 return rsize + isize;
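/* The real part is encoded first and the imaginary part directly
   after it, so e.g. a complex double occupies 16 consecutive bytes,
   real part in the first 8.  */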
7966 /* Like native_encode_vector, but only encode the first COUNT elements.
7967 The other arguments are as for native_encode_vector. */
7969 static int
7970 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7971 int off, unsigned HOST_WIDE_INT count)
7973 tree itype = TREE_TYPE (TREE_TYPE (expr));
7974 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7975 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7977 /* This is the only case in which elements can be smaller than a byte.
7978 Element 0 is always in the lsb of the containing byte. */
7979 unsigned int elt_bits = TYPE_PRECISION (itype);
7980 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7981 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7982 return 0;
7984 if (off == -1)
7985 off = 0;
7987 /* Zero the buffer and then set bits later where necessary. */
7988 int extract_bytes = MIN (len, total_bytes - off);
7989 if (ptr)
7990 memset (ptr, 0, extract_bytes);
7992 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7993 unsigned int first_elt = off * elts_per_byte;
7994 unsigned int extract_elts = extract_bytes * elts_per_byte;
7995 for (unsigned int i = 0; i < extract_elts; ++i)
7997 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7998 if (TREE_CODE (elt) != INTEGER_CST)
7999 return 0;
8001 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
8003 unsigned int bit = i * elt_bits;
8004 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
8007 return extract_bytes;
8010 int offset = 0;
8011 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
8012 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
8014 if (off >= size)
8016 off -= size;
8017 continue;
8019 tree elem = VECTOR_CST_ELT (expr, i);
8020 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
8021 len - offset, off);
8022 if ((off == -1 && res != size) || res == 0)
8023 return 0;
8024 offset += res;
8025 if (offset >= len)
8026 return (off == -1 && i < count - 1) ? 0 : offset;
8027 if (off != -1)
8028 off = 0;
8030 return offset;
8033 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
8034 specified by EXPR into the buffer PTR of length LEN bytes.
8035 Return the number of bytes placed in the buffer, or zero
8036 upon failure. */
8038 static int
8039 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8041 unsigned HOST_WIDE_INT count;
8042 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
8043 return 0;
8044 return native_encode_vector_part (expr, ptr, len, off, count);
8048 /* Subroutine of native_encode_expr. Encode the STRING_CST
8049 specified by EXPR into the buffer PTR of length LEN bytes.
8050 Return the number of bytes placed in the buffer, or zero
8051 upon failure. */
8053 static int
8054 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8056 tree type = TREE_TYPE (expr);
8058 /* Wide-char strings are encoded in target byte order, so natively
8059 encoding them is trivial. */
8060 if (BITS_PER_UNIT != CHAR_BIT
8061 || TREE_CODE (type) != ARRAY_TYPE
8062 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8063 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8064 return 0;
8066 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8067 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8068 return 0;
8069 if (off == -1)
8070 off = 0;
8071 len = MIN (total_bytes - off, len);
8072 if (ptr == NULL)
8073 /* Dry run. */;
8074 else
8076 int written = 0;
8077 if (off < TREE_STRING_LENGTH (expr))
8079 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8080 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8082 memset (ptr + written, 0, len - written);
8084 return len;
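/* E.g. the STRING_CST "ab" of type char[4] encodes as the bytes
   { 'a', 'b', 0, 0 }: anything past TREE_STRING_LENGTH is zero
   filled by the memset above.  */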
8088 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8089 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8090 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8091 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8092 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8093 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8094 Return the number of bytes placed in the buffer, or zero upon failure. */
8096 int
8097 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8099 /* We don't support starting at a negative offset, and -1 is special. */
8100 if (off < -1)
8101 return 0;
8103 switch (TREE_CODE (expr))
8105 case INTEGER_CST:
8106 return native_encode_int (expr, ptr, len, off);
8108 case REAL_CST:
8109 return native_encode_real (expr, ptr, len, off);
8111 case FIXED_CST:
8112 return native_encode_fixed (expr, ptr, len, off);
8114 case COMPLEX_CST:
8115 return native_encode_complex (expr, ptr, len, off);
8117 case VECTOR_CST:
8118 return native_encode_vector (expr, ptr, len, off);
8120 case STRING_CST:
8121 return native_encode_string (expr, ptr, len, off);
8123 default:
8124 return 0;
8128 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8129 and larger than or equal to FIELDSIZE bytes, with underlying mode
8130 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8131 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8133 tree
8134 find_bitfield_repr_type (int fieldsize, int len)
8136 machine_mode mode;
8137 for (int pass = 0; pass < 2; pass++)
8139 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8140 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8141 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8142 && known_eq (GET_MODE_PRECISION (mode),
8143 GET_MODE_BITSIZE (mode))
8144 && known_le (GET_MODE_SIZE (mode), len))
8146 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8147 if (ret && TYPE_MODE (ret) == mode)
8148 return ret;
8152 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8153 if (int_n_enabled_p[i]
8154 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8155 && int_n_trees[i].unsigned_type)
8157 tree ret = int_n_trees[i].unsigned_type;
8158 mode = TYPE_MODE (ret);
8159 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8160 && known_eq (GET_MODE_PRECISION (mode),
8161 GET_MODE_BITSIZE (mode))
8162 && known_le (GET_MODE_SIZE (mode), len))
8163 return ret;
8166 return NULL_TREE;
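/* E.g. for FIELDSIZE == 3 and LEN == 16, on a target with 1-, 2-, 4-
   and 8-byte integer modes this returns the 4-byte unsigned type:
   the smallest mode that covers the field while still fitting in the
   buffer, since the modes are walked in increasing size.  */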
8169 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8170 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8171 must be non-NULL and OFF zero), then in addition to filling the
8172 bytes pointed to by PTR with the value, also clear any bits pointed
8173 to by MASK that are known to be initialized, and keep them as-is
8174 for e.g. uninitialized padding bits or uninitialized fields. */
8176 int
8177 native_encode_initializer (tree init, unsigned char *ptr, int len,
8178 int off, unsigned char *mask)
8180 int r;
8182 /* We don't support starting at a negative offset, and -1 is special. */
8183 if (off < -1 || init == NULL_TREE)
8184 return 0;
8186 gcc_assert (mask == NULL || (off == 0 && ptr));
8188 STRIP_NOPS (init);
8189 switch (TREE_CODE (init))
8191 case VIEW_CONVERT_EXPR:
8192 case NON_LVALUE_EXPR:
8193 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8194 mask);
8195 default:
8196 r = native_encode_expr (init, ptr, len, off);
8197 if (mask)
8198 memset (mask, 0, r);
8199 return r;
8200 case CONSTRUCTOR:
8201 tree type = TREE_TYPE (init);
8202 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8203 if (total_bytes < 0)
8204 return 0;
8205 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8206 return 0;
8207 int o = off == -1 ? 0 : off;
8208 if (TREE_CODE (type) == ARRAY_TYPE)
8210 tree min_index;
8211 unsigned HOST_WIDE_INT cnt;
8212 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8213 constructor_elt *ce;
8215 if (!TYPE_DOMAIN (type)
8216 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8217 return 0;
8219 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8220 if (fieldsize <= 0)
8221 return 0;
8223 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8224 if (ptr)
8225 memset (ptr, '\0', MIN (total_bytes - off, len));
8227 for (cnt = 0; ; cnt++)
8229 tree val = NULL_TREE, index = NULL_TREE;
8230 HOST_WIDE_INT pos = curpos, count = 0;
8231 bool full = false;
8232 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8234 val = ce->value;
8235 index = ce->index;
8237 else if (mask == NULL
8238 || CONSTRUCTOR_NO_CLEARING (init)
8239 || curpos >= total_bytes)
8240 break;
8241 else
8242 pos = total_bytes;
8244 if (index && TREE_CODE (index) == RANGE_EXPR)
8246 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8247 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8248 return 0;
8249 offset_int wpos
8250 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8251 - wi::to_offset (min_index),
8252 TYPE_PRECISION (sizetype));
8253 wpos *= fieldsize;
8254 if (!wi::fits_shwi_p (wpos))
8255 return 0;
8256 pos = wpos.to_shwi ();
8257 offset_int wcount
8258 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8259 - wi::to_offset (TREE_OPERAND (index, 0)),
8260 TYPE_PRECISION (sizetype));
8261 if (!wi::fits_shwi_p (wcount))
8262 return 0;
8263 count = wcount.to_shwi ();
8265 else if (index)
8267 if (TREE_CODE (index) != INTEGER_CST)
8268 return 0;
8269 offset_int wpos
8270 = wi::sext (wi::to_offset (index)
8271 - wi::to_offset (min_index),
8272 TYPE_PRECISION (sizetype));
8273 wpos *= fieldsize;
8274 if (!wi::fits_shwi_p (wpos))
8275 return 0;
8276 pos = wpos.to_shwi ();
8279 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8281 if (valueinit == -1)
8283 tree zero = build_zero_cst (TREE_TYPE (type));
8284 r = native_encode_initializer (zero, ptr + curpos,
8285 fieldsize, 0,
8286 mask + curpos);
8287 if (TREE_CODE (zero) == CONSTRUCTOR)
8288 ggc_free (zero);
8289 if (!r)
8290 return 0;
8291 valueinit = curpos;
8292 curpos += fieldsize;
8294 while (curpos != pos)
8296 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8297 memcpy (mask + curpos, mask + valueinit, fieldsize);
8298 curpos += fieldsize;
8302 curpos = pos;
8303 if (val)
8304 do
8306 if (off == -1
8307 || (curpos >= off
8308 && (curpos + fieldsize
8309 <= (HOST_WIDE_INT) off + len)))
8311 if (full)
8313 if (ptr)
8314 memcpy (ptr + (curpos - o), ptr + (pos - o),
8315 fieldsize);
8316 if (mask)
8317 memcpy (mask + curpos, mask + pos, fieldsize);
8319 else if (!native_encode_initializer (val,
8320 ptr
8321 ? ptr + curpos - o
8322 : NULL,
8323 fieldsize,
8324 off == -1 ? -1
8325 : 0,
8326 mask
8327 ? mask + curpos
8328 : NULL))
8329 return 0;
8330 else
8332 full = true;
8333 pos = curpos;
8336 else if (curpos + fieldsize > off
8337 && curpos < (HOST_WIDE_INT) off + len)
8339 /* Partial overlap. */
8340 unsigned char *p = NULL;
8341 int no = 0;
8342 int l;
8343 gcc_assert (mask == NULL);
8344 if (curpos >= off)
8346 if (ptr)
8347 p = ptr + curpos - off;
8348 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8349 fieldsize);
8351 else
8353 p = ptr;
8354 no = off - curpos;
8355 l = len;
8357 if (!native_encode_initializer (val, p, l, no, NULL))
8358 return 0;
8360 curpos += fieldsize;
8362 while (count-- != 0);
8364 return MIN (total_bytes - off, len);
8366 else if (TREE_CODE (type) == RECORD_TYPE
8367 || TREE_CODE (type) == UNION_TYPE)
8369 unsigned HOST_WIDE_INT cnt;
8370 constructor_elt *ce;
8371 tree fld_base = TYPE_FIELDS (type);
8372 tree to_free = NULL_TREE;
8374 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8375 if (ptr != NULL)
8376 memset (ptr, '\0', MIN (total_bytes - o, len));
8377 for (cnt = 0; ; cnt++)
8379 tree val = NULL_TREE, field = NULL_TREE;
8380 HOST_WIDE_INT pos = 0, fieldsize;
8381 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8383 if (to_free)
8385 ggc_free (to_free);
8386 to_free = NULL_TREE;
8389 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8391 val = ce->value;
8392 field = ce->index;
8393 if (field == NULL_TREE)
8394 return 0;
8396 pos = int_byte_position (field);
8397 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8398 continue;
8400 else if (mask == NULL
8401 || CONSTRUCTOR_NO_CLEARING (init))
8402 break;
8403 else
8404 pos = total_bytes;
8406 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8408 tree fld;
8409 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8411 if (TREE_CODE (fld) != FIELD_DECL)
8412 continue;
8413 if (fld == field)
8414 break;
8415 if (DECL_PADDING_P (fld))
8416 continue;
8417 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8418 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8419 return 0;
8420 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8421 continue;
8422 break;
8424 if (fld == NULL_TREE)
8426 if (ce == NULL)
8427 break;
8428 return 0;
8430 fld_base = DECL_CHAIN (fld);
8431 if (fld != field)
8433 cnt--;
8434 field = fld;
8435 pos = int_byte_position (field);
8436 val = build_zero_cst (TREE_TYPE (fld));
8437 if (TREE_CODE (val) == CONSTRUCTOR)
8438 to_free = val;
8442 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8443 && TYPE_DOMAIN (TREE_TYPE (field))
8444 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8446 if (mask || off != -1)
8447 return 0;
8448 if (val == NULL_TREE)
8449 continue;
8450 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8451 return 0;
8452 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8453 if (fieldsize < 0
8454 || (int) fieldsize != fieldsize
8455 || (pos + fieldsize) > INT_MAX)
8456 return 0;
8457 if (pos + fieldsize > total_bytes)
8459 if (ptr != NULL && total_bytes < len)
8460 memset (ptr + total_bytes, '\0',
8461 MIN (pos + fieldsize, len) - total_bytes);
8462 total_bytes = pos + fieldsize;
8465 else
8467 if (DECL_SIZE_UNIT (field) == NULL_TREE
8468 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8469 return 0;
8470 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8472 if (fieldsize == 0)
8473 continue;
8475 if (DECL_BIT_FIELD (field))
8477 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8478 return 0;
8479 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8480 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8481 if (bpos % BITS_PER_UNIT)
8482 bpos %= BITS_PER_UNIT;
8483 else
8484 bpos = 0;
8485 fieldsize += bpos;
8486 epos = fieldsize % BITS_PER_UNIT;
8487 fieldsize += BITS_PER_UNIT - 1;
8488 fieldsize /= BITS_PER_UNIT;
8491 if (off != -1 && pos + fieldsize <= off)
8492 continue;
8494 if (val == NULL_TREE)
8495 continue;
8497 if (DECL_BIT_FIELD (field))
8499 /* FIXME: Handle PDP endian. */
8500 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8501 return 0;
8503 if (TREE_CODE (val) != INTEGER_CST)
8504 return 0;
8506 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8507 tree repr_type = NULL_TREE;
8508 HOST_WIDE_INT rpos = 0;
8509 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8511 rpos = int_byte_position (repr);
8512 repr_type = TREE_TYPE (repr);
8514 else
8516 repr_type = find_bitfield_repr_type (fieldsize, len);
8517 if (repr_type == NULL_TREE)
8518 return 0;
8519 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8520 gcc_assert (repr_size > 0 && repr_size <= len);
8521 if (pos + repr_size <= o + len)
8522 rpos = pos;
8523 else
8525 rpos = o + len - repr_size;
8526 gcc_assert (rpos <= pos);
8530 if (rpos > pos)
8531 return 0;
8532 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8533 int diff = (TYPE_PRECISION (repr_type)
8534 - TYPE_PRECISION (TREE_TYPE (field)));
8535 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8536 if (!BYTES_BIG_ENDIAN)
8537 w = wi::lshift (w, bitoff);
8538 else
8539 w = wi::lshift (w, diff - bitoff);
8540 val = wide_int_to_tree (repr_type, w);
8542 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8543 / BITS_PER_UNIT + 1];
8544 int l = native_encode_int (val, buf, sizeof buf, 0);
8545 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8546 return 0;
8548 if (ptr == NULL)
8549 continue;
8551 /* If the bitfield does not start at byte boundary, handle
8552 the partial byte at the start. */
8553 if (bpos
8554 && (off == -1 || (pos >= off && len >= 1)))
8556 if (!BYTES_BIG_ENDIAN)
8558 int msk = (1 << bpos) - 1;
8559 buf[pos - rpos] &= ~msk;
8560 buf[pos - rpos] |= ptr[pos - o] & msk;
8561 if (mask)
8563 if (fieldsize > 1 || epos == 0)
8564 mask[pos] &= msk;
8565 else
8566 mask[pos] &= (msk | ~((1 << epos) - 1));
8569 else
8571 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8572 buf[pos - rpos] &= msk;
8573 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8574 if (mask)
8576 if (fieldsize > 1 || epos == 0)
8577 mask[pos] &= ~msk;
8578 else
8579 mask[pos] &= (~msk
8580 | ((1 << (BITS_PER_UNIT - epos))
8581 - 1));
8585 /* If the bitfield does not end at byte boundary, handle
8586 the partial byte at the end. */
8587 if (epos
8588 && (off == -1
8589 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8591 if (!BYTES_BIG_ENDIAN)
8593 int msk = (1 << epos) - 1;
8594 buf[pos - rpos + fieldsize - 1] &= msk;
8595 buf[pos - rpos + fieldsize - 1]
8596 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8597 if (mask && (fieldsize > 1 || bpos == 0))
8598 mask[pos + fieldsize - 1] &= ~msk;
8600 else
8602 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8603 buf[pos - rpos + fieldsize - 1] &= ~msk;
8604 buf[pos - rpos + fieldsize - 1]
8605 |= ptr[pos + fieldsize - 1 - o] & msk;
8606 if (mask && (fieldsize > 1 || bpos == 0))
8607 mask[pos + fieldsize - 1] &= msk;
8610 if (off == -1
8611 || (pos >= off
8612 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8614 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8615 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8616 memset (mask + pos + (bpos != 0), 0,
8617 fieldsize - (bpos != 0) - (epos != 0));
8619 else
8621 /* Partial overlap. */
8622 HOST_WIDE_INT fsz = fieldsize;
8623 gcc_assert (mask == NULL);
8624 if (pos < off)
8626 fsz -= (off - pos);
8627 pos = off;
8629 if (pos + fsz > (HOST_WIDE_INT) off + len)
8630 fsz = (HOST_WIDE_INT) off + len - pos;
8631 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8633 continue;
8636 if (off == -1
8637 || (pos >= off
8638 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8640 int fldsize = fieldsize;
8641 if (off == -1)
8643 tree fld = DECL_CHAIN (field);
8644 while (fld)
8646 if (TREE_CODE (fld) == FIELD_DECL)
8647 break;
8648 fld = DECL_CHAIN (fld);
8650 if (fld == NULL_TREE)
8651 fldsize = len - pos;
8653 r = native_encode_initializer (val, ptr ? ptr + pos - o
8654 : NULL,
8655 fldsize,
8656 off == -1 ? -1 : 0,
8657 mask ? mask + pos : NULL);
8658 if (!r)
8659 return 0;
8660 if (off == -1
8661 && fldsize != fieldsize
8662 && r > fieldsize
8663 && pos + r > total_bytes)
8664 total_bytes = pos + r;
8666 else
8668 /* Partial overlap. */
8669 unsigned char *p = NULL;
8670 int no = 0;
8671 int l;
8672 gcc_assert (mask == NULL);
8673 if (pos >= off)
8675 if (ptr)
8676 p = ptr + pos - off;
8677 l = MIN ((HOST_WIDE_INT) off + len - pos,
8678 fieldsize);
8680 else
8682 p = ptr;
8683 no = off - pos;
8684 l = len;
8686 if (!native_encode_initializer (val, p, l, no, NULL))
8687 return 0;
8690 return MIN (total_bytes - off, len);
8692 return 0;
8697 /* Subroutine of native_interpret_expr. Interpret the contents of
8698 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8699 If the buffer cannot be interpreted, return NULL_TREE. */
8701 static tree
8702 native_interpret_int (tree type, const unsigned char *ptr, int len)
8704 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8706 if (total_bytes > len
8707 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8708 return NULL_TREE;
8710 wide_int result = wi::from_buffer (ptr, total_bytes);
8712 return wide_int_to_tree (type, result);
8716 /* Subroutine of native_interpret_expr. Interpret the contents of
8717 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8718 If the buffer cannot be interpreted, return NULL_TREE. */
8720 static tree
8721 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8723 scalar_mode mode = SCALAR_TYPE_MODE (type);
8724 int total_bytes = GET_MODE_SIZE (mode);
8725 double_int result;
8726 FIXED_VALUE_TYPE fixed_value;
8728 if (total_bytes > len
8729 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8730 return NULL_TREE;
8732 result = double_int::from_buffer (ptr, total_bytes);
8733 fixed_value = fixed_from_double_int (result, mode);
8735 return build_fixed (type, fixed_value);
8739 /* Subroutine of native_interpret_expr. Interpret the contents of
8740 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8741 If the buffer cannot be interpreted, return NULL_TREE. */
8743 tree
8744 native_interpret_real (tree type, const unsigned char *ptr, int len)
8746 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8747 int total_bytes = GET_MODE_SIZE (mode);
8748 unsigned char value;
8749 /* There are always 32 bits in each long, no matter the size of
8750 the host's long. We handle floating point representations with
8751 up to 192 bits. */
8752 REAL_VALUE_TYPE r;
8753 long tmp[6];
8755 if (total_bytes > len || total_bytes > 24)
8756 return NULL_TREE;
8757 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8759 memset (tmp, 0, sizeof (tmp));
8760 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8761 bitpos += BITS_PER_UNIT)
8763 /* Both OFFSET and BYTE index within a long;
8764 bitpos indexes the whole float. */
8765 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8766 if (UNITS_PER_WORD < 4)
8768 int word = byte / UNITS_PER_WORD;
8769 if (WORDS_BIG_ENDIAN)
8770 word = (words - 1) - word;
8771 offset = word * UNITS_PER_WORD;
8772 if (BYTES_BIG_ENDIAN)
8773 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8774 else
8775 offset += byte % UNITS_PER_WORD;
8777 else
8779 offset = byte;
8780 if (BYTES_BIG_ENDIAN)
8782 /* Reverse bytes within each long, or within the entire float
8783 if it's smaller than a long (for HFmode). */
8784 offset = MIN (3, total_bytes - 1) - offset;
8785 gcc_assert (offset >= 0);
8788 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8790 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8793 real_from_target (&r, tmp, mode);
8794 return build_real (type, r);
8798 /* Subroutine of native_interpret_expr. Interpret the contents of
8799 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8800 If the buffer cannot be interpreted, return NULL_TREE. */
8802 static tree
8803 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8805 tree etype, rpart, ipart;
8806 int size;
8808 etype = TREE_TYPE (type);
8809 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8810 if (size * 2 > len)
8811 return NULL_TREE;
8812 rpart = native_interpret_expr (etype, ptr, size);
8813 if (!rpart)
8814 return NULL_TREE;
8815 ipart = native_interpret_expr (etype, ptr+size, size);
8816 if (!ipart)
8817 return NULL_TREE;
8818 return build_complex (type, rpart, ipart);
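/* E.g. a complex float is read back as two consecutive 4-byte floats,
   the real part from PTR and the imaginary part from PTR + 4,
   mirroring the layout native_encode_complex produces.  */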
8821 /* Read a vector of type TYPE from the target memory image given by BYTES,
8822 which contains LEN bytes. The vector is known to be encodable using
8823 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8825 Return the vector on success, otherwise return null. */
8827 static tree
8828 native_interpret_vector_part (tree type, const unsigned char *bytes,
8829 unsigned int len, unsigned int npatterns,
8830 unsigned int nelts_per_pattern)
8832 tree elt_type = TREE_TYPE (type);
8833 if (VECTOR_BOOLEAN_TYPE_P (type)
8834 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8836 /* This is the only case in which elements can be smaller than a byte.
8837 Element 0 is always in the lsb of the containing byte. */
8838 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8839 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8840 return NULL_TREE;
8842 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8843 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8845 unsigned int bit_index = i * elt_bits;
8846 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8847 unsigned int lsb = bit_index % BITS_PER_UNIT;
8848 builder.quick_push (bytes[byte_index] & (1 << lsb)
8849 ? build_all_ones_cst (elt_type)
8850 : build_zero_cst (elt_type));
8852 return builder.build ();
8855 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8856 if (elt_bytes * npatterns * nelts_per_pattern > len)
8857 return NULL_TREE;
8859 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8860 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8862 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8863 if (!elt)
8864 return NULL_TREE;
8865 builder.quick_push (elt);
8866 bytes += elt_bytes;
8868 return builder.build ();
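/* For a boolean vector with 1-bit elements the bytes are consumed lsb
   first, so e.g. the byte 0x05 yields lanes { -1, 0, -1, 0, ... }
   for the first BITS_PER_UNIT elements.  */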
8871 /* Subroutine of native_interpret_expr. Interpret the contents of
8872 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8873 If the buffer cannot be interpreted, return NULL_TREE. */
8875 static tree
8876 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8878 tree etype;
8879 unsigned int size;
8880 unsigned HOST_WIDE_INT count;
8882 etype = TREE_TYPE (type);
8883 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8884 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8885 || size * count > len)
8886 return NULL_TREE;
8888 return native_interpret_vector_part (type, ptr, len, count, 1);
8892 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8893 the buffer PTR of length LEN as a constant of type TYPE. For
8894 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8895 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8896 return NULL_TREE. */
8898 tree
8899 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8901 switch (TREE_CODE (type))
8903 case INTEGER_TYPE:
8904 case ENUMERAL_TYPE:
8905 case BOOLEAN_TYPE:
8906 case POINTER_TYPE:
8907 case REFERENCE_TYPE:
8908 case OFFSET_TYPE:
8909 return native_interpret_int (type, ptr, len);
8911 case REAL_TYPE:
8912 if (tree ret = native_interpret_real (type, ptr, len))
8914 /* For floating point values in composite modes, punt if this
8915 folding doesn't preserve bit representation. As the mode doesn't
8916 have fixed precision while GCC pretends it does, there could be
8917 valid values that GCC can't really represent accurately.
8918 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8919 bit combinations which GCC doesn't preserve. */
8920 unsigned char buf[24 * 2];
8921 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8922 int total_bytes = GET_MODE_SIZE (mode);
8923 memcpy (buf + 24, ptr, total_bytes);
8924 clear_type_padding_in_mask (type, buf + 24);
8925 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8926 || memcmp (buf + 24, buf, total_bytes) != 0)
8927 return NULL_TREE;
8928 return ret;
8930 return NULL_TREE;
8932 case FIXED_POINT_TYPE:
8933 return native_interpret_fixed (type, ptr, len);
8935 case COMPLEX_TYPE:
8936 return native_interpret_complex (type, ptr, len);
8938 case VECTOR_TYPE:
8939 return native_interpret_vector (type, ptr, len);
8941 default:
8942 return NULL_TREE;
8946 /* Returns true if we can interpret the contents of a native encoding
8947 as TYPE. */
8949 bool
8950 can_native_interpret_type_p (tree type)
8952 switch (TREE_CODE (type))
8954 case INTEGER_TYPE:
8955 case ENUMERAL_TYPE:
8956 case BOOLEAN_TYPE:
8957 case POINTER_TYPE:
8958 case REFERENCE_TYPE:
8959 case FIXED_POINT_TYPE:
8960 case REAL_TYPE:
8961 case COMPLEX_TYPE:
8962 case VECTOR_TYPE:
8963 case OFFSET_TYPE:
8964 return true;
8965 default:
8966 return false;
8970 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8971 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8973 tree
8974 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8975 int len)
8977 vec<constructor_elt, va_gc> *elts = NULL;
8978 if (TREE_CODE (type) == ARRAY_TYPE)
8980 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8981 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8982 return NULL_TREE;
8984 HOST_WIDE_INT cnt = 0;
8985 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8987 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8988 return NULL_TREE;
8989 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8991 if (eltsz == 0)
8992 cnt = 0;
8993 HOST_WIDE_INT pos = 0;
8994 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8996 tree v = NULL_TREE;
8997 if (pos >= len || pos + eltsz > len)
8998 return NULL_TREE;
8999 if (can_native_interpret_type_p (TREE_TYPE (type)))
9001 v = native_interpret_expr (TREE_TYPE (type),
9002 ptr + off + pos, eltsz);
9003 if (v == NULL_TREE)
9004 return NULL_TREE;
9006 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9007 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9008 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
9009 eltsz);
9010 if (v == NULL_TREE)
9011 return NULL_TREE;
9012 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9014 return build_constructor (type, elts);
9016 if (TREE_CODE (type) != RECORD_TYPE)
9017 return NULL_TREE;
9018 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9020 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
9021 continue;
9022 tree fld = field;
9023 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9024 int diff = 0;
9025 tree v = NULL_TREE;
9026 if (DECL_BIT_FIELD (field))
9028 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9029 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9031 poly_int64 bitoffset;
9032 poly_uint64 field_offset, fld_offset;
9033 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9034 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9035 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9036 else
9037 bitoffset = 0;
9038 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9039 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9040 diff = (TYPE_PRECISION (TREE_TYPE (fld))
9041 - TYPE_PRECISION (TREE_TYPE (field)));
9042 if (!bitoffset.is_constant (&bitoff)
9043 || bitoff < 0
9044 || bitoff > diff)
9045 return NULL_TREE;
9047 else
9049 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9050 return NULL_TREE;
9051 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9052 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9053 bpos %= BITS_PER_UNIT;
9054 fieldsize += bpos;
9055 fieldsize += BITS_PER_UNIT - 1;
9056 fieldsize /= BITS_PER_UNIT;
9057 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9058 if (repr_type == NULL_TREE)
9059 return NULL_TREE;
9060 sz = int_size_in_bytes (repr_type);
9061 if (sz < 0 || sz > len)
9062 return NULL_TREE;
9063 pos = int_byte_position (field);
9064 if (pos < 0 || pos > len || pos + fieldsize > len)
9065 return NULL_TREE;
9066 HOST_WIDE_INT rpos;
9067 if (pos + sz <= len)
9068 rpos = pos;
9069 else
9071 rpos = len - sz;
9072 gcc_assert (rpos <= pos);
9074 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9075 pos = rpos;
9076 diff = (TYPE_PRECISION (repr_type)
9077 - TYPE_PRECISION (TREE_TYPE (field)));
9078 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9079 if (v == NULL_TREE)
9080 return NULL_TREE;
9081 fld = NULL_TREE;
9085 if (fld)
9087 sz = int_size_in_bytes (TREE_TYPE (fld));
9088 if (sz < 0 || sz > len)
9089 return NULL_TREE;
9090 tree byte_pos = byte_position (fld);
9091 if (!tree_fits_shwi_p (byte_pos))
9092 return NULL_TREE;
9093 pos = tree_to_shwi (byte_pos);
9094 if (pos < 0 || pos > len || pos + sz > len)
9095 return NULL_TREE;
9097 if (fld == NULL_TREE)
9098 /* Already handled above. */;
9099 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9101 v = native_interpret_expr (TREE_TYPE (fld),
9102 ptr + off + pos, sz);
9103 if (v == NULL_TREE)
9104 return NULL_TREE;
9106 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9107 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9108 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9109 if (v == NULL_TREE)
9110 return NULL_TREE;
9111 if (fld != field)
9113 if (TREE_CODE (v) != INTEGER_CST)
9114 return NULL_TREE;
9116 /* FIXME: Figure out how to handle PDP endian bitfields. */
9117 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9118 return NULL_TREE;
9119 if (!BYTES_BIG_ENDIAN)
9120 v = wide_int_to_tree (TREE_TYPE (field),
9121 wi::lrshift (wi::to_wide (v), bitoff));
9122 else
9123 v = wide_int_to_tree (TREE_TYPE (field),
9124 wi::lrshift (wi::to_wide (v),
9125 diff - bitoff));
9127 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9129 return build_constructor (type, elts);
9132 /* Routines for manipulating native_encode_expr encoded data when the
9133 encoded or extracted constant positions and/or sizes aren't byte aligned. */
9135 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9136 bits between adjacent elements. AMNT should be within
9137 [0, BITS_PER_UNIT).
9138 Example, AMNT = 2:
9139 00011111|11100000 << 2 = 01111111|10000000
9140 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9142 void
9143 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9144 unsigned int amnt)
9146 if (amnt == 0)
9147 return;
9149 unsigned char carry_over = 0U;
9150 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9151 unsigned char clear_mask = (~0U) << amnt;
9153 for (unsigned int i = 0; i < sz; i++)
9155 unsigned prev_carry_over = carry_over;
9156 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9158 ptr[i] <<= amnt;
9159 if (i != 0)
9161 ptr[i] &= clear_mask;
9162 ptr[i] |= prev_carry_over;
9167 /* Like shift_bytes_in_array_left but for big-endian.
9168 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9169 bits between adjacent elements. AMNT should be within
9170 [0, BITS_PER_UNIT).
9171 Example, AMNT = 2:
9172 00011111|11100000 >> 2 = 00000111|11111000
9173 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9175 void
9176 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9177 unsigned int amnt)
9179 if (amnt == 0)
9180 return;
9182 unsigned char carry_over = 0U;
9183 unsigned char carry_mask = ~(~0U << amnt);
9185 for (unsigned int i = 0; i < sz; i++)
9187 unsigned prev_carry_over = carry_over;
9188 carry_over = ptr[i] & carry_mask;
9190 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9191 ptr[i] >>= amnt;
9192 ptr[i] |= prev_carry_over;
9196 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9197 directly on the VECTOR_CST encoding, in a way that works for variable-
9198 length vectors. Return the resulting VECTOR_CST on success or null
9199 on failure. */
9201 static tree
9202 fold_view_convert_vector_encoding (tree type, tree expr)
9204 tree expr_type = TREE_TYPE (expr);
9205 poly_uint64 type_bits, expr_bits;
9206 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9207 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9208 return NULL_TREE;
9210 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9211 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9212 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9213 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9215 /* We can only preserve the semantics of a stepped pattern if the new
9216 vector element is an integer of the same size. */
9217 if (VECTOR_CST_STEPPED_P (expr)
9218 && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits)
9219 return NULL_TREE;
9221 /* The number of bits needed to encode one element from every pattern
9222 of the original vector. */
9223 unsigned int expr_sequence_bits
9224 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9226 /* The number of bits needed to encode one element from every pattern
9227 of the result. */
9228 unsigned int type_sequence_bits
9229 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9231 /* Don't try to read more bytes than are available, which can happen
9232 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9233 The general VIEW_CONVERT handling can cope with that case, so there's
9234 no point complicating things here. */
9235 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9236 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9237 BITS_PER_UNIT);
9238 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9239 if (known_gt (buffer_bits, expr_bits))
9240 return NULL_TREE;
9242 /* Get enough bytes of EXPR to form the new encoding. */
9243 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9244 buffer.quick_grow (buffer_bytes);
9245 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9246 buffer_bits / expr_elt_bits)
9247 != (int) buffer_bytes)
9248 return NULL_TREE;
9250 /* Reencode the bytes as TYPE. */
9251 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9252 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9253 type_npatterns, nelts_per_pattern);
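/* Worked example of the encoding arithmetic (an editorial sketch): for a
   VECTOR_CST of 32-bit elements built from a single duplicated pattern,
   expr_sequence_bits == 1 * 32.  Reinterpreting it as a vector of 8-bit
   elements gives type_sequence_bits == lcm (32, 8) == 32, and hence
   type_npatterns == 32 / 8 == 4: each 32-bit element becomes a repeating
   group of four byte patterns in the result encoding.  */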
9256 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9257 TYPE at compile-time. If we're unable to perform the conversion
9258 return NULL_TREE. */
9260 static tree
9261 fold_view_convert_expr (tree type, tree expr)
9263 /* We support up to 512-bit values (for V8DFmode). */
9264 unsigned char buffer[64];
9265 int len;
9267 /* Check that the host and target are sane. */
9268 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9269 return NULL_TREE;
9271 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9272 if (tree res = fold_view_convert_vector_encoding (type, expr))
9273 return res;
9275 len = native_encode_expr (expr, buffer, sizeof (buffer));
9276 if (len == 0)
9277 return NULL_TREE;
9279 return native_interpret_expr (type, buffer, len);
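/* For instance (illustrative sketch, assuming 32-bit int and IEEE binary32
   float on the target): folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the
   float into BUFFER and reinterprets those bytes as an int, yielding
   0x3f800000.  */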
9282 /* Build an expression for the address of T. Folds away INDIRECT_REF
9283 to avoid confusing the gimplify process. */
9285 tree
9286 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9288 /* The size of the object is not relevant when talking about its address. */
9289 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9290 t = TREE_OPERAND (t, 0);
9292 if (TREE_CODE (t) == INDIRECT_REF)
9294 t = TREE_OPERAND (t, 0);
9296 if (TREE_TYPE (t) != ptrtype)
9297 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9299 else if (TREE_CODE (t) == MEM_REF
9300 && integer_zerop (TREE_OPERAND (t, 1)))
9302 t = TREE_OPERAND (t, 0);
9304 if (TREE_TYPE (t) != ptrtype)
9305 t = fold_convert_loc (loc, ptrtype, t);
9307 else if (TREE_CODE (t) == MEM_REF
9308 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9309 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9310 TREE_OPERAND (t, 0),
9311 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9312 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9314 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9316 if (TREE_TYPE (t) != ptrtype)
9317 t = fold_convert_loc (loc, ptrtype, t);
9319 else
9320 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9322 return t;
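/* Illustrative folds performed above (editorial sketch, schematic notation):
     &*p                    -> p              (INDIRECT_REF case)
     &MEM_REF (p, 0)        -> p              (zero-offset MEM_REF case)
     &MEM_REF (cst, off)    -> cst p+ off     (constant-base MEM_REF case)
     &VIEW_CONVERT_EXPR (x) -> (ptrtype) &x   (VIEW_CONVERT_EXPR case)  */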
9325 /* Build an expression for the address of T. */
9327 tree
9328 build_fold_addr_expr_loc (location_t loc, tree t)
9330 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9332 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9335 /* Fold a unary expression of code CODE and type TYPE with operand
9336 OP0. Return the folded expression if folding is successful.
9337 Otherwise, return NULL_TREE. */
9339 tree
9340 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9342 tree tem;
9343 tree arg0;
9344 enum tree_code_class kind = TREE_CODE_CLASS (code);
9346 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9347 && TREE_CODE_LENGTH (code) == 1);
9349 arg0 = op0;
9350 if (arg0)
9352 if (CONVERT_EXPR_CODE_P (code)
9353 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9355 /* Don't use STRIP_NOPS, because signedness of argument type
9356 matters. */
9357 STRIP_SIGN_NOPS (arg0);
9359 else
9361 /* Strip any conversions that don't change the mode. This
9362 is safe for every expression, except for a comparison
9363 expression because its signedness is derived from its
9364 operands.
9366 Note that this is done as an internal manipulation within
9367 the constant folder, in order to find the simplest
9368 representation of the arguments so that their form can be
9369 studied. In any case, the appropriate type conversions
9370 should be put back in the tree that will get out of the
9371 constant folder. */
9372 STRIP_NOPS (arg0);
9375 if (CONSTANT_CLASS_P (arg0))
9377 tree tem = const_unop (code, type, arg0);
9378 if (tem)
9380 if (TREE_TYPE (tem) != type)
9381 tem = fold_convert_loc (loc, type, tem);
9382 return tem;
9387 tem = generic_simplify (loc, code, type, op0);
9388 if (tem)
9389 return tem;
9391 if (TREE_CODE_CLASS (code) == tcc_unary)
9393 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9394 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9395 fold_build1_loc (loc, code, type,
9396 fold_convert_loc (loc, TREE_TYPE (op0),
9397 TREE_OPERAND (arg0, 1))));
9398 else if (TREE_CODE (arg0) == COND_EXPR)
9400 tree arg01 = TREE_OPERAND (arg0, 1);
9401 tree arg02 = TREE_OPERAND (arg0, 2);
9402 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9403 arg01 = fold_build1_loc (loc, code, type,
9404 fold_convert_loc (loc,
9405 TREE_TYPE (op0), arg01));
9406 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9407 arg02 = fold_build1_loc (loc, code, type,
9408 fold_convert_loc (loc,
9409 TREE_TYPE (op0), arg02));
9410 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9411 arg01, arg02);
9413 /* If this was a conversion, and all we did was to move it
9414 inside the COND_EXPR, bring it back out. But leave it if
9415 it is a conversion from integer to integer and the
9416 result precision is no wider than a word since such a
9417 conversion is cheap and may be optimized away by combine,
9418 while it couldn't if it were outside the COND_EXPR. Then return
9419 so we don't get into an infinite recursion loop taking the
9420 conversion out and then back in. */
9422 if ((CONVERT_EXPR_CODE_P (code)
9423 || code == NON_LVALUE_EXPR)
9424 && TREE_CODE (tem) == COND_EXPR
9425 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9426 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9427 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9428 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9429 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9430 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9431 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9432 && (INTEGRAL_TYPE_P
9433 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9434 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9435 || flag_syntax_only))
9436 tem = build1_loc (loc, code, type,
9437 build3 (COND_EXPR,
9438 TREE_TYPE (TREE_OPERAND
9439 (TREE_OPERAND (tem, 1), 0)),
9440 TREE_OPERAND (tem, 0),
9441 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9442 TREE_OPERAND (TREE_OPERAND (tem, 2),
9443 0)));
9444 return tem;
9448 switch (code)
9450 case NON_LVALUE_EXPR:
9451 if (!maybe_lvalue_p (op0))
9452 return fold_convert_loc (loc, type, op0);
9453 return NULL_TREE;
9455 CASE_CONVERT:
9456 case FLOAT_EXPR:
9457 case FIX_TRUNC_EXPR:
9458 if (COMPARISON_CLASS_P (op0))
9460 /* If we have (type) (a CMP b) and type is an integral type, return
9461 new expression involving the new type. Canonicalize
9462 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9463 non-integral type.
9464 Do not fold the result, as that would not simplify further; also,
9465 folding again would result in infinite recursion. */
9466 if (TREE_CODE (type) == BOOLEAN_TYPE)
9467 return build2_loc (loc, TREE_CODE (op0), type,
9468 TREE_OPERAND (op0, 0),
9469 TREE_OPERAND (op0, 1));
9470 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9471 && TREE_CODE (type) != VECTOR_TYPE)
9472 return build3_loc (loc, COND_EXPR, type, op0,
9473 constant_boolean_node (true, type),
9474 constant_boolean_node (false, type));
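/* E.g. (an editorial sketch): (bool) (a < b) is rebuilt as the comparison
   a < b with boolean type, while (float) (a < b) is canonicalized to the
   unfolded a < b ? 1.0f : 0.0f.  */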
9477 /* Handle (T *)&A.B.C for A being of type T and B and C
9478 living at offset zero. This occurs frequently in
9479 C++ upcasting and then accessing the base. */
9480 if (TREE_CODE (op0) == ADDR_EXPR
9481 && POINTER_TYPE_P (type)
9482 && handled_component_p (TREE_OPERAND (op0, 0)))
9484 poly_int64 bitsize, bitpos;
9485 tree offset;
9486 machine_mode mode;
9487 int unsignedp, reversep, volatilep;
9488 tree base
9489 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9490 &offset, &mode, &unsignedp, &reversep,
9491 &volatilep);
9492 /* If the reference was to a (constant) zero offset, we can use
9493 the address of the base if it has the same base type
9494 as the result type and the pointer type is unqualified. */
9495 if (!offset
9496 && known_eq (bitpos, 0)
9497 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9498 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9499 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9500 return fold_convert_loc (loc, type,
9501 build_fold_addr_expr_loc (loc, base));
9504 if (TREE_CODE (op0) == MODIFY_EXPR
9505 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9506 /* Detect assigning a bitfield. */
9507 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9508 && DECL_BIT_FIELD
9509 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9511 /* Don't leave an assignment inside a conversion
9512 unless assigning a bitfield. */
9513 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9514 /* First do the assignment, then return converted constant. */
9515 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9516 suppress_warning (tem /* What warning? */);
9517 TREE_USED (tem) = 1;
9518 return tem;
9521 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9522 constant (if x has signed type, the sign bit cannot be set
9523 in c). This folds extension into the BIT_AND_EXPR.
9524 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9525 very likely don't have maximal range for their precision and this
9526 transformation effectively doesn't preserve non-maximal ranges. */
9527 if (TREE_CODE (type) == INTEGER_TYPE
9528 && TREE_CODE (op0) == BIT_AND_EXPR
9529 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9531 tree and_expr = op0;
9532 tree and0 = TREE_OPERAND (and_expr, 0);
9533 tree and1 = TREE_OPERAND (and_expr, 1);
9534 int change = 0;
9536 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9537 || (TYPE_PRECISION (type)
9538 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9539 change = 1;
9540 else if (TYPE_PRECISION (TREE_TYPE (and1))
9541 <= HOST_BITS_PER_WIDE_INT
9542 && tree_fits_uhwi_p (and1))
9544 unsigned HOST_WIDE_INT cst;
9546 cst = tree_to_uhwi (and1);
9547 cst &= HOST_WIDE_INT_M1U
9548 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9549 change = (cst == 0);
9550 if (change
9551 && !flag_syntax_only
9552 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9553 == ZERO_EXTEND))
9555 tree uns = unsigned_type_for (TREE_TYPE (and0));
9556 and0 = fold_convert_loc (loc, uns, and0);
9557 and1 = fold_convert_loc (loc, uns, and1);
9560 if (change)
9562 tem = force_fit_type (type, wi::to_widest (and1), 0,
9563 TREE_OVERFLOW (and1));
9564 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9565 fold_convert_loc (loc, type, and0), tem);
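/* Concrete instance (editorial sketch): with 32-bit int X,
   (long) (x & 0xff) folds to (long) x & 0xff, because 0xff does not have
   the sign bit of the narrower type set; on targets whose loads
   zero-extend, the inner operand may first be converted through the
   corresponding unsigned type.  */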
9569 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9570 cast (T1)X will fold away. We assume that this happens when X itself
9571 is a cast. */
9572 if (POINTER_TYPE_P (type)
9573 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9574 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9576 tree arg00 = TREE_OPERAND (arg0, 0);
9577 tree arg01 = TREE_OPERAND (arg0, 1);
9579 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9580 when the pointed type needs higher alignment than
9581 the p+ first operand's pointed type. */
9582 if (!in_gimple_form
9583 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9584 && (min_align_of_type (TREE_TYPE (type))
9585 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9586 return NULL_TREE;
9588 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9589 when type is a reference type and arg00's type is not,
9590 because arg00 could be validly nullptr and if arg01 doesn't return,
9591 we don't want false positive binding of reference to nullptr. */
9592 if (TREE_CODE (type) == REFERENCE_TYPE
9593 && !in_gimple_form
9594 && sanitize_flags_p (SANITIZE_NULL)
9595 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9596 return NULL_TREE;
9598 arg00 = fold_convert_loc (loc, type, arg00);
9599 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9602 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9603 of the same precision, and X is an integer type not narrower than
9604 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9605 if (INTEGRAL_TYPE_P (type)
9606 && TREE_CODE (op0) == BIT_NOT_EXPR
9607 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9608 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9609 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9611 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9612 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9613 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9614 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9615 fold_convert_loc (loc, type, tem));
9618 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9619 type of X and Y (integer types only). */
9620 if (INTEGRAL_TYPE_P (type)
9621 && TREE_CODE (op0) == MULT_EXPR
9622 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9623 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9624 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9625 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9627 /* Be careful not to introduce new overflows. */
9628 tree mult_type;
9629 if (TYPE_OVERFLOW_WRAPS (type))
9630 mult_type = type;
9631 else
9632 mult_type = unsigned_type_for (type);
9634 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9636 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9637 fold_convert_loc (loc, mult_type,
9638 TREE_OPERAND (op0, 0)),
9639 fold_convert_loc (loc, mult_type,
9640 TREE_OPERAND (op0, 1)));
9641 return fold_convert_loc (loc, type, tem);
9645 return NULL_TREE;
9647 case VIEW_CONVERT_EXPR:
9648 if (TREE_CODE (op0) == MEM_REF)
9650 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9651 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9652 tem = fold_build2_loc (loc, MEM_REF, type,
9653 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9654 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9655 return tem;
9658 return NULL_TREE;
9660 case NEGATE_EXPR:
9661 tem = fold_negate_expr (loc, arg0);
9662 if (tem)
9663 return fold_convert_loc (loc, type, tem);
9664 return NULL_TREE;
9666 case ABS_EXPR:
9667 /* Convert fabs((double)float) into (double)fabsf(float). */
9668 if (TREE_CODE (arg0) == NOP_EXPR
9669 && TREE_CODE (type) == REAL_TYPE)
9671 tree targ0 = strip_float_extensions (arg0);
9672 if (targ0 != arg0)
9673 return fold_convert_loc (loc, type,
9674 fold_build1_loc (loc, ABS_EXPR,
9675 TREE_TYPE (targ0),
9676 targ0));
9678 return NULL_TREE;
9680 case BIT_NOT_EXPR:
9681 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9682 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9683 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9684 fold_convert_loc (loc, type,
9685 TREE_OPERAND (arg0, 0)))))
9686 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9687 fold_convert_loc (loc, type,
9688 TREE_OPERAND (arg0, 1)));
9689 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9690 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9691 fold_convert_loc (loc, type,
9692 TREE_OPERAND (arg0, 1)))))
9693 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9694 fold_convert_loc (loc, type,
9695 TREE_OPERAND (arg0, 0)), tem);
9697 return NULL_TREE;
9699 case TRUTH_NOT_EXPR:
9700 /* Note that the operand of this must be an int
9701 and its values must be 0 or 1.
9702 ("true" is a fixed value perhaps depending on the language,
9703 but we don't handle values other than 1 correctly yet.) */
9704 tem = fold_truth_not_expr (loc, arg0);
9705 if (!tem)
9706 return NULL_TREE;
9707 return fold_convert_loc (loc, type, tem);
9709 case INDIRECT_REF:
9710 /* Fold *&X to X if X is an lvalue. */
9711 if (TREE_CODE (op0) == ADDR_EXPR)
9713 tree op00 = TREE_OPERAND (op0, 0);
9714 if ((VAR_P (op00)
9715 || TREE_CODE (op00) == PARM_DECL
9716 || TREE_CODE (op00) == RESULT_DECL)
9717 && !TREE_READONLY (op00))
9718 return op00;
9720 return NULL_TREE;
9722 default:
9723 return NULL_TREE;
9724 } /* switch (code) */
9728 /* If the operation was a conversion do _not_ mark a resulting constant
9729 with TREE_OVERFLOW if the original constant was not. These conversions
9730 have implementation defined behavior and retaining the TREE_OVERFLOW
9731 flag here would confuse later passes such as VRP. */
9732 tree
9733 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9734 tree type, tree op0)
9736 tree res = fold_unary_loc (loc, code, type, op0);
9737 if (res
9738 && TREE_CODE (res) == INTEGER_CST
9739 && TREE_CODE (op0) == INTEGER_CST
9740 && CONVERT_EXPR_CODE_P (code))
9741 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9743 return res;
9746 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9747 operands OP0 and OP1. LOC is the location of the resulting expression.
9748 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9749 Return the folded expression if folding is successful. Otherwise,
9750 return NULL_TREE. */
9751 static tree
9752 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9753 tree arg0, tree arg1, tree op0, tree op1)
9755 tree tem;
9757 /* We only do these simplifications if we are optimizing. */
9758 if (!optimize)
9759 return NULL_TREE;
9761 /* Check for things like (A || B) && (A || C). We can convert this
9762 to A || (B && C). Note that either operator can be any of the four
9763 truth and/or operations and the transformation will still be
9764 valid. Also note that we only care about order for the
9765 ANDIF and ORIF operators. If B contains side effects, this
9766 might change the truth-value of A. */
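/* For example (editorial sketch): (a || b) && (a || c) folds to
   a || (b && c), provided B has no side effects; with the non-short-circuit
   TRUTH_AND_EXPR/TRUTH_OR_EXPR codes, the commuted operand orders below
   are matched as well.  */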
9767 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9768 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9769 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9770 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9771 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9772 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9774 tree a00 = TREE_OPERAND (arg0, 0);
9775 tree a01 = TREE_OPERAND (arg0, 1);
9776 tree a10 = TREE_OPERAND (arg1, 0);
9777 tree a11 = TREE_OPERAND (arg1, 1);
9778 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9779 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9780 && (code == TRUTH_AND_EXPR
9781 || code == TRUTH_OR_EXPR));
9783 if (operand_equal_p (a00, a10, 0))
9784 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9785 fold_build2_loc (loc, code, type, a01, a11));
9786 else if (commutative && operand_equal_p (a00, a11, 0))
9787 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9788 fold_build2_loc (loc, code, type, a01, a10));
9789 else if (commutative && operand_equal_p (a01, a10, 0))
9790 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9791 fold_build2_loc (loc, code, type, a00, a11));
9793 /* This case is tricky because we must either have commutative
9794 operators or else A10 must not have side-effects. */
9796 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9797 && operand_equal_p (a01, a11, 0))
9798 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9799 fold_build2_loc (loc, code, type, a00, a10),
9800 a01);
9803 /* See if we can build a range comparison. */
9804 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9805 return tem;
9807 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9808 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9810 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9811 if (tem)
9812 return fold_build2_loc (loc, code, type, tem, arg1);
9815 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9816 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9818 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9819 if (tem)
9820 return fold_build2_loc (loc, code, type, arg0, tem);
9823 /* Check for the possibility of merging component references. If our
9824 lhs is another similar operation, try to merge its rhs with our
9825 rhs. Then try to merge our lhs and rhs. */
9826 if (TREE_CODE (arg0) == code
9827 && (tem = fold_truth_andor_1 (loc, code, type,
9828 TREE_OPERAND (arg0, 1), arg1)) != 0)
9829 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9831 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9832 return tem;
9834 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9835 if (param_logical_op_non_short_circuit != -1)
9836 logical_op_non_short_circuit
9837 = param_logical_op_non_short_circuit;
9838 if (logical_op_non_short_circuit
9839 && !sanitize_coverage_p ()
9840 && (code == TRUTH_AND_EXPR
9841 || code == TRUTH_ANDIF_EXPR
9842 || code == TRUTH_OR_EXPR
9843 || code == TRUTH_ORIF_EXPR))
9845 enum tree_code ncode, icode;
9847 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9848 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9849 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9851 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9852 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9853 We don't want to pack more than two leaves into a non-IF AND/OR
9854 expression.
9855 If the tree code of the left-hand operand isn't an AND/OR-IF code and isn't
9856 equal to IF-CODE, then we don't want to add the right-hand operand.
9857 If the inner right-hand side of the left-hand operand has
9858 side-effects, or isn't simple, then we can't add to it,
9859 as otherwise we might destroy the if-sequence. */
9860 if (TREE_CODE (arg0) == icode
9861 && simple_condition_p (arg1)
9862 /* Needed for sequence points to handle traps and
9863 side-effects. */
9864 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9866 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9867 arg1);
9868 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9869 tem);
9871 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9872 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9873 else if (TREE_CODE (arg1) == icode
9874 && simple_condition_p (arg0)
9875 /* Needed for sequence points to handle traps and
9876 side-effects. */
9877 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9879 tem = fold_build2_loc (loc, ncode, type,
9880 arg0, TREE_OPERAND (arg1, 0));
9881 return fold_build2_loc (loc, icode, type, tem,
9882 TREE_OPERAND (arg1, 1));
9884 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9885 into (A OR B).
9886 For sequence point consistency, we need to check for traps
9887 and side-effects. */
9888 else if (code == icode && simple_condition_p (arg0)
9889 && simple_condition_p (arg1))
9890 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9893 return NULL_TREE;
9896 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9897 by changing CODE to reduce the magnitude of constants involved in
9898 ARG0 of the comparison.
9899 Returns a canonicalized comparison tree if a simplification was
9900 possible, otherwise returns NULL_TREE.
9901 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9902 valid if signed overflow is undefined. */
9904 static tree
9905 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9906 tree arg0, tree arg1,
9907 bool *strict_overflow_p)
9909 enum tree_code code0 = TREE_CODE (arg0);
9910 tree t, cst0 = NULL_TREE;
9911 int sgn0;
9913 /* Match A +- CST code arg1. We can change this only if overflow
9914 is undefined. */
9915 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9916 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9917 /* In principle pointers also have undefined overflow behavior,
9918 but that causes problems elsewhere. */
9919 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9920 && (code0 == MINUS_EXPR
9921 || code0 == PLUS_EXPR)
9922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9923 return NULL_TREE;
9925 /* Identify the constant in arg0 and its sign. */
9926 cst0 = TREE_OPERAND (arg0, 1);
9927 sgn0 = tree_int_cst_sgn (cst0);
9929 /* Overflowed constants and zero will cause problems. */
9930 if (integer_zerop (cst0)
9931 || TREE_OVERFLOW (cst0))
9932 return NULL_TREE;
9934 /* See if we can reduce the magnitude of the constant in
9935 arg0 by changing the comparison code. */
9936 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9937 if (code == LT_EXPR
9938 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9939 code = LE_EXPR;
9940 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9941 else if (code == GT_EXPR
9942 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9943 code = GE_EXPR;
9944 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9945 else if (code == LE_EXPR
9946 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9947 code = LT_EXPR;
9948 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9949 else if (code == GE_EXPR
9950 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9951 code = GT_EXPR;
9952 else
9953 return NULL_TREE;
9954 *strict_overflow_p = true;
9956 /* Now build the constant reduced in magnitude. But not if that
9957 would produce one outside of its type's range. */
9958 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9959 && ((sgn0 == 1
9960 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9961 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9962 || (sgn0 == -1
9963 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9964 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9965 return NULL_TREE;
9967 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9968 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9969 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9970 t = fold_convert (TREE_TYPE (arg1), t);
9972 return fold_build2_loc (loc, code, type, t, arg1);
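/* Worked example (editorial sketch, valid only because signed overflow is
   assumed undefined): x - 3 < y is canonicalized to x - 2 <= y; relaxing
   LT_EXPR to LE_EXPR lets the magnitude of the constant drop from 3 to 2,
   and *STRICT_OVERFLOW_P is set accordingly.  */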
9975 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9976 overflow further. Try to decrease the magnitude of constants involved
9977 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9978 and put sole constants at the second argument position.
9979 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9981 static tree
9982 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9983 tree arg0, tree arg1)
9985 tree t;
9986 bool strict_overflow_p;
9987 const char * const warnmsg = G_("assuming signed overflow does not occur "
9988 "when reducing constant in comparison");
9990 /* Try canonicalization by simplifying arg0. */
9991 strict_overflow_p = false;
9992 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9993 &strict_overflow_p);
9994 if (t)
9996 if (strict_overflow_p)
9997 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9998 return t;
10001 /* Try canonicalization by simplifying arg1 using the swapped
10002 comparison. */
10003 code = swap_tree_comparison (code);
10004 strict_overflow_p = false;
10005 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
10006 &strict_overflow_p);
10007 if (t && strict_overflow_p)
10008 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10009 return t;
10012 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
10013 space. This is used to avoid issuing overflow warnings for
10014 expressions like &p->x which cannot wrap. */
10016 static bool
10017 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10019 if (!POINTER_TYPE_P (TREE_TYPE (base)))
10020 return true;
10022 if (maybe_lt (bitpos, 0))
10023 return true;
10025 poly_wide_int wi_offset;
10026 int precision = TYPE_PRECISION (TREE_TYPE (base));
10027 if (offset == NULL_TREE)
10028 wi_offset = wi::zero (precision);
10029 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10030 return true;
10031 else
10032 wi_offset = wi::to_poly_wide (offset);
10034 wi::overflow_type overflow;
10035 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10036 precision);
10037 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10038 if (overflow)
10039 return true;
10041 poly_uint64 total_hwi, size;
10042 if (!total.to_uhwi (&total_hwi)
10043 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10044 &size)
10045 || known_eq (size, 0U))
10046 return true;
10048 if (known_le (total_hwi, size))
10049 return false;
10051 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10052 array. */
10053 if (TREE_CODE (base) == ADDR_EXPR
10054 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10055 &size)
10056 && maybe_ne (size, 0U)
10057 && known_le (total_hwi, size))
10058 return false;
10060 return true;
10063 /* Return a positive integer when the symbol DECL is known to have
10064 a nonzero address, zero when it's known not to (e.g., it's a weak
10065 symbol), and a negative integer when the symbol is not yet in the
10066 symbol table and so whether or not its address is zero is unknown.
10067 For function-local objects, always return a positive integer. */
10068 static int
10069 maybe_nonzero_address (tree decl)
10071 /* Normally, don't do anything for variables and functions before symtab is
10072 built; it is quite possible that DECL will be declared weak later.
10073 But if folding_initializer, we need a constant answer now, so create
10074 the symtab entry and prevent later weak declaration. */
10075 if (DECL_P (decl) && decl_in_symtab_p (decl))
10076 if (struct symtab_node *symbol
10077 = (folding_initializer
10078 ? symtab_node::get_create (decl)
10079 : symtab_node::get (decl)))
10080 return symbol->nonzero_address ();
10082 /* Function local objects are never NULL. */
10083 if (DECL_P (decl)
10084 && (DECL_CONTEXT (decl)
10085 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10086 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10087 return 1;
10089 return -1;
10092 /* Subroutine of fold_binary. This routine performs all of the
10093 transformations that are common to the equality/inequality
10094 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10095 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10096 fold_binary should call fold_binary. Fold a comparison with
10097 tree code CODE and type TYPE with operands OP0 and OP1. Return
10098 the folded comparison or NULL_TREE. */
10100 static tree
10101 fold_comparison (location_t loc, enum tree_code code, tree type,
10102 tree op0, tree op1)
10104 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10105 tree arg0, arg1, tem;
10107 arg0 = op0;
10108 arg1 = op1;
10110 STRIP_SIGN_NOPS (arg0);
10111 STRIP_SIGN_NOPS (arg1);
10113 /* For comparisons of pointers we can decompose it to a compile time
10114 comparison of the base objects and the offsets into the object.
10115 This requires at least one operand being an ADDR_EXPR or a
10116 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10117 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10118 && (TREE_CODE (arg0) == ADDR_EXPR
10119 || TREE_CODE (arg1) == ADDR_EXPR
10120 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10121 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10123 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10124 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10125 machine_mode mode;
10126 int volatilep, reversep, unsignedp;
10127 bool indirect_base0 = false, indirect_base1 = false;
10129 /* Get base and offset for the access. Strip ADDR_EXPR for
10130 get_inner_reference, but put it back by stripping INDIRECT_REF
10131 off the base object if possible. indirect_baseN will be true
10132 if baseN is not an address but refers to the object itself. */
10133 base0 = arg0;
10134 if (TREE_CODE (arg0) == ADDR_EXPR)
10136 base0
10137 = get_inner_reference (TREE_OPERAND (arg0, 0),
10138 &bitsize, &bitpos0, &offset0, &mode,
10139 &unsignedp, &reversep, &volatilep);
10140 if (TREE_CODE (base0) == INDIRECT_REF)
10141 base0 = TREE_OPERAND (base0, 0);
10142 else
10143 indirect_base0 = true;
10145 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10147 base0 = TREE_OPERAND (arg0, 0);
10148 STRIP_SIGN_NOPS (base0);
10149 if (TREE_CODE (base0) == ADDR_EXPR)
10151 base0
10152 = get_inner_reference (TREE_OPERAND (base0, 0),
10153 &bitsize, &bitpos0, &offset0, &mode,
10154 &unsignedp, &reversep, &volatilep);
10155 if (TREE_CODE (base0) == INDIRECT_REF)
10156 base0 = TREE_OPERAND (base0, 0);
10157 else
10158 indirect_base0 = true;
10160 if (offset0 == NULL_TREE || integer_zerop (offset0))
10161 offset0 = TREE_OPERAND (arg0, 1);
10162 else
10163 offset0 = size_binop (PLUS_EXPR, offset0,
10164 TREE_OPERAND (arg0, 1));
10165 if (poly_int_tree_p (offset0))
10167 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10168 TYPE_PRECISION (sizetype));
10169 tem <<= LOG2_BITS_PER_UNIT;
10170 tem += bitpos0;
10171 if (tem.to_shwi (&bitpos0))
10172 offset0 = NULL_TREE;
10176 base1 = arg1;
10177 if (TREE_CODE (arg1) == ADDR_EXPR)
10179 base1
10180 = get_inner_reference (TREE_OPERAND (arg1, 0),
10181 &bitsize, &bitpos1, &offset1, &mode,
10182 &unsignedp, &reversep, &volatilep);
10183 if (TREE_CODE (base1) == INDIRECT_REF)
10184 base1 = TREE_OPERAND (base1, 0);
10185 else
10186 indirect_base1 = true;
10188 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10190 base1 = TREE_OPERAND (arg1, 0);
10191 STRIP_SIGN_NOPS (base1);
10192 if (TREE_CODE (base1) == ADDR_EXPR)
10194 base1
10195 = get_inner_reference (TREE_OPERAND (base1, 0),
10196 &bitsize, &bitpos1, &offset1, &mode,
10197 &unsignedp, &reversep, &volatilep);
10198 if (TREE_CODE (base1) == INDIRECT_REF)
10199 base1 = TREE_OPERAND (base1, 0);
10200 else
10201 indirect_base1 = true;
10203 if (offset1 == NULL_TREE || integer_zerop (offset1))
10204 offset1 = TREE_OPERAND (arg1, 1);
10205 else
10206 offset1 = size_binop (PLUS_EXPR, offset1,
10207 TREE_OPERAND (arg1, 1));
10208 if (poly_int_tree_p (offset1))
10210 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10211 TYPE_PRECISION (sizetype));
10212 tem <<= LOG2_BITS_PER_UNIT;
10213 tem += bitpos1;
10214 if (tem.to_shwi (&bitpos1))
10215 offset1 = NULL_TREE;
10219 /* If we have equivalent bases we might be able to simplify. */
10220 if (indirect_base0 == indirect_base1
10221 && operand_equal_p (base0, base1,
10222 indirect_base0 ? OEP_ADDRESS_OF : 0))
10224 /* We can fold this expression to a constant if the non-constant
10225 offset parts are equal. */
10226 if ((offset0 == offset1
10227 || (offset0 && offset1
10228 && operand_equal_p (offset0, offset1, 0)))
10229 && (equality_code
10230 || (indirect_base0
10231 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10232 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10234 if (!equality_code
10235 && maybe_ne (bitpos0, bitpos1)
10236 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10237 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10238 fold_overflow_warning (("assuming pointer wraparound does not "
10239 "occur when comparing P +- C1 with "
10240 "P +- C2"),
10241 WARN_STRICT_OVERFLOW_CONDITIONAL);
10243 switch (code)
10245 case EQ_EXPR:
10246 if (known_eq (bitpos0, bitpos1))
10247 return constant_boolean_node (true, type);
10248 if (known_ne (bitpos0, bitpos1))
10249 return constant_boolean_node (false, type);
10250 break;
10251 case NE_EXPR:
10252 if (known_ne (bitpos0, bitpos1))
10253 return constant_boolean_node (true, type);
10254 if (known_eq (bitpos0, bitpos1))
10255 return constant_boolean_node (false, type);
10256 break;
10257 case LT_EXPR:
10258 if (known_lt (bitpos0, bitpos1))
10259 return constant_boolean_node (true, type);
10260 if (known_ge (bitpos0, bitpos1))
10261 return constant_boolean_node (false, type);
10262 break;
10263 case LE_EXPR:
10264 if (known_le (bitpos0, bitpos1))
10265 return constant_boolean_node (true, type);
10266 if (known_gt (bitpos0, bitpos1))
10267 return constant_boolean_node (false, type);
10268 break;
10269 case GE_EXPR:
10270 if (known_ge (bitpos0, bitpos1))
10271 return constant_boolean_node (true, type);
10272 if (known_lt (bitpos0, bitpos1))
10273 return constant_boolean_node (false, type);
10274 break;
10275 case GT_EXPR:
10276 if (known_gt (bitpos0, bitpos1))
10277 return constant_boolean_node (true, type);
10278 if (known_le (bitpos0, bitpos1))
10279 return constant_boolean_node (false, type);
10280 break;
10281 default:;
10284 /* We can simplify the comparison to a comparison of the variable
10285 offset parts if the constant offset parts are equal.
10286 Be careful to use signed sizetype here because otherwise we
10287 mess with array offsets in the wrong way. This is possible
10288 because pointer arithmetic is restricted to remain within an
10289 object and overflow on pointer differences is undefined as of
10290 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10291 else if (known_eq (bitpos0, bitpos1)
10292 && (equality_code
10293 || (indirect_base0
10294 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10295 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10297 /* By converting to signed sizetype we cover middle-end pointer
10298 arithmetic which operates on unsigned pointer types of size
10299 type size and ARRAY_REF offsets which are properly sign or
10300 zero extended from their type in case it is narrower than
10301 sizetype. */
10302 if (offset0 == NULL_TREE)
10303 offset0 = build_int_cst (ssizetype, 0);
10304 else
10305 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10306 if (offset1 == NULL_TREE)
10307 offset1 = build_int_cst (ssizetype, 0);
10308 else
10309 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10311 if (!equality_code
10312 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10313 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10314 fold_overflow_warning (("assuming pointer wraparound does not "
10315 "occur when comparing P +- C1 with "
10316 "P +- C2"),
10317 WARN_STRICT_OVERFLOW_COMPARISON);
10319 return fold_build2_loc (loc, code, type, offset0, offset1);
10322 /* For equal offsets we can simplify to a comparison of the
10323 base addresses. */
10324 else if (known_eq (bitpos0, bitpos1)
10325 && (indirect_base0
10326 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10327 && (indirect_base1
10328 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10329 && ((offset0 == offset1)
10330 || (offset0 && offset1
10331 && operand_equal_p (offset0, offset1, 0))))
10333 if (indirect_base0)
10334 base0 = build_fold_addr_expr_loc (loc, base0);
10335 if (indirect_base1)
10336 base1 = build_fold_addr_expr_loc (loc, base1);
10337 return fold_build2_loc (loc, code, type, base0, base1);
10339 /* Comparison between an ordinary (non-weak) symbol and a null
10340 pointer can be eliminated since such symbols must have a non
10341 null address. In C, relational expressions between pointers
10342 to objects and null pointers are undefined. The results
10343 below follow the C++ rules with the additional property that
10344 every object pointer compares greater than a null pointer. */
10346 else if (((DECL_P (base0)
10347 && maybe_nonzero_address (base0) > 0
10348 /* Avoid folding references to struct members at offset 0 to
10349 prevent tests like '&ptr->firstmember == 0' from getting
10350 eliminated. When ptr is null, although the -> expression
10351 is strictly speaking invalid, GCC retains it as a matter
10352 of QoI. See PR c/44555. */
10353 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10354 || CONSTANT_CLASS_P (base0))
10355 && indirect_base0
10356 /* The caller guarantees that when one of the arguments is
10357 constant (i.e., null in this case) it is second. */
10358 && integer_zerop (arg1))
10360 switch (code)
10362 case EQ_EXPR:
10363 case LE_EXPR:
10364 case LT_EXPR:
10365 return constant_boolean_node (false, type);
10366 case GE_EXPR:
10367 case GT_EXPR:
10368 case NE_EXPR:
10369 return constant_boolean_node (true, type);
10370 default:
10371 gcc_unreachable ();
10376 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10377 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10378 the resulting offset is smaller in absolute value than the
10379 original one and has the same sign. */
10380 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10381 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10382 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10383 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10384 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10385 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10386 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10387 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10389 tree const1 = TREE_OPERAND (arg0, 1);
10390 tree const2 = TREE_OPERAND (arg1, 1);
10391 tree variable1 = TREE_OPERAND (arg0, 0);
10392 tree variable2 = TREE_OPERAND (arg1, 0);
10393 tree cst;
10394 const char * const warnmsg = G_("assuming signed overflow does not "
10395 "occur when combining constants around "
10396 "a comparison");
10398 /* Put the constant on the side where it doesn't overflow and is
10399 of lower absolute value and of the same sign as before. */
10400 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10401 ? MINUS_EXPR : PLUS_EXPR,
10402 const2, const1);
10403 if (!TREE_OVERFLOW (cst)
10404 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10405 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10407 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10408 return fold_build2_loc (loc, code, type,
10409 variable1,
10410 fold_build2_loc (loc, TREE_CODE (arg1),
10411 TREE_TYPE (arg1),
10412 variable2, cst));
10415 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10416 ? MINUS_EXPR : PLUS_EXPR,
10417 const1, const2);
10418 if (!TREE_OVERFLOW (cst)
10419 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10420 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10422 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10423 return fold_build2_loc (loc, code, type,
10424 fold_build2_loc (loc, TREE_CODE (arg0),
10425 TREE_TYPE (arg0),
10426 variable1, cst),
10427 variable2);
10431 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10432 if (tem)
10433 return tem;
10435 /* If we are comparing an expression that just has comparisons
10436 of two integer values, arithmetic expressions of those comparisons,
10437 and constants, we can simplify it. There are only three cases
10438 to check: the two values can either be equal, the first can be
10439 greater, or the second can be greater. Fold the expression for
10440 those three values. Since each value must be 0 or 1, we have
10441 eight possibilities, each of which corresponds to the constant 0
10442 or 1 or one of the six possible comparisons.
10444 This handles common cases like (a > b) == 0 but also handles
10445 expressions like ((x > y) - (y > x)) > 0, which supposedly
10446 occur in macroized code. */
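/* E.g. (editorial sketch): for ((x > y) - (y > x)) > 0 the three
   substitutions below evaluate to 1 ('>'), 0 ('='), and 0 ('<'), giving
   mask 4 and hence the simplified comparison x > y.  */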
10448 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10450 tree cval1 = 0, cval2 = 0;
10452 if (twoval_comparison_p (arg0, &cval1, &cval2)
10453 /* Don't handle degenerate cases here; they should already
10454 have been handled anyway. */
10455 && cval1 != 0 && cval2 != 0
10456 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10457 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10458 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10459 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10460 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10461 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10462 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10464 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10465 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10467 /* We can't just pass T to eval_subst in case cval1 or cval2
10468 was the same as ARG1. */
10470 tree high_result
10471 = fold_build2_loc (loc, code, type,
10472 eval_subst (loc, arg0, cval1, maxval,
10473 cval2, minval),
10474 arg1);
10475 tree equal_result
10476 = fold_build2_loc (loc, code, type,
10477 eval_subst (loc, arg0, cval1, maxval,
10478 cval2, maxval),
10479 arg1);
10480 tree low_result
10481 = fold_build2_loc (loc, code, type,
10482 eval_subst (loc, arg0, cval1, minval,
10483 cval2, maxval),
10484 arg1);
10486 /* All three of these results should be 0 or 1. Confirm they are.
10487 Then use those values to select the proper code to use. */
10489 if (TREE_CODE (high_result) == INTEGER_CST
10490 && TREE_CODE (equal_result) == INTEGER_CST
10491 && TREE_CODE (low_result) == INTEGER_CST)
10493 /* Make a 3-bit mask with the high-order bit being the
10494 value for `>', the next for '=', and the low for '<'. */
10495 switch ((integer_onep (high_result) * 4)
10496 + (integer_onep (equal_result) * 2)
10497 + integer_onep (low_result))
10499 case 0:
10500 /* Always false. */
10501 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10502 case 1:
10503 code = LT_EXPR;
10504 break;
10505 case 2:
10506 code = EQ_EXPR;
10507 break;
10508 case 3:
10509 code = LE_EXPR;
10510 break;
10511 case 4:
10512 code = GT_EXPR;
10513 break;
10514 case 5:
10515 code = NE_EXPR;
10516 break;
10517 case 6:
10518 code = GE_EXPR;
10519 break;
10520 case 7:
10521 /* Always true. */
10522 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10525 return fold_build2_loc (loc, code, type, cval1, cval2);
10530 return NULL_TREE;
10534 /* Subroutine of fold_binary. Optimize complex multiplications of the
10535 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10536 argument EXPR represents the expression "z" of type TYPE. */
10538 static tree
10539 fold_mult_zconjz (location_t loc, tree type, tree expr)
10541 tree itype = TREE_TYPE (type);
10542 tree rpart, ipart, tem;
10544 if (TREE_CODE (expr) == COMPLEX_EXPR)
10546 rpart = TREE_OPERAND (expr, 0);
10547 ipart = TREE_OPERAND (expr, 1);
10549 else if (TREE_CODE (expr) == COMPLEX_CST)
10551 rpart = TREE_REALPART (expr);
10552 ipart = TREE_IMAGPART (expr);
10554 else
10556 expr = save_expr (expr);
10557 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10558 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10561 rpart = save_expr (rpart);
10562 ipart = save_expr (ipart);
10563 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10564 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10565 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10566 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10567 build_zero_cst (itype));
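/* Numeric check (editorial sketch): for z = 3 + 4i, z * conj(z) folds to
   COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. 25 + 0i.  */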
10571 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10572 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10573 true if successful. */
10575 static bool
10576 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10578 unsigned HOST_WIDE_INT i, nunits;
10580 if (TREE_CODE (arg) == VECTOR_CST
10581 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10583 for (i = 0; i < nunits; ++i)
10584 elts[i] = VECTOR_CST_ELT (arg, i);
10586 else if (TREE_CODE (arg) == CONSTRUCTOR)
10588 constructor_elt *elt;
10590 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10591 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10592 return false;
10593 else
10594 elts[i] = elt->value;
10596 else
10597 return false;
10598 for (; i < nelts; i++)
10599 elts[i]
10600 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10601 return true;
10604 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10605 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10606 NULL_TREE otherwise. */
10608 tree
10609 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10611 unsigned int i;
10612 unsigned HOST_WIDE_INT nelts;
10613 bool need_ctor = false;
10615 if (!sel.length ().is_constant (&nelts))
10616 return NULL_TREE;
10617 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10618 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10619 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10620 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10621 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10622 return NULL_TREE;
10624 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10625 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10626 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10627 return NULL_TREE;
10629 tree_vector_builder out_elts (type, nelts, 1);
10630 for (i = 0; i < nelts; i++)
10632 HOST_WIDE_INT index;
10633 if (!sel[i].is_constant (&index))
10634 return NULL_TREE;
10635 if (!CONSTANT_CLASS_P (in_elts[index]))
10636 need_ctor = true;
10637 out_elts.quick_push (unshare_expr (in_elts[index]));
10640 if (need_ctor)
10642 vec<constructor_elt, va_gc> *v;
10643 vec_alloc (v, nelts);
10644 for (i = 0; i < nelts; i++)
10645 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10646 return build_constructor (type, v);
10648 else
10649 return out_elts.build ();
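/* Example (editorial sketch): with ARG0 = { 1, 2, 3, 4 },
   ARG1 = { 5, 6, 7, 8 } and SEL = { 0, 4, 1, 5 }, indices 0-3 select from
   ARG0 and 4-7 from ARG1, so the folded result is { 1, 5, 2, 6 }.  */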
10652 /* Try to fold a pointer difference of type TYPE of two address expressions of
10653 array references AREF0 and AREF1 using location LOC. Return a
10654 simplified expression for the difference or NULL_TREE. */
10656 static tree
10657 fold_addr_of_array_ref_difference (location_t loc, tree type,
10658 tree aref0, tree aref1,
10659 bool use_pointer_diff)
10661 tree base0 = TREE_OPERAND (aref0, 0);
10662 tree base1 = TREE_OPERAND (aref1, 0);
10663 tree base_offset = build_int_cst (type, 0);
10665 /* If the bases are array references as well, recurse. If the bases
10666 are pointer indirections compute the difference of the pointers.
10667 If the bases are equal, we are set. */
10668 if ((TREE_CODE (base0) == ARRAY_REF
10669 && TREE_CODE (base1) == ARRAY_REF
10670 && (base_offset
10671 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10672 use_pointer_diff)))
10673 || (INDIRECT_REF_P (base0)
10674 && INDIRECT_REF_P (base1)
10675 && (base_offset
10676 = use_pointer_diff
10677 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10678 TREE_OPERAND (base0, 0),
10679 TREE_OPERAND (base1, 0))
10680 : fold_binary_loc (loc, MINUS_EXPR, type,
10681 fold_convert (type,
10682 TREE_OPERAND (base0, 0)),
10683 fold_convert (type,
10684 TREE_OPERAND (base1, 0)))))
10685 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10687 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10688 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10689 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10690 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10691 return fold_build2_loc (loc, PLUS_EXPR, type,
10692 base_offset,
10693 fold_build2_loc (loc, MULT_EXPR, type,
10694 diff, esz));
10696 return NULL_TREE;
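/* Example (editorial sketch): &a[i] - &a[j] folds to
   0 + (i - j) * sizeof (a[0]); for nested references such as
   &a[i][k] - &a[j][l], the bases a[i] and a[j] are themselves ARRAY_REFs,
   so the function recurses to compute BASE_OFFSET first.  */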
10699 /* If the real or vector real constant CST of type TYPE has an exact
10700 inverse, return it, else return NULL. */
10702 tree
10703 exact_inverse (tree type, tree cst)
10705 REAL_VALUE_TYPE r;
10706 tree unit_type;
10707 machine_mode mode;
10709 switch (TREE_CODE (cst))
10711 case REAL_CST:
10712 r = TREE_REAL_CST (cst);
10714 if (exact_real_inverse (TYPE_MODE (type), &r))
10715 return build_real (type, r);
10717 return NULL_TREE;
10719 case VECTOR_CST:
10721 unit_type = TREE_TYPE (type);
10722 mode = TYPE_MODE (unit_type);
10724 tree_vector_builder elts;
10725 if (!elts.new_unary_operation (type, cst, false))
10726 return NULL_TREE;
10727 unsigned int count = elts.encoded_nelts ();
10728 for (unsigned int i = 0; i < count; ++i)
10730 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10731 if (!exact_real_inverse (mode, &r))
10732 return NULL_TREE;
10733 elts.quick_push (build_real (unit_type, r));
10736 return elts.build ();
10739 default:
10740 return NULL_TREE;
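/* Examples (editorial sketch): a REAL_CST of 4.0 yields 0.25, which is
   exactly representable in binary; 3.0 yields NULL_TREE because 1/3 has
   no exact binary representation.  */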
10744 /* Mask out the tz least significant bits of X of type TYPE where
10745 tz is the number of trailing zeroes in Y. */
10746 static wide_int
10747 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10749 int tz = wi::ctz (y);
10750 if (tz > 0)
10751 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10752 return x;
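/* Example (editorial sketch): for Y == 24 (binary 11000, so tz == 3) the
   result is X & ~7 -- the three least significant bits of X are cleared.  */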
10755 /* Return true when T is an address and is known to be nonzero.
10756 For floating point we further ensure that T is not denormal.
10757 Similar logic is present in nonzero_address in rtlanal.h.
10759 If the return value is based on the assumption that signed overflow
10760 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10761 change *STRICT_OVERFLOW_P. */
10763 static bool
10764 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10766 tree type = TREE_TYPE (t);
10767 enum tree_code code;
10769 /* Doing something useful for floating point would need more work. */
10770 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10771 return false;
10773 code = TREE_CODE (t);
10774 switch (TREE_CODE_CLASS (code))
10776 case tcc_unary:
10777 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10778 strict_overflow_p);
10779 case tcc_binary:
10780 case tcc_comparison:
10781 return tree_binary_nonzero_warnv_p (code, type,
10782 TREE_OPERAND (t, 0),
10783 TREE_OPERAND (t, 1),
10784 strict_overflow_p);
10785 case tcc_constant:
10786 case tcc_declaration:
10787 case tcc_reference:
10788 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10790 default:
10791 break;
10794 switch (code)
10796 case TRUTH_NOT_EXPR:
10797 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10798 strict_overflow_p);
10800 case TRUTH_AND_EXPR:
10801 case TRUTH_OR_EXPR:
10802 case TRUTH_XOR_EXPR:
10803 return tree_binary_nonzero_warnv_p (code, type,
10804 TREE_OPERAND (t, 0),
10805 TREE_OPERAND (t, 1),
10806 strict_overflow_p);
10808 case COND_EXPR:
10809 case CONSTRUCTOR:
10810 case OBJ_TYPE_REF:
10811 case ADDR_EXPR:
10812 case WITH_SIZE_EXPR:
10813 case SSA_NAME:
10814 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10816 case COMPOUND_EXPR:
10817 case MODIFY_EXPR:
10818 case BIND_EXPR:
10819 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10820 strict_overflow_p);
10822 case SAVE_EXPR:
10823 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10824 strict_overflow_p);
10826 case CALL_EXPR:
10828 tree fndecl = get_callee_fndecl (t);
10829 if (!fndecl) return false;
10830 if (flag_delete_null_pointer_checks && !flag_check_new
10831 && DECL_IS_OPERATOR_NEW_P (fndecl)
10832 && !TREE_NOTHROW (fndecl))
10833 return true;
10834 if (flag_delete_null_pointer_checks
10835 && lookup_attribute ("returns_nonnull",
10836 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10837 return true;
10838 return alloca_call_p (t);
10841 default:
10842 break;
10844 return false;
10847 /* Return true when T is an address and is known to be nonzero.
10848 Handle warnings about undefined signed overflow. */
10850 bool
10851 tree_expr_nonzero_p (tree t)
10853 bool ret, strict_overflow_p;
10855 strict_overflow_p = false;
10856 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10857 if (strict_overflow_p)
10858 fold_overflow_warning (("assuming signed overflow does not occur when "
10859 "determining that expression is always "
10860 "non-zero"),
10861 WARN_STRICT_OVERFLOW_MISC);
10862 return ret;
10865 /* Return true if T is known not to be equal to an integer W. */
10867 bool
10868 expr_not_equal_to (tree t, const wide_int &w)
10870 int_range_max vr;
10871 switch (TREE_CODE (t))
10873 case INTEGER_CST:
10874 return wi::to_wide (t) != w;
10876 case SSA_NAME:
10877 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10878 return false;
10880 if (cfun)
10881 get_range_query (cfun)->range_of_expr (vr, t);
10882 else
10883 get_global_range_query ()->range_of_expr (vr, t);
10885 if (!vr.undefined_p () && !vr.contains_p (w))
10886 return true;
10887 /* If T has some known zero bits and W has any of those bits set,
10888 then T is known not to be equal to W. */
10889 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10890 TYPE_PRECISION (TREE_TYPE (t))), 0))
10891 return true;
10892 return false;
10894 default:
10895 return false;
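/* Worked example for the nonzero-bits test above (illustrative): if
   get_nonzero_bits reports that the low bit of T is always zero -- T is
   known to be even -- then for W == 5 we have

     (W & ~nonzero_bits (T)) == (5 & 1) == 1 != 0

   so T can never equal 5 and expr_not_equal_to returns true even when
   no value range is recorded.  */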
10899 /* Fold a binary expression of code CODE and type TYPE with operands
10900 OP0 and OP1. LOC is the location of the resulting expression.
10901 Return the folded expression if folding is successful. Otherwise,
10902 return NULL_TREE. */
10904 tree
10905 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10906 tree op0, tree op1)
10908 enum tree_code_class kind = TREE_CODE_CLASS (code);
10909 tree arg0, arg1, tem;
10910 tree t1 = NULL_TREE;
10911 bool strict_overflow_p;
10912 unsigned int prec;
10914 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10915 && TREE_CODE_LENGTH (code) == 2
10916 && op0 != NULL_TREE
10917 && op1 != NULL_TREE);
10919 arg0 = op0;
10920 arg1 = op1;
10922 /* Strip any conversions that don't change the mode. This is
10923 safe for every expression, except for a comparison expression
10924 because its signedness is derived from its operands. So, in
10925 the latter case, only strip conversions that don't change the
10926 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10927 preserved.
10929 Note that this is done as an internal manipulation within the
10930 constant folder, in order to find the simplest representation
10931 of the arguments so that their form can be studied. In any
10932 case, the appropriate type conversions should be put back in
10933 the tree that will get out of the constant folder. */
10935 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10937 STRIP_SIGN_NOPS (arg0);
10938 STRIP_SIGN_NOPS (arg1);
10940 else
10942 STRIP_NOPS (arg0);
10943 STRIP_NOPS (arg1);
10946 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10947 constant but we can't do arithmetic on them. */
10948 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10950 tem = const_binop (code, type, arg0, arg1);
10951 if (tem != NULL_TREE)
10953 if (TREE_TYPE (tem) != type)
10954 tem = fold_convert_loc (loc, type, tem);
10955 return tem;
10959 /* If this is a commutative operation, and ARG0 is a constant, move it
10960 to ARG1 to reduce the number of tests below. */
10961 if (commutative_tree_code (code)
10962 && tree_swap_operands_p (arg0, arg1))
10963 return fold_build2_loc (loc, code, type, op1, op0);
10965 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10966 to ARG1 to reduce the number of tests below. */
10967 if (kind == tcc_comparison
10968 && tree_swap_operands_p (arg0, arg1))
10969 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
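/* For example, the two canonicalizations above rewrite

     5 + x   as   x + 5          (commutative codes)
     5 < x   as   x > 5          (comparisons, code swapped)

   so the patterns below only need to test for a constant in ARG1.  */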
10971 tem = generic_simplify (loc, code, type, op0, op1);
10972 if (tem)
10973 return tem;
10975 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10977 First check for cases where an arithmetic operation is applied to a
10978 compound, conditional, or comparison operation. Push the arithmetic
10979 operation inside the compound or conditional to see if any folding
10980 can then be done. Convert comparison to conditional for this purpose.
10981 This also optimizes non-constant cases that used to be done in
10982 expand_expr.
10984 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10985 one of the operands is a comparison and the other is a comparison, a
10986 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10987 code below would make the expression more complex. Change it to a
10988 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10989 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10991 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10992 || code == EQ_EXPR || code == NE_EXPR)
10993 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10994 && ((truth_value_p (TREE_CODE (arg0))
10995 && (truth_value_p (TREE_CODE (arg1))
10996 || (TREE_CODE (arg1) == BIT_AND_EXPR
10997 && integer_onep (TREE_OPERAND (arg1, 1)))))
10998 || (truth_value_p (TREE_CODE (arg1))
10999 && (truth_value_p (TREE_CODE (arg0))
11000 || (TREE_CODE (arg0) == BIT_AND_EXPR
11001 && integer_onep (TREE_OPERAND (arg0, 1)))))))
11003 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11004 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11005 : TRUTH_XOR_EXPR,
11006 boolean_type_node,
11007 fold_convert_loc (loc, boolean_type_node, arg0),
11008 fold_convert_loc (loc, boolean_type_node, arg1));
11010 if (code == EQ_EXPR)
11011 tem = invert_truthvalue_loc (loc, tem);
11013 return fold_convert_loc (loc, type, tem);
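/* As an example of the conversion above (illustrative only),

     (a < b) & (c < d)    becomes   (a < b) && (c < d)
     (a < b) == (c < d)   becomes   !((a < b) ^ (c < d))

   i.e. the bitwise form is replaced by the equivalent TRUTH_*_EXPR on
   boolean_type_node and then converted back to TYPE.  */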
11016 if (TREE_CODE_CLASS (code) == tcc_binary
11017 || TREE_CODE_CLASS (code) == tcc_comparison)
11019 if (TREE_CODE (arg0) == COMPOUND_EXPR)
11021 tem = fold_build2_loc (loc, code, type,
11022 fold_convert_loc (loc, TREE_TYPE (op0),
11023 TREE_OPERAND (arg0, 1)), op1);
11024 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11025 tem);
11027 if (TREE_CODE (arg1) == COMPOUND_EXPR)
11029 tem = fold_build2_loc (loc, code, type, op0,
11030 fold_convert_loc (loc, TREE_TYPE (op1),
11031 TREE_OPERAND (arg1, 1)));
11032 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11033 tem);
11036 if (TREE_CODE (arg0) == COND_EXPR
11037 || TREE_CODE (arg0) == VEC_COND_EXPR
11038 || COMPARISON_CLASS_P (arg0))
11040 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11041 arg0, arg1,
11042 /*cond_first_p=*/1);
11043 if (tem != NULL_TREE)
11044 return tem;
11047 if (TREE_CODE (arg1) == COND_EXPR
11048 || TREE_CODE (arg1) == VEC_COND_EXPR
11049 || COMPARISON_CLASS_P (arg1))
11051 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11052 arg1, arg0,
11053 /*cond_first_p=*/0);
11054 if (tem != NULL_TREE)
11055 return tem;
11059 switch (code)
11061 case MEM_REF:
11062 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11063 if (TREE_CODE (arg0) == ADDR_EXPR
11064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11066 tree iref = TREE_OPERAND (arg0, 0);
11067 return fold_build2 (MEM_REF, type,
11068 TREE_OPERAND (iref, 0),
11069 int_const_binop (PLUS_EXPR, arg1,
11070 TREE_OPERAND (iref, 1)));
11073 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11074 if (TREE_CODE (arg0) == ADDR_EXPR
11075 && handled_component_p (TREE_OPERAND (arg0, 0)))
11077 tree base;
11078 poly_int64 coffset;
11079 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11080 &coffset);
11081 if (!base)
11082 return NULL_TREE;
11083 return fold_build2 (MEM_REF, type,
11084 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11085 int_const_binop (PLUS_EXPR, arg1,
11086 size_int (coffset)));
11089 return NULL_TREE;
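/* Illustrative effect of the two MEM_REF folds above, in C-like
   notation with a hypothetical struct S { int a; int b; } s:

     MEM[&MEM[p, 8], 4]   =>   MEM[p, 12]
     MEM[&s.b, 4]         =>   MEM[&s, offsetof (S, b) + 4]

   both simply accumulate the constant offsets.  */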
11091 case POINTER_PLUS_EXPR:
11092 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11093 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11094 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11095 return fold_convert_loc (loc, type,
11096 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11097 fold_convert_loc (loc, sizetype,
11098 arg1),
11099 fold_convert_loc (loc, sizetype,
11100 arg0)));
11102 return NULL_TREE;
11104 case PLUS_EXPR:
11105 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11107 /* X + (X / CST) * -CST is X % CST. */
11108 if (TREE_CODE (arg1) == MULT_EXPR
11109 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11110 && operand_equal_p (arg0,
11111 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11113 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11114 tree cst1 = TREE_OPERAND (arg1, 1);
11115 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11116 cst1, cst0);
11117 if (sum && integer_zerop (sum))
11118 return fold_convert_loc (loc, type,
11119 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11120 TREE_TYPE (arg0), arg0,
11121 cst0));
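/* A concrete instance of the fold above (illustrative): with CST == 16,

     x + (x / 16) * -16   =>   x % 16

   because the multiplier is exactly the negated divisor, the sum of the
   two constants is zero and the whole expression is the remainder.  */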
11125 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11126 one. Make sure the type is not saturating and has the signedness of
11127 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11128 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11129 if ((TREE_CODE (arg0) == MULT_EXPR
11130 || TREE_CODE (arg1) == MULT_EXPR)
11131 && !TYPE_SATURATING (type)
11132 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11133 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11134 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11136 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11137 if (tem)
11138 return tem;
11141 if (! FLOAT_TYPE_P (type))
11143 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11144 (plus (plus (mult) (mult)) (foo)) so that we can
11145 take advantage of the factoring cases below. */
11146 if (ANY_INTEGRAL_TYPE_P (type)
11147 && TYPE_OVERFLOW_WRAPS (type)
11148 && (((TREE_CODE (arg0) == PLUS_EXPR
11149 || TREE_CODE (arg0) == MINUS_EXPR)
11150 && TREE_CODE (arg1) == MULT_EXPR)
11151 || ((TREE_CODE (arg1) == PLUS_EXPR
11152 || TREE_CODE (arg1) == MINUS_EXPR)
11153 && TREE_CODE (arg0) == MULT_EXPR)))
11155 tree parg0, parg1, parg, marg;
11156 enum tree_code pcode;
11158 if (TREE_CODE (arg1) == MULT_EXPR)
11159 parg = arg0, marg = arg1;
11160 else
11161 parg = arg1, marg = arg0;
11162 pcode = TREE_CODE (parg);
11163 parg0 = TREE_OPERAND (parg, 0);
11164 parg1 = TREE_OPERAND (parg, 1);
11165 STRIP_NOPS (parg0);
11166 STRIP_NOPS (parg1);
11168 if (TREE_CODE (parg0) == MULT_EXPR
11169 && TREE_CODE (parg1) != MULT_EXPR)
11170 return fold_build2_loc (loc, pcode, type,
11171 fold_build2_loc (loc, PLUS_EXPR, type,
11172 fold_convert_loc (loc, type,
11173 parg0),
11174 fold_convert_loc (loc, type,
11175 marg)),
11176 fold_convert_loc (loc, type, parg1));
11177 if (TREE_CODE (parg0) != MULT_EXPR
11178 && TREE_CODE (parg1) == MULT_EXPR)
11179 return
11180 fold_build2_loc (loc, PLUS_EXPR, type,
11181 fold_convert_loc (loc, type, parg0),
11182 fold_build2_loc (loc, pcode, type,
11183 fold_convert_loc (loc, type, marg),
11184 fold_convert_loc (loc, type,
11185 parg1)));
11188 else
11190 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11191 to __complex__ ( x, y ). This is not the same for SNaNs or
11192 if signed zeros are involved. */
11193 if (!HONOR_SNANS (arg0)
11194 && !HONOR_SIGNED_ZEROS (arg0)
11195 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11197 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11198 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11199 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11200 bool arg0rz = false, arg0iz = false;
11201 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11202 || (arg0i && (arg0iz = real_zerop (arg0i))))
11204 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11205 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11206 if (arg0rz && arg1i && real_zerop (arg1i))
11208 tree rp = arg1r ? arg1r
11209 : build1 (REALPART_EXPR, rtype, arg1);
11210 tree ip = arg0i ? arg0i
11211 : build1 (IMAGPART_EXPR, rtype, arg0);
11212 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11214 else if (arg0iz && arg1r && real_zerop (arg1r))
11216 tree rp = arg0r ? arg0r
11217 : build1 (REALPART_EXPR, rtype, arg0);
11218 tree ip = arg1i ? arg1i
11219 : build1 (IMAGPART_EXPR, rtype, arg1);
11220 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11225 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11226 We associate floats only if the user has specified
11227 -fassociative-math. */
11228 if (flag_associative_math
11229 && TREE_CODE (arg1) == PLUS_EXPR
11230 && TREE_CODE (arg0) != MULT_EXPR)
11232 tree tree10 = TREE_OPERAND (arg1, 0);
11233 tree tree11 = TREE_OPERAND (arg1, 1);
11234 if (TREE_CODE (tree11) == MULT_EXPR
11235 && TREE_CODE (tree10) == MULT_EXPR)
11237 tree tree0;
11238 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11239 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11242 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11243 We associate floats only if the user has specified
11244 -fassociative-math. */
11245 if (flag_associative_math
11246 && TREE_CODE (arg0) == PLUS_EXPR
11247 && TREE_CODE (arg1) != MULT_EXPR)
11249 tree tree00 = TREE_OPERAND (arg0, 0);
11250 tree tree01 = TREE_OPERAND (arg0, 1);
11251 if (TREE_CODE (tree01) == MULT_EXPR
11252 && TREE_CODE (tree00) == MULT_EXPR)
11254 tree tree0;
11255 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11256 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11261 bit_rotate:
11262 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11263 is a rotate of A by C1 bits. */
11264 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11265 is a rotate of A by B bits.
11266 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11267 though in this case CODE must be | and not + or ^, otherwise
11268 it doesn't return A when B is 0. */
11270 enum tree_code code0, code1;
11271 tree rtype;
11272 code0 = TREE_CODE (arg0);
11273 code1 = TREE_CODE (arg1);
11274 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11275 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11276 && operand_equal_p (TREE_OPERAND (arg0, 0),
11277 TREE_OPERAND (arg1, 0), 0)
11278 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11279 TYPE_UNSIGNED (rtype))
11280 /* Only create rotates in complete modes. Other cases are not
11281 expanded properly. */
11282 && (element_precision (rtype)
11283 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11285 tree tree01, tree11;
11286 tree orig_tree01, orig_tree11;
11287 enum tree_code code01, code11;
11289 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11290 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11291 STRIP_NOPS (tree01);
11292 STRIP_NOPS (tree11);
11293 code01 = TREE_CODE (tree01);
11294 code11 = TREE_CODE (tree11);
11295 if (code11 != MINUS_EXPR
11296 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11298 std::swap (code0, code1);
11299 std::swap (code01, code11);
11300 std::swap (tree01, tree11);
11301 std::swap (orig_tree01, orig_tree11);
11303 if (code01 == INTEGER_CST
11304 && code11 == INTEGER_CST
11305 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11306 == element_precision (rtype)))
11308 tem = build2_loc (loc, LROTATE_EXPR,
11309 rtype, TREE_OPERAND (arg0, 0),
11310 code0 == LSHIFT_EXPR
11311 ? orig_tree01 : orig_tree11);
11312 return fold_convert_loc (loc, type, tem);
11314 else if (code11 == MINUS_EXPR)
11316 tree tree110, tree111;
11317 tree110 = TREE_OPERAND (tree11, 0);
11318 tree111 = TREE_OPERAND (tree11, 1);
11319 STRIP_NOPS (tree110);
11320 STRIP_NOPS (tree111);
11321 if (TREE_CODE (tree110) == INTEGER_CST
11322 && compare_tree_int (tree110,
11323 element_precision (rtype)) == 0
11324 && operand_equal_p (tree01, tree111, 0))
11326 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11327 ? LROTATE_EXPR : RROTATE_EXPR),
11328 rtype, TREE_OPERAND (arg0, 0),
11329 orig_tree01);
11330 return fold_convert_loc (loc, type, tem);
11333 else if (code == BIT_IOR_EXPR
11334 && code11 == BIT_AND_EXPR
11335 && pow2p_hwi (element_precision (rtype)))
11337 tree tree110, tree111;
11338 tree110 = TREE_OPERAND (tree11, 0);
11339 tree111 = TREE_OPERAND (tree11, 1);
11340 STRIP_NOPS (tree110);
11341 STRIP_NOPS (tree111);
11342 if (TREE_CODE (tree110) == NEGATE_EXPR
11343 && TREE_CODE (tree111) == INTEGER_CST
11344 && compare_tree_int (tree111,
11345 element_precision (rtype) - 1) == 0
11346 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11348 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11349 ? LROTATE_EXPR : RROTATE_EXPR),
11350 rtype, TREE_OPERAND (arg0, 0),
11351 orig_tree01);
11352 return fold_convert_loc (loc, type, tem);
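/* The recognizer above matches the portable C rotate idioms; a sketch
   assuming a 32-bit unsigned int (illustrative, not from the sources):

     unsigned rot1 (unsigned x) { return (x << 3) | (x >> 29); }
     unsigned rot2 (unsigned x, unsigned b)
     { return (x << b) | (x >> (-b & 31)); }

   Both are folded to a single LROTATE_EXPR.  The (-b & 31) form is only
   accepted for |, since + or ^ would not yield X when B is 0.  */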
11358 associate:
11359 /* In most languages, can't associate operations on floats through
11360 parentheses. Rather than remember where the parentheses were, we
11361 don't associate floats at all, unless the user has specified
11362 -fassociative-math.
11363 And, we need to make sure type is not saturating. */
11365 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11366 && !TYPE_SATURATING (type)
11367 && !TYPE_OVERFLOW_SANITIZED (type))
11369 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11370 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11371 tree atype = type;
11372 bool ok = true;
11374 /* Split both trees into variables, constants, and literals. Then
11375 associate each group together, the constants with literals,
11376 then the result with variables. This increases the chances of
11377 literals being recombined later and of generating relocatable
11378 expressions for the sum of a constant and literal. */
11379 var0 = split_tree (arg0, type, code,
11380 &minus_var0, &con0, &minus_con0,
11381 &lit0, &minus_lit0, 0);
11382 var1 = split_tree (arg1, type, code,
11383 &minus_var1, &con1, &minus_con1,
11384 &lit1, &minus_lit1, code == MINUS_EXPR);
11386 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11387 if (code == MINUS_EXPR)
11388 code = PLUS_EXPR;
11390 /* With undefined overflow prefer doing association in a type
11391 which wraps on overflow, if that is one of the operand types. */
11392 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11393 && !TYPE_OVERFLOW_WRAPS (type))
11395 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11396 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11397 atype = TREE_TYPE (arg0);
11398 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11399 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11400 atype = TREE_TYPE (arg1);
11401 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11404 /* With undefined overflow we can only associate constants with one
11405 variable, and constants whose association doesn't overflow. */
11406 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11407 && !TYPE_OVERFLOW_WRAPS (atype))
11409 if ((var0 && var1) || (minus_var0 && minus_var1))
11411 /* ??? If split_tree would handle NEGATE_EXPR we could
11412 simply reject these cases and the allowed cases would
11413 be the var0/minus_var1 ones. */
11414 tree tmp0 = var0 ? var0 : minus_var0;
11415 tree tmp1 = var1 ? var1 : minus_var1;
11416 bool one_neg = false;
11418 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11420 tmp0 = TREE_OPERAND (tmp0, 0);
11421 one_neg = !one_neg;
11423 if (CONVERT_EXPR_P (tmp0)
11424 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11425 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11426 <= TYPE_PRECISION (atype)))
11427 tmp0 = TREE_OPERAND (tmp0, 0);
11428 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11430 tmp1 = TREE_OPERAND (tmp1, 0);
11431 one_neg = !one_neg;
11433 if (CONVERT_EXPR_P (tmp1)
11434 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11435 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11436 <= TYPE_PRECISION (atype)))
11437 tmp1 = TREE_OPERAND (tmp1, 0);
11438 /* The only case we can still associate with two variables
11439 is if they cancel out. */
11440 if (!one_neg
11441 || !operand_equal_p (tmp0, tmp1, 0))
11442 ok = false;
11444 else if ((var0 && minus_var1
11445 && ! operand_equal_p (var0, minus_var1, 0))
11446 || (minus_var0 && var1
11447 && ! operand_equal_p (minus_var0, var1, 0)))
11448 ok = false;
11451 /* Only do something if we found more than two objects. Otherwise,
11452 nothing has changed and we risk infinite recursion. */
11453 if (ok
11454 && ((var0 != 0) + (var1 != 0)
11455 + (minus_var0 != 0) + (minus_var1 != 0)
11456 + (con0 != 0) + (con1 != 0)
11457 + (minus_con0 != 0) + (minus_con1 != 0)
11458 + (lit0 != 0) + (lit1 != 0)
11459 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11461 var0 = associate_trees (loc, var0, var1, code, atype);
11462 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11463 code, atype);
11464 con0 = associate_trees (loc, con0, con1, code, atype);
11465 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11466 code, atype);
11467 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11468 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11469 code, atype);
11471 if (minus_var0 && var0)
11473 var0 = associate_trees (loc, var0, minus_var0,
11474 MINUS_EXPR, atype);
11475 minus_var0 = 0;
11477 if (minus_con0 && con0)
11479 con0 = associate_trees (loc, con0, minus_con0,
11480 MINUS_EXPR, atype);
11481 minus_con0 = 0;
11484 /* Preserve the MINUS_EXPR if the negative part of the literal is
11485 greater than the positive part. Otherwise, the multiplicative
11486 folding code (i.e. extract_muldiv) may be fooled in case
11487 unsigned constants are subtracted, like in the following
11488 example: ((X*2 + 4) - 8U)/2. */
11489 if (minus_lit0 && lit0)
11491 if (TREE_CODE (lit0) == INTEGER_CST
11492 && TREE_CODE (minus_lit0) == INTEGER_CST
11493 && tree_int_cst_lt (lit0, minus_lit0)
11494 /* But avoid ending up with only negated parts. */
11495 && (var0 || con0))
11497 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11498 MINUS_EXPR, atype);
11499 lit0 = 0;
11501 else
11503 lit0 = associate_trees (loc, lit0, minus_lit0,
11504 MINUS_EXPR, atype);
11505 minus_lit0 = 0;
11509 /* Don't introduce overflows through reassociation. */
11510 if ((lit0 && TREE_OVERFLOW_P (lit0))
11511 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11512 return NULL_TREE;
11514 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11515 con0 = associate_trees (loc, con0, lit0, code, atype);
11516 lit0 = 0;
11517 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11518 code, atype);
11519 minus_lit0 = 0;
11521 /* Eliminate minus_con0. */
11522 if (minus_con0)
11524 if (con0)
11525 con0 = associate_trees (loc, con0, minus_con0,
11526 MINUS_EXPR, atype);
11527 else if (var0)
11528 var0 = associate_trees (loc, var0, minus_con0,
11529 MINUS_EXPR, atype);
11530 else
11531 gcc_unreachable ();
11532 minus_con0 = 0;
11535 /* Eliminate minus_var0. */
11536 if (minus_var0)
11538 if (con0)
11539 con0 = associate_trees (loc, con0, minus_var0,
11540 MINUS_EXPR, atype);
11541 else
11542 gcc_unreachable ();
11543 minus_var0 = 0;
11546 return
11547 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11548 code, atype));
11552 return NULL_TREE;
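/* Illustrative effect of the associate path above: in a wrapping type,

     (x + 3) + (y + 5)   =>   (x + y) + 8

   split_tree finds more than two pieces, the variables are combined
   first, then the literals, and the two groups are recombined last.  */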
11554 case POINTER_DIFF_EXPR:
11555 case MINUS_EXPR:
11556 /* Fold &a[i] - &a[j] to i-j. */
11557 if (TREE_CODE (arg0) == ADDR_EXPR
11558 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11559 && TREE_CODE (arg1) == ADDR_EXPR
11560 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11562 tree tem = fold_addr_of_array_ref_difference (loc, type,
11563 TREE_OPERAND (arg0, 0),
11564 TREE_OPERAND (arg1, 0),
11565 code
11566 == POINTER_DIFF_EXPR);
11567 if (tem)
11568 return tem;
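/* For example (illustrative), the fold above rewrites

     &a[i] - &a[j]   =>   i - j

   with the index difference scaled by the element size where the
   pointer difference is measured in bytes.  */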
11571 /* Further transformations are not for pointers. */
11572 if (code == POINTER_DIFF_EXPR)
11573 return NULL_TREE;
11575 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11576 if (TREE_CODE (arg0) == NEGATE_EXPR
11577 && negate_expr_p (op1)
11578 /* If arg0 is e.g. unsigned int and type is int, then this could
11579 introduce UB, because if A is INT_MIN at runtime, the original
11580 expression can be well defined while the latter is not.
11581 See PR83269. */
11582 && !(ANY_INTEGRAL_TYPE_P (type)
11583 && TYPE_OVERFLOW_UNDEFINED (type)
11584 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11585 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11586 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11587 fold_convert_loc (loc, type,
11588 TREE_OPERAND (arg0, 0)));
11590 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11591 __complex__ ( x, -y ). This is not the same for SNaNs or if
11592 signed zeros are involved. */
11593 if (!HONOR_SNANS (arg0)
11594 && !HONOR_SIGNED_ZEROS (arg0)
11595 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11597 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11598 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11599 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11600 bool arg0rz = false, arg0iz = false;
11601 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11602 || (arg0i && (arg0iz = real_zerop (arg0i))))
11604 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11605 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11606 if (arg0rz && arg1i && real_zerop (arg1i))
11608 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11609 arg1r ? arg1r
11610 : build1 (REALPART_EXPR, rtype, arg1));
11611 tree ip = arg0i ? arg0i
11612 : build1 (IMAGPART_EXPR, rtype, arg0);
11613 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11615 else if (arg0iz && arg1r && real_zerop (arg1r))
11617 tree rp = arg0r ? arg0r
11618 : build1 (REALPART_EXPR, rtype, arg0);
11619 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11620 arg1i ? arg1i
11621 : build1 (IMAGPART_EXPR, rtype, arg1));
11622 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11627 /* A - B -> A + (-B) if B is easily negatable. */
11628 if (negate_expr_p (op1)
11629 && ! TYPE_OVERFLOW_SANITIZED (type)
11630 && ((FLOAT_TYPE_P (type)
11631 /* Avoid this transformation if B is a positive REAL_CST. */
11632 && (TREE_CODE (op1) != REAL_CST
11633 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11634 || INTEGRAL_TYPE_P (type)))
11635 return fold_build2_loc (loc, PLUS_EXPR, type,
11636 fold_convert_loc (loc, type, arg0),
11637 negate_expr (op1));
11639 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11640 one. Make sure the type is not saturating and has the signedness of
11641 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11642 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11643 if ((TREE_CODE (arg0) == MULT_EXPR
11644 || TREE_CODE (arg1) == MULT_EXPR)
11645 && !TYPE_SATURATING (type)
11646 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11647 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11648 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11650 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11651 if (tem)
11652 return tem;
11655 goto associate;
11657 case MULT_EXPR:
11658 if (! FLOAT_TYPE_P (type))
11660 /* Transform x * -C into -x * C if x is easily negatable. */
11661 if (TREE_CODE (op1) == INTEGER_CST
11662 && tree_int_cst_sgn (op1) == -1
11663 && negate_expr_p (op0)
11664 && negate_expr_p (op1)
11665 && (tem = negate_expr (op1)) != op1
11666 && ! TREE_OVERFLOW (tem))
11667 return fold_build2_loc (loc, MULT_EXPR, type,
11668 fold_convert_loc (loc, type,
11669 negate_expr (op0)), tem);
11671 strict_overflow_p = false;
11672 if (TREE_CODE (arg1) == INTEGER_CST
11673 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11674 &strict_overflow_p)) != 0)
11676 if (strict_overflow_p)
11677 fold_overflow_warning (("assuming signed overflow does not "
11678 "occur when simplifying "
11679 "multiplication"),
11680 WARN_STRICT_OVERFLOW_MISC);
11681 return fold_convert_loc (loc, type, tem);
11684 /* Optimize z * conj(z) for integer complex numbers. */
11685 if (TREE_CODE (arg0) == CONJ_EXPR
11686 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11687 return fold_mult_zconjz (loc, type, arg1);
11688 if (TREE_CODE (arg1) == CONJ_EXPR
11689 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11690 return fold_mult_zconjz (loc, type, arg0);
11692 else
11694 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11695 This is not the same for NaNs or if signed zeros are
11696 involved. */
11697 if (!HONOR_NANS (arg0)
11698 && !HONOR_SIGNED_ZEROS (arg0)
11699 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11700 && TREE_CODE (arg1) == COMPLEX_CST
11701 && real_zerop (TREE_REALPART (arg1)))
11703 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11704 if (real_onep (TREE_IMAGPART (arg1)))
11705 return
11706 fold_build2_loc (loc, COMPLEX_EXPR, type,
11707 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11708 rtype, arg0)),
11709 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11710 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11711 return
11712 fold_build2_loc (loc, COMPLEX_EXPR, type,
11713 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11714 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11715 rtype, arg0)));
11718 /* Optimize z * conj(z) for floating point complex numbers.
11719 Guarded by flag_unsafe_math_optimizations as non-finite
11720 imaginary components don't produce scalar results. */
11721 if (flag_unsafe_math_optimizations
11722 && TREE_CODE (arg0) == CONJ_EXPR
11723 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11724 return fold_mult_zconjz (loc, type, arg1);
11725 if (flag_unsafe_math_optimizations
11726 && TREE_CODE (arg1) == CONJ_EXPR
11727 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11728 return fold_mult_zconjz (loc, type, arg0);
11730 goto associate;
11732 case BIT_IOR_EXPR:
11733 /* Canonicalize (X & C1) | C2. */
11734 if (TREE_CODE (arg0) == BIT_AND_EXPR
11735 && TREE_CODE (arg1) == INTEGER_CST
11736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11738 int width = TYPE_PRECISION (type), w;
11739 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11740 wide_int c2 = wi::to_wide (arg1);
11742 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11743 if ((c1 & c2) == c1)
11744 return omit_one_operand_loc (loc, type, arg1,
11745 TREE_OPERAND (arg0, 0));
11747 wide_int msk = wi::mask (width, false,
11748 TYPE_PRECISION (TREE_TYPE (arg1)));
11750 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11751 if (wi::bit_and_not (msk, c1 | c2) == 0)
11753 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11754 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11757 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11758 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11759 mode which allows further optimizations. */
11760 c1 &= msk;
11761 c2 &= msk;
11762 wide_int c3 = wi::bit_and_not (c1, c2);
11763 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11765 wide_int mask = wi::mask (w, false,
11766 TYPE_PRECISION (type));
11767 if (((c1 | c2) & mask) == mask
11768 && wi::bit_and_not (c1, mask) == 0)
11770 c3 = mask;
11771 break;
11775 if (c3 != c1)
11777 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11778 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11779 wide_int_to_tree (type, c3));
11780 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11784 /* See if this can be simplified into a rotate first. If that
11785 is unsuccessful continue in the association code. */
11786 goto bit_rotate;
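/* Worked examples for the (X & C1) | C2 canonicalization above, on an
   8-bit type (illustrative):

     (x & 0x03) | 0x0f   =>   0x0f                 C1 & C2 == C1
     (x & 0xf3) | 0x0f   =>   x | 0x0f             C1 | C2 == ~0
     (x & 0x73) | 0x0f   =>   (x & 0x70) | 0x0f    minimize C1  */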
11788 case BIT_XOR_EXPR:
11789 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11790 if (TREE_CODE (arg0) == BIT_AND_EXPR
11791 && INTEGRAL_TYPE_P (type)
11792 && integer_onep (TREE_OPERAND (arg0, 1))
11793 && integer_onep (arg1))
11794 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11795 build_zero_cst (TREE_TYPE (arg0)));
11797 /* See if this can be simplified into a rotate first. If that
11798 is unsuccessful continue in the association code. */
11799 goto bit_rotate;
11801 case BIT_AND_EXPR:
11802 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11803 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11804 && INTEGRAL_TYPE_P (type)
11805 && integer_onep (TREE_OPERAND (arg0, 1))
11806 && integer_onep (arg1))
11808 tree tem2;
11809 tem = TREE_OPERAND (arg0, 0);
11810 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11811 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11812 tem, tem2);
11813 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11814 build_zero_cst (TREE_TYPE (tem)));
11816 /* Fold ~X & 1 as (X & 1) == 0. */
11817 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11818 && INTEGRAL_TYPE_P (type)
11819 && integer_onep (arg1))
11821 tree tem2;
11822 tem = TREE_OPERAND (arg0, 0);
11823 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11824 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11825 tem, tem2);
11826 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11827 build_zero_cst (TREE_TYPE (tem)));
11829 /* Fold !X & 1 as X == 0. */
11830 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11831 && integer_onep (arg1))
11833 tem = TREE_OPERAND (arg0, 0);
11834 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11835 build_zero_cst (TREE_TYPE (tem)));
11838 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11839 multiple of 1 << CST. */
11840 if (TREE_CODE (arg1) == INTEGER_CST)
11842 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11843 wide_int ncst1 = -cst1;
11844 if ((cst1 & ncst1) == ncst1
11845 && multiple_of_p (type, arg0,
11846 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11847 return fold_convert_loc (loc, type, arg0);
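/* For instance (illustrative), with CST == 3 so that -(1 << 3) == -8:

     (x * 8) & -8   =>   x * 8

   the product is already a multiple of 8, so masking away the low
   three bits changes nothing.  */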
11850 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11851 bits from CST2. */
11852 if (TREE_CODE (arg1) == INTEGER_CST
11853 && TREE_CODE (arg0) == MULT_EXPR
11854 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11856 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11857 wide_int masked
11858 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11860 if (masked == 0)
11861 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11862 arg0, arg1);
11863 else if (masked != warg1)
11865 /* Avoid the transform if arg1 is a mask of some
11866 mode which allows further optimizations. */
11867 int pop = wi::popcount (warg1);
11868 if (!(pop >= BITS_PER_UNIT
11869 && pow2p_hwi (pop)
11870 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11871 return fold_build2_loc (loc, code, type, op0,
11872 wide_int_to_tree (type, masked));
11876 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11877 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11878 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11880 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11882 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11883 if (mask == -1)
11884 return
11885 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11888 goto associate;
11890 case RDIV_EXPR:
11891 /* Don't touch a floating-point divide by zero unless the mode
11892 of the constant can represent infinity. */
11893 if (TREE_CODE (arg1) == REAL_CST
11894 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11895 && real_zerop (arg1))
11896 return NULL_TREE;
11898 /* (-A) / (-B) -> A / B */
11899 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11900 return fold_build2_loc (loc, RDIV_EXPR, type,
11901 TREE_OPERAND (arg0, 0),
11902 negate_expr (arg1));
11903 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11904 return fold_build2_loc (loc, RDIV_EXPR, type,
11905 negate_expr (arg0),
11906 TREE_OPERAND (arg1, 0));
11907 return NULL_TREE;
11909 case TRUNC_DIV_EXPR:
11910 /* Fall through */
11912 case FLOOR_DIV_EXPR:
11913 /* Simplify A / (B << N) where A and B are positive and B is
11914 a power of 2, to A >> (N + log2(B)). */
11915 strict_overflow_p = false;
11916 if (TREE_CODE (arg1) == LSHIFT_EXPR
11917 && (TYPE_UNSIGNED (type)
11918 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11920 tree sval = TREE_OPERAND (arg1, 0);
11921 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11923 tree sh_cnt = TREE_OPERAND (arg1, 1);
11924 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11925 wi::exact_log2 (wi::to_wide (sval)));
11927 if (strict_overflow_p)
11928 fold_overflow_warning (("assuming signed overflow does not "
11929 "occur when simplifying A / (B << N)"),
11930 WARN_STRICT_OVERFLOW_MISC);
11932 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11933 sh_cnt, pow2);
11934 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11935 fold_convert_loc (loc, type, arg0), sh_cnt);
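/* A concrete case of the shift-division fold above (illustrative),
   for unsigned x:

     x / (4u << n)   =>   x >> (n + 2)

   since B == 4 is a power of two, log2 (B) == 2 is simply added to
   the shift count.  */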
11939 /* Fall through */
11941 case ROUND_DIV_EXPR:
11942 case CEIL_DIV_EXPR:
11943 case EXACT_DIV_EXPR:
11944 if (integer_zerop (arg1))
11945 return NULL_TREE;
11947 /* Convert -A / -B to A / B when the type is signed and overflow is
11948 undefined. */
11949 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11950 && TREE_CODE (op0) == NEGATE_EXPR
11951 && negate_expr_p (op1))
11953 if (ANY_INTEGRAL_TYPE_P (type))
11954 fold_overflow_warning (("assuming signed overflow does not occur "
11955 "when distributing negation across "
11956 "division"),
11957 WARN_STRICT_OVERFLOW_MISC);
11958 return fold_build2_loc (loc, code, type,
11959 fold_convert_loc (loc, type,
11960 TREE_OPERAND (arg0, 0)),
11961 negate_expr (op1));
11963 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11964 && TREE_CODE (arg1) == NEGATE_EXPR
11965 && negate_expr_p (op0))
11967 if (ANY_INTEGRAL_TYPE_P (type))
11968 fold_overflow_warning (("assuming signed overflow does not occur "
11969 "when distributing negation across "
11970 "division"),
11971 WARN_STRICT_OVERFLOW_MISC);
11972 return fold_build2_loc (loc, code, type,
11973 negate_expr (op0),
11974 fold_convert_loc (loc, type,
11975 TREE_OPERAND (arg1, 0)));
11978 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11979 operation, EXACT_DIV_EXPR.
11981 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11982 At one time others generated faster code, but it's not clear if they do
11983 after the last round of changes to the DIV code in expmed.cc. */
11984 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11985 && multiple_of_p (type, arg0, arg1))
11986 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11987 fold_convert (type, arg0),
11988 fold_convert (type, arg1));
11990 strict_overflow_p = false;
11991 if (TREE_CODE (arg1) == INTEGER_CST
11992 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11993 &strict_overflow_p)) != 0)
11995 if (strict_overflow_p)
11996 fold_overflow_warning (("assuming signed overflow does not occur "
11997 "when simplifying division"),
11998 WARN_STRICT_OVERFLOW_MISC);
11999 return fold_convert_loc (loc, type, tem);
12002 return NULL_TREE;
12004 case CEIL_MOD_EXPR:
12005 case FLOOR_MOD_EXPR:
12006 case ROUND_MOD_EXPR:
12007 case TRUNC_MOD_EXPR:
12008 strict_overflow_p = false;
12009 if (TREE_CODE (arg1) == INTEGER_CST
12010 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12011 &strict_overflow_p)) != 0)
12013 if (strict_overflow_p)
12014 fold_overflow_warning (("assuming signed overflow does not occur "
12015 "when simplifying modulus"),
12016 WARN_STRICT_OVERFLOW_MISC);
12017 return fold_convert_loc (loc, type, tem);
12020 return NULL_TREE;
12022 case LROTATE_EXPR:
12023 case RROTATE_EXPR:
12024 case RSHIFT_EXPR:
12025 case LSHIFT_EXPR:
12026 /* Since negative shift count is not well-defined,
12027 don't try to compute it in the compiler. */
12028 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12029 return NULL_TREE;
12031 prec = element_precision (type);
12033 /* If we have a rotate of a bit operation with the rotate count and
12034 the second operand of the bit operation both constant,
12035 permute the two operations. */
12036 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12037 && (TREE_CODE (arg0) == BIT_AND_EXPR
12038 || TREE_CODE (arg0) == BIT_IOR_EXPR
12039 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12040 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12042 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12043 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12044 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12045 fold_build2_loc (loc, code, type,
12046 arg00, arg1),
12047 fold_build2_loc (loc, code, type,
12048 arg01, arg1));
12051 /* Two consecutive rotates adding up to some integer
12052 multiple of the precision of the type can be ignored. */
12053 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12054 && TREE_CODE (arg0) == RROTATE_EXPR
12055 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12056 && wi::umod_trunc (wi::to_wide (arg1)
12057 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12058 prec) == 0)
12059 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
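/* Example of the rotate-combining fold above (illustrative), on a
   32-bit type:

     ((x >r 3) >r 29)   =>   x

   where >r stands for RROTATE_EXPR: the counts sum to the precision,
   so the combined rotation is the identity.  */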
12061 return NULL_TREE;
12063 case MIN_EXPR:
12064 case MAX_EXPR:
12065 goto associate;
12067 case TRUTH_ANDIF_EXPR:
12068 /* Note that the operands of this must be ints
12069 and their values must be 0 or 1.
12070 ("true" is a fixed value perhaps depending on the language.) */
12071 /* If first arg is constant zero, return it. */
12072 if (integer_zerop (arg0))
12073 return fold_convert_loc (loc, type, arg0);
12074 /* FALLTHRU */
12075 case TRUTH_AND_EXPR:
12076 /* If either arg is constant true, drop it. */
12077 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12078 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12079 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12080 /* Preserve sequence points. */
12081 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12082 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12083 /* If second arg is constant zero, result is zero, but first arg
12084 must be evaluated. */
12085 if (integer_zerop (arg1))
12086 return omit_one_operand_loc (loc, type, arg1, arg0);
12087 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12088 case will be handled here. */
12089 if (integer_zerop (arg0))
12090 return omit_one_operand_loc (loc, type, arg0, arg1);
12092 /* !X && X is always false. */
12093 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12095 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12096 /* X && !X is always false. */
12097 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12099 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12101 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12102 means A >= Y && A != MAX, but in this case we know that
12103 A < X <= MAX. */
12105 if (!TREE_SIDE_EFFECTS (arg0)
12106 && !TREE_SIDE_EFFECTS (arg1))
12108 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12109 if (tem && !operand_equal_p (tem, arg0, 0))
12110 return fold_convert (type,
12111 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12112 tem, arg1));
12114 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12115 if (tem && !operand_equal_p (tem, arg1, 0))
12116 return fold_convert (type,
12117 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12118 arg0, tem));
12121 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12122 != NULL_TREE)
12123 return tem;
12125 return NULL_TREE;
12127 case TRUTH_ORIF_EXPR:
12128 /* Note that the operands of this must be ints
12129 and their values must be 0 or true.
12130 ("true" is a fixed value perhaps depending on the language.) */
12131 /* If first arg is constant true, return it. */
12132 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12133 return fold_convert_loc (loc, type, arg0);
12134 /* FALLTHRU */
12135 case TRUTH_OR_EXPR:
12136 /* If either arg is constant zero, drop it. */
12137 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12138 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12139 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12140 /* Preserve sequence points. */
12141 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12142 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12143 /* If second arg is constant true, result is true, but we must
12144 evaluate first arg. */
12145 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12146 return omit_one_operand_loc (loc, type, arg1, arg0);
12147 /* Likewise for first arg, but note this only occurs here for
12148 TRUTH_OR_EXPR. */
12149 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12150 return omit_one_operand_loc (loc, type, arg0, arg1);
12152 /* !X || X is always true. */
12153 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12155 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12156 /* X || !X is always true. */
12157 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12158 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12159 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12161 /* (X && !Y) || (!X && Y) is X ^ Y */
12162 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12163 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12165 tree a0, a1, l0, l1, n0, n1;
12167 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12168 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12170 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12171 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12173 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12174 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12176 if ((operand_equal_p (n0, a0, 0)
12177 && operand_equal_p (n1, a1, 0))
12178 || (operand_equal_p (n0, a1, 0)
12179 && operand_equal_p (n1, a0, 0)))
12180 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12183 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12184 != NULL_TREE)
12185 return tem;
12187 return NULL_TREE;
12189 case TRUTH_XOR_EXPR:
12190 /* If the second arg is constant zero, drop it. */
12191 if (integer_zerop (arg1))
12192 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12193 /* If the second arg is constant true, this is a logical inversion. */
12194 if (integer_onep (arg1))
12196 tem = invert_truthvalue_loc (loc, arg0);
12197 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12199 /* Identical arguments cancel to zero. */
12200 if (operand_equal_p (arg0, arg1, 0))
12201 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12203 /* !X ^ X is always true. */
12204 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12205 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12206 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12208 /* X ^ !X is always true. */
12209 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12210 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12211 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12213 return NULL_TREE;
12215 case EQ_EXPR:
12216 case NE_EXPR:
12217 STRIP_NOPS (arg0);
12218 STRIP_NOPS (arg1);
12220 tem = fold_comparison (loc, code, type, op0, op1);
12221 if (tem != NULL_TREE)
12222 return tem;
12224 /* bool_var != 1 becomes !bool_var. */
12225 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12226 && code == NE_EXPR)
12227 return fold_convert_loc (loc, type,
12228 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12229 TREE_TYPE (arg0), arg0));
12231 /* bool_var == 0 becomes !bool_var. */
12232 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12233 && code == EQ_EXPR)
12234 return fold_convert_loc (loc, type,
12235 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12236 TREE_TYPE (arg0), arg0));
12238 /* !exp != 0 becomes !exp */
12239 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12240 && code == NE_EXPR)
12241 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12243 /* If this is an EQ or NE comparison with zero and ARG0 is
12244 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12245 two operations, but the latter can be done in one less insn
12246 on machines that have only two-operand insns or on which a
12247 constant cannot be the first operand. */
12248 if (TREE_CODE (arg0) == BIT_AND_EXPR
12249 && integer_zerop (arg1))
12251 tree arg00 = TREE_OPERAND (arg0, 0);
12252 tree arg01 = TREE_OPERAND (arg0, 1);
12253 if (TREE_CODE (arg00) == LSHIFT_EXPR
12254 && integer_onep (TREE_OPERAND (arg00, 0)))
12256 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12257 arg01, TREE_OPERAND (arg00, 1));
12258 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12259 build_one_cst (TREE_TYPE (arg0)));
12260 return fold_build2_loc (loc, code, type,
12261 fold_convert_loc (loc, TREE_TYPE (arg1),
12262 tem), arg1);
12264 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12265 && integer_onep (TREE_OPERAND (arg01, 0)))
12267 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12268 arg00, TREE_OPERAND (arg01, 1));
12269 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12270 build_one_cst (TREE_TYPE (arg0)));
12271 return fold_build2_loc (loc, code, type,
12272 fold_convert_loc (loc, TREE_TYPE (arg1),
12273 tem), arg1);
12277 /* If this is a comparison of a field, we may be able to simplify it. */
12278 if ((TREE_CODE (arg0) == COMPONENT_REF
12279 || TREE_CODE (arg0) == BIT_FIELD_REF)
12280 /* Handle the constant case even without -O
12281 to make sure the warnings are given. */
12282 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12284 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12285 if (t1)
12286 return t1;
12289 /* Optimize comparisons of strlen vs zero to a compare of the
12290 first character of the string vs zero. To wit,
12291 strlen(ptr) == 0 => *ptr == 0
12292 strlen(ptr) != 0 => *ptr != 0
12293 Other cases should reduce to one of these two (or a constant)
12294 due to the return value of strlen being unsigned. */
12295 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12297 tree fndecl = get_callee_fndecl (arg0);
12299 if (fndecl
12300 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12301 && call_expr_nargs (arg0) == 1
12302 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12303 == POINTER_TYPE))
12305 tree ptrtype
12306 = build_pointer_type (build_qualified_type (char_type_node,
12307 TYPE_QUAL_CONST));
12308 tree ptr = fold_convert_loc (loc, ptrtype,
12309 CALL_EXPR_ARG (arg0, 0));
12310 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12311 return fold_build2_loc (loc, code, type, iref,
12312 build_int_cst (TREE_TYPE (iref), 0));
12316 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12317 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12318 if (TREE_CODE (arg0) == RSHIFT_EXPR
12319 && integer_zerop (arg1)
12320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12322 tree arg00 = TREE_OPERAND (arg0, 0);
12323 tree arg01 = TREE_OPERAND (arg0, 1);
12324 tree itype = TREE_TYPE (arg00);
12325 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12327 if (TYPE_UNSIGNED (itype))
12329 itype = signed_type_for (itype);
12330 arg00 = fold_convert_loc (loc, itype, arg00);
12332 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12333 type, arg00, build_zero_cst (itype));
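/* Concretely (illustrative), for a 32-bit int x the fold above gives

     (x >> 31) != 0   =>   x < 0
     (x >> 31) == 0   =>   x >= 0

   an unsigned X is first converted to the corresponding signed type
   so that the sign-bit test is expressible.  */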
12337 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12338 (X & C) == 0 when C is a single bit. */
12339 if (TREE_CODE (arg0) == BIT_AND_EXPR
12340 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12341 && integer_zerop (arg1)
12342 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12344 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12345 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12346 TREE_OPERAND (arg0, 1));
12347 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12348 type, tem,
12349 fold_convert_loc (loc, TREE_TYPE (arg0),
12350 arg1));
12353 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12354 constant C is a power of two, i.e. a single bit. */
12355 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12356 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12357 && integer_zerop (arg1)
12358 && integer_pow2p (TREE_OPERAND (arg0, 1))
12359 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12360 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12362 tree arg00 = TREE_OPERAND (arg0, 0);
12363 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12364 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12367 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12368 when C is a power of two, i.e. a single bit. */
12369 if (TREE_CODE (arg0) == BIT_AND_EXPR
12370 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12371 && integer_zerop (arg1)
12372 && integer_pow2p (TREE_OPERAND (arg0, 1))
12373 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12374 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12376 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12377 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12378 arg000, TREE_OPERAND (arg0, 1));
12379 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12380 tem, build_int_cst (TREE_TYPE (tem), 0));
12383 if (integer_zerop (arg1)
12384 && tree_expr_nonzero_p (arg0))
12386 tree res = constant_boolean_node (code == NE_EXPR, type);
12387 return omit_one_operand_loc (loc, type, res, arg0);
12390 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12391 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12393 tree arg00 = TREE_OPERAND (arg0, 0);
12394 tree arg01 = TREE_OPERAND (arg0, 1);
12395 tree arg10 = TREE_OPERAND (arg1, 0);
12396 tree arg11 = TREE_OPERAND (arg1, 1);
12397 tree itype = TREE_TYPE (arg0);
12399 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12400 operand_equal_p guarantees no side-effects so we don't need
12401 to use omit_one_operand on Z. */
12402 if (operand_equal_p (arg01, arg11, 0))
12403 return fold_build2_loc (loc, code, type, arg00,
12404 fold_convert_loc (loc, TREE_TYPE (arg00),
12405 arg10));
12406 if (operand_equal_p (arg01, arg10, 0))
12407 return fold_build2_loc (loc, code, type, arg00,
12408 fold_convert_loc (loc, TREE_TYPE (arg00),
12409 arg11));
12410 if (operand_equal_p (arg00, arg11, 0))
12411 return fold_build2_loc (loc, code, type, arg01,
12412 fold_convert_loc (loc, TREE_TYPE (arg01),
12413 arg10));
12414 if (operand_equal_p (arg00, arg10, 0))
12415 return fold_build2_loc (loc, code, type, arg01,
12416 fold_convert_loc (loc, TREE_TYPE (arg01),
12417 arg11));
12419 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12420 if (TREE_CODE (arg01) == INTEGER_CST
12421 && TREE_CODE (arg11) == INTEGER_CST)
12423 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12424 fold_convert_loc (loc, itype, arg11));
12425 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12426 return fold_build2_loc (loc, code, type, tem,
12427 fold_convert_loc (loc, itype, arg10));
12431 /* Attempt to simplify equality/inequality comparisons of complex
12432 values. Only lower the comparison if the result is known or
12433 can be simplified to a single scalar comparison. */
12434 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12435 || TREE_CODE (arg0) == COMPLEX_CST)
12436 && (TREE_CODE (arg1) == COMPLEX_EXPR
12437 || TREE_CODE (arg1) == COMPLEX_CST))
12439 tree real0, imag0, real1, imag1;
12440 tree rcond, icond;
12442 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12444 real0 = TREE_OPERAND (arg0, 0);
12445 imag0 = TREE_OPERAND (arg0, 1);
12447 else
12449 real0 = TREE_REALPART (arg0);
12450 imag0 = TREE_IMAGPART (arg0);
12453 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12455 real1 = TREE_OPERAND (arg1, 0);
12456 imag1 = TREE_OPERAND (arg1, 1);
12458 else
12460 real1 = TREE_REALPART (arg1);
12461 imag1 = TREE_IMAGPART (arg1);
12464 rcond = fold_binary_loc (loc, code, type, real0, real1);
12465 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12467 if (integer_zerop (rcond))
12469 if (code == EQ_EXPR)
12470 return omit_two_operands_loc (loc, type, boolean_false_node,
12471 imag0, imag1);
12472 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12474 else
12476 if (code == NE_EXPR)
12477 return omit_two_operands_loc (loc, type, boolean_true_node,
12478 imag0, imag1);
12479 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12483 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12484 if (icond && TREE_CODE (icond) == INTEGER_CST)
12486 if (integer_zerop (icond))
12488 if (code == EQ_EXPR)
12489 return omit_two_operands_loc (loc, type, boolean_false_node,
12490 real0, real1);
12491 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12493 else
12495 if (code == NE_EXPR)
12496 return omit_two_operands_loc (loc, type, boolean_true_node,
12497 real0, real1);
12498 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12503 return NULL_TREE;
12505 case LT_EXPR:
12506 case GT_EXPR:
12507 case LE_EXPR:
12508 case GE_EXPR:
12509 tem = fold_comparison (loc, code, type, op0, op1);
12510 if (tem != NULL_TREE)
12511 return tem;
12513 /* Transform comparisons of the form X +- C CMP X. */
12514 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12516 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12517 && !HONOR_SNANS (arg0))
12519 tree arg01 = TREE_OPERAND (arg0, 1);
12520 enum tree_code code0 = TREE_CODE (arg0);
12521 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12523 /* (X - c) > X becomes false. */
12524 if (code == GT_EXPR
12525 && ((code0 == MINUS_EXPR && is_positive >= 0)
12526 || (code0 == PLUS_EXPR && is_positive <= 0)))
12527 return constant_boolean_node (0, type);
12529 /* Likewise (X + c) < X becomes false. */
12530 if (code == LT_EXPR
12531 && ((code0 == PLUS_EXPR && is_positive >= 0)
12532 || (code0 == MINUS_EXPR && is_positive <= 0)))
12533 return constant_boolean_node (0, type);
12535 /* Convert (X - c) <= X to true. */
12536 if (!HONOR_NANS (arg1)
12537 && code == LE_EXPR
12538 && ((code0 == MINUS_EXPR && is_positive >= 0)
12539 || (code0 == PLUS_EXPR && is_positive <= 0)))
12540 return constant_boolean_node (1, type);
12542 /* Convert (X + c) >= X to true. */
12543 if (!HONOR_NANS (arg1)
12544 && code == GE_EXPR
12545 && ((code0 == PLUS_EXPR && is_positive >= 0)
12546 || (code0 == MINUS_EXPR && is_positive <= 0)))
12547 return constant_boolean_node (1, type);
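/* Concretely (illustrative, IEEE double): (x - 1.0) > x folds to
   false even when quiet NaNs are honored, since a NaN operand makes
   the comparison unordered and thus false anyway; only signaling
   NaNs block it. By contrast (x - 1.0) <= x needs !HONOR_NANS,
   because for x == NaN the comparison would be false, not true. */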
12550 /* If we are comparing an ABS_EXPR with a constant, we can
12551 convert all the cases into explicit comparisons, but they may
12552 well not be faster than doing the ABS and one comparison.
12553 But ABS (X) <= C is a range comparison, which becomes a subtraction
12554 and a comparison, and is probably faster. */
12555 if (code == LE_EXPR
12556 && TREE_CODE (arg1) == INTEGER_CST
12557 && TREE_CODE (arg0) == ABS_EXPR
12558 && ! TREE_SIDE_EFFECTS (arg0)
12559 && (tem = negate_expr (arg1)) != 0
12560 && TREE_CODE (tem) == INTEGER_CST
12561 && !TREE_OVERFLOW (tem))
12562 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12563 build2 (GE_EXPR, type,
12564 TREE_OPERAND (arg0, 0), tem),
12565 build2 (LE_EXPR, type,
12566 TREE_OPERAND (arg0, 0), arg1));
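/* E.g. ABS (x) <= 3 becomes x >= -3 && x <= 3 (illustrative; the
   negate_expr / TREE_OVERFLOW checks above reject constants whose
   negation would overflow, such as INT_MIN). */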
12568 /* Convert ABS_EXPR<x> >= 0 to true. */
12569 strict_overflow_p = false;
12570 if (code == GE_EXPR
12571 && (integer_zerop (arg1)
12572 || (! HONOR_NANS (arg0)
12573 && real_zerop (arg1)))
12574 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12576 if (strict_overflow_p)
12577 fold_overflow_warning (("assuming signed overflow does not occur "
12578 "when simplifying comparison of "
12579 "absolute value and zero"),
12580 WARN_STRICT_OVERFLOW_CONDITIONAL);
12581 return omit_one_operand_loc (loc, type,
12582 constant_boolean_node (true, type),
12583 arg0);
12586 /* Convert ABS_EXPR<x> < 0 to false. */
12587 strict_overflow_p = false;
12588 if (code == LT_EXPR
12589 && (integer_zerop (arg1) || real_zerop (arg1))
12590 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12592 if (strict_overflow_p)
12593 fold_overflow_warning (("assuming signed overflow does not occur "
12594 "when simplifying comparison of "
12595 "absolute value and zero"),
12596 WARN_STRICT_OVERFLOW_CONDITIONAL);
12597 return omit_one_operand_loc (loc, type,
12598 constant_boolean_node (false, type),
12599 arg0);
12602 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12603 and similarly for >= into !=. */
12604 if ((code == LT_EXPR || code == GE_EXPR)
12605 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12606 && TREE_CODE (arg1) == LSHIFT_EXPR
12607 && integer_onep (TREE_OPERAND (arg1, 0)))
12608 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12609 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12610 TREE_OPERAND (arg1, 1)),
12611 build_zero_cst (TREE_TYPE (arg0)));
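/* E.g. for unsigned x: x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0, since shifting x right by y
   discards exactly the bits below 1 << y (illustrative). */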
12613 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12614 otherwise Y might be >= # of bits in X's type and thus e.g.
12615 (unsigned char) (1 << Y) for Y 15 might be 0.
12616 If the cast is widening, then 1 << Y should have unsigned type,
12617 otherwise if Y is number of bits in the signed shift type minus 1,
12618 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12619 31 might be 0xffffffff80000000. */
12620 if ((code == LT_EXPR || code == GE_EXPR)
12621 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12622 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12623 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12624 && CONVERT_EXPR_P (arg1)
12625 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12626 && (element_precision (TREE_TYPE (arg1))
12627 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12628 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12629 || (element_precision (TREE_TYPE (arg1))
12630 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12631 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12633 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12634 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12635 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12636 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12637 build_zero_cst (TREE_TYPE (arg0)));
12640 return NULL_TREE;
12642 case UNORDERED_EXPR:
12643 case ORDERED_EXPR:
12644 case UNLT_EXPR:
12645 case UNLE_EXPR:
12646 case UNGT_EXPR:
12647 case UNGE_EXPR:
12648 case UNEQ_EXPR:
12649 case LTGT_EXPR:
12650 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12652 tree targ0 = strip_float_extensions (arg0);
12653 tree targ1 = strip_float_extensions (arg1);
12654 tree newtype = TREE_TYPE (targ0);
12656 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12657 newtype = TREE_TYPE (targ1);
12659 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12660 return fold_build2_loc (loc, code, type,
12661 fold_convert_loc (loc, newtype, targ0),
12662 fold_convert_loc (loc, newtype, targ1));
12665 return NULL_TREE;
12667 case COMPOUND_EXPR:
12668 /* When pedantic, a compound expression can be neither an lvalue
12669 nor an integer constant expression. */
12670 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12671 return NULL_TREE;
12672 /* Don't let (0, 0) be a null pointer constant. */
12673 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12674 : fold_convert_loc (loc, type, arg1);
12675 return tem;
12677 default:
12678 return NULL_TREE;
12679 } /* switch (code) */
12682 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12683 ((A & N) + B) & M -> (A + B) & M
12684 Similarly if (N & M) == 0,
12685 ((A | N) + B) & M -> (A + B) & M
12686 and for - instead of + (or unary - instead of +)
12687 and/or ^ instead of |.
12688 If B is constant and (B & M) == 0, fold into A & M.
12690 This function is a helper for match.pd patterns. It returns the
12691 non-NULL type in which the simplified operation should be performed,
12692 but only if some optimization is possible.
12694 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12695 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12696 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12697 +/-. */
12698 tree
12699 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12700 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12701 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12702 tree *pmop)
12704 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12705 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12706 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12707 if (~cst1 == 0
12708 || (cst1 & (cst1 + 1)) != 0
12709 || !INTEGRAL_TYPE_P (type)
12710 || (!TYPE_OVERFLOW_WRAPS (type)
12711 && TREE_CODE (type) != INTEGER_TYPE)
12712 || (wi::max_value (type) & cst1) != cst1)
12713 return NULL_TREE;
12715 enum tree_code codes[2] = { code00, code01 };
12716 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12717 int which = 0;
12718 wide_int cst0;
12720 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12721 arg1 (M) is == (1LL << cst) - 1.
12722 Store C into PMOP[0] and D into PMOP[1]. */
12723 pmop[0] = arg00;
12724 pmop[1] = arg01;
12725 which = code != NEGATE_EXPR;
12727 for (; which >= 0; which--)
12728 switch (codes[which])
12730 case BIT_AND_EXPR:
12731 case BIT_IOR_EXPR:
12732 case BIT_XOR_EXPR:
12733 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12734 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12735 if (codes[which] == BIT_AND_EXPR)
12737 if (cst0 != cst1)
12738 break;
12740 else if (cst0 != 0)
12741 break;
12742 /* If C or D is of the form (A & N) where
12743 (N & M) == M, or of the form (A | N) or
12744 (A ^ N) where (N & M) == 0, replace it with A. */
12745 pmop[which] = arg0xx[2 * which];
12746 break;
12747 case ERROR_MARK:
12748 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12749 break;
12750 /* If C or D is a N where (N & M) == 0, it can be
12751 omitted (replaced with 0). */
12752 if ((code == PLUS_EXPR
12753 || (code == MINUS_EXPR && which == 0))
12754 && (cst1 & wi::to_wide (pmop[which])) == 0)
12755 pmop[which] = build_int_cst (type, 0);
12756 /* Similarly, with C - N where (-N & M) == 0. */
12757 if (code == MINUS_EXPR
12758 && which == 1
12759 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12760 pmop[which] = build_int_cst (type, 0);
12761 break;
12762 default:
12763 gcc_unreachable ();
12766 /* Only build anything new if we optimized one or both arguments above. */
12767 if (pmop[0] == arg00 && pmop[1] == arg01)
12768 return NULL_TREE;
12770 if (TYPE_OVERFLOW_WRAPS (type))
12771 return type;
12772 else
12773 return unsigned_type_for (type);
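/* Worked example (illustrative): for ((a & 0x1f) + b) & 0x0f we have
   M == 0x0f == (1 << 4) - 1 and N == 0x1f, so (N & M) == M and the
   inner mask is redundant: the result is (a + b) & 0x0f, carried out
   in the unsigned variant of the type unless it already wraps. */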
12776 /* Used by contains_label_[p1]. */
12778 struct contains_label_data
12780 hash_set<tree> *pset;
12781 bool inside_switch_p;
12784 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12785 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12786 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12788 static tree
12789 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12791 contains_label_data *d = (contains_label_data *) data;
12792 switch (TREE_CODE (*tp))
12794 case LABEL_EXPR:
12795 return *tp;
12797 case CASE_LABEL_EXPR:
12798 if (!d->inside_switch_p)
12799 return *tp;
12800 return NULL_TREE;
12802 case SWITCH_EXPR:
12803 if (!d->inside_switch_p)
12805 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12806 return *tp;
12807 d->inside_switch_p = true;
12808 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12809 return *tp;
12810 d->inside_switch_p = false;
12811 *walk_subtrees = 0;
12813 return NULL_TREE;
12815 case GOTO_EXPR:
12816 *walk_subtrees = 0;
12817 return NULL_TREE;
12819 default:
12820 return NULL_TREE;
12824 /* Return whether the sub-tree ST contains a label which is accessible from
12825 outside the sub-tree. */
12827 static bool
12828 contains_label_p (tree st)
12830 hash_set<tree> pset;
12831 contains_label_data data = { &pset, false };
12832 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12835 /* Fold a ternary expression of code CODE and type TYPE with operands
12836 OP0, OP1, and OP2. Return the folded expression if folding is
12837 successful. Otherwise, return NULL_TREE. */
12839 tree
12840 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12841 tree op0, tree op1, tree op2)
12843 tree tem;
12844 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12845 enum tree_code_class kind = TREE_CODE_CLASS (code);
12847 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12848 && TREE_CODE_LENGTH (code) == 3);
12850 /* If this is a commutative operation, and OP0 is a constant, move it
12851 to OP1 to reduce the number of tests below. */
12852 if (commutative_ternary_tree_code (code)
12853 && tree_swap_operands_p (op0, op1))
12854 return fold_build3_loc (loc, code, type, op1, op0, op2);
12856 tem = generic_simplify (loc, code, type, op0, op1, op2);
12857 if (tem)
12858 return tem;
12860 /* Strip any conversions that don't change the mode. This is safe
12861 for every expression, except for a comparison expression because
12862 its signedness is derived from its operands. So, in the latter
12863 case, only strip conversions that don't change the signedness.
12865 Note that this is done as an internal manipulation within the
12866 constant folder, in order to find the simplest representation of
12867 the arguments so that their form can be studied. In any cases,
12868 the appropriate type conversions should be put back in the tree
12869 that will get out of the constant folder. */
12870 if (op0)
12872 arg0 = op0;
12873 STRIP_NOPS (arg0);
12876 if (op1)
12878 arg1 = op1;
12879 STRIP_NOPS (arg1);
12882 if (op2)
12884 arg2 = op2;
12885 STRIP_NOPS (arg2);
12888 switch (code)
12890 case COMPONENT_REF:
12891 if (TREE_CODE (arg0) == CONSTRUCTOR
12892 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12894 unsigned HOST_WIDE_INT idx;
12895 tree field, value;
12896 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12897 if (field == arg1)
12898 return value;
12900 return NULL_TREE;
12902 case COND_EXPR:
12903 case VEC_COND_EXPR:
12904 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12905 so all simple results must be passed through pedantic_non_lvalue. */
12906 if (TREE_CODE (arg0) == INTEGER_CST)
12908 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12909 tem = integer_zerop (arg0) ? op2 : op1;
12910 /* Only optimize constant conditions when the selected branch
12911 has the same type as the COND_EXPR. This avoids optimizing
12912 away "c ? x : throw", where the throw has a void type.
12913 Avoid throwing away an operand that contains a label. */
12914 if ((!TREE_SIDE_EFFECTS (unused_op)
12915 || !contains_label_p (unused_op))
12916 && (! VOID_TYPE_P (TREE_TYPE (tem))
12917 || VOID_TYPE_P (type)))
12918 return protected_set_expr_location_unshare (tem, loc);
12919 return NULL_TREE;
12921 else if (TREE_CODE (arg0) == VECTOR_CST)
12923 unsigned HOST_WIDE_INT nelts;
12924 if ((TREE_CODE (arg1) == VECTOR_CST
12925 || TREE_CODE (arg1) == CONSTRUCTOR)
12926 && (TREE_CODE (arg2) == VECTOR_CST
12927 || TREE_CODE (arg2) == CONSTRUCTOR)
12928 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12930 vec_perm_builder sel (nelts, nelts, 1);
12931 for (unsigned int i = 0; i < nelts; i++)
12933 tree val = VECTOR_CST_ELT (arg0, i);
12934 if (integer_all_onesp (val))
12935 sel.quick_push (i);
12936 else if (integer_zerop (val))
12937 sel.quick_push (nelts + i);
12938 else /* Currently unreachable. */
12939 return NULL_TREE;
12941 vec_perm_indices indices (sel, 2, nelts);
12942 tree t = fold_vec_perm (type, arg1, arg2, indices);
12943 if (t != NULL_TREE)
12944 return t;
12948 /* If we have A op B ? A : C, we may be able to convert this to a
12949 simpler expression, depending on the operation and the values
12950 of B and C. Signed zeros prevent all of these transformations,
12951 for reasons given above each one.
12953 Also try swapping the arguments and inverting the conditional. */
12954 if (COMPARISON_CLASS_P (arg0)
12955 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12956 && !HONOR_SIGNED_ZEROS (op1))
12958 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12959 TREE_OPERAND (arg0, 0),
12960 TREE_OPERAND (arg0, 1),
12961 op1, op2);
12962 if (tem)
12963 return tem;
12966 if (COMPARISON_CLASS_P (arg0)
12967 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12968 && !HONOR_SIGNED_ZEROS (op2))
12970 enum tree_code comp_code = TREE_CODE (arg0);
12971 tree arg00 = TREE_OPERAND (arg0, 0);
12972 tree arg01 = TREE_OPERAND (arg0, 1);
12973 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12974 if (comp_code != ERROR_MARK)
12975 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12976 arg00,
12977 arg01,
12978 op2, op1);
12979 if (tem)
12980 return tem;
12983 /* If the second operand is simpler than the third, swap them
12984 since that produces better jump optimization results. */
12985 if (truth_value_p (TREE_CODE (arg0))
12986 && tree_swap_operands_p (op1, op2))
12988 location_t loc0 = expr_location_or (arg0, loc);
12989 /* See if this can be inverted. If it can't, possibly because
12990 it was a floating-point inequality comparison, don't do
12991 anything. */
12992 tem = fold_invert_truthvalue (loc0, arg0);
12993 if (tem)
12994 return fold_build3_loc (loc, code, type, tem, op2, op1);
12997 /* Convert A ? 1 : 0 to simply A. */
12998 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12999 : (integer_onep (op1)
13000 && !VECTOR_TYPE_P (type)))
13001 && integer_zerop (op2)
13002 /* If we try to convert OP0 to our type, the
13003 call to fold will try to move the conversion inside
13004 a COND, which will recurse. In that case, the COND_EXPR
13005 is probably the best choice, so leave it alone. */
13006 && type == TREE_TYPE (arg0))
13007 return protected_set_expr_location_unshare (arg0, loc);
13009 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13010 over COND_EXPR in cases such as floating point comparisons. */
13011 if (integer_zerop (op1)
13012 && code == COND_EXPR
13013 && integer_onep (op2)
13014 && !VECTOR_TYPE_P (type)
13015 && truth_value_p (TREE_CODE (arg0)))
13016 return fold_convert_loc (loc, type,
13017 invert_truthvalue_loc (loc, arg0));
13019 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13020 if (TREE_CODE (arg0) == LT_EXPR
13021 && integer_zerop (TREE_OPERAND (arg0, 1))
13022 && integer_zerop (op2)
13023 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13025 /* sign_bit_p looks through both zero and sign extensions,
13026 but for this optimization only sign extensions are
13027 usable. */
13028 tree tem2 = TREE_OPERAND (arg0, 0);
13029 while (tem != tem2)
13031 if (TREE_CODE (tem2) != NOP_EXPR
13032 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13034 tem = NULL_TREE;
13035 break;
13037 tem2 = TREE_OPERAND (tem2, 0);
13039 /* sign_bit_p only checks ARG1 bits within A's precision.
13040 If <sign bit of A> has wider type than A, bits outside
13041 of A's precision in <sign bit of A> need to be checked.
13042 If they are all 0, this optimization needs to be done
13043 in unsigned A's type; if they are all 1, in signed A's type;
13044 otherwise this can't be done. */
13045 if (tem
13046 && TYPE_PRECISION (TREE_TYPE (tem))
13047 < TYPE_PRECISION (TREE_TYPE (arg1))
13048 && TYPE_PRECISION (TREE_TYPE (tem))
13049 < TYPE_PRECISION (type))
13051 int inner_width, outer_width;
13052 tree tem_type;
13054 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13055 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13056 if (outer_width > TYPE_PRECISION (type))
13057 outer_width = TYPE_PRECISION (type);
13059 wide_int mask = wi::shifted_mask
13060 (inner_width, outer_width - inner_width, false,
13061 TYPE_PRECISION (TREE_TYPE (arg1)));
13063 wide_int common = mask & wi::to_wide (arg1);
13064 if (common == mask)
13066 tem_type = signed_type_for (TREE_TYPE (tem));
13067 tem = fold_convert_loc (loc, tem_type, tem);
13069 else if (common == 0)
13071 tem_type = unsigned_type_for (TREE_TYPE (tem));
13072 tem = fold_convert_loc (loc, tem_type, tem);
13074 else
13075 tem = NULL;
13078 if (tem)
13079 return
13080 fold_convert_loc (loc, type,
13081 fold_build2_loc (loc, BIT_AND_EXPR,
13082 TREE_TYPE (tem), tem,
13083 fold_convert_loc (loc,
13084 TREE_TYPE (tem),
13085 arg1)));
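/* Illustrative instance: for 32-bit signed a,
   a < 0 ? 0x80000000 : 0 folds to a & 0x80000000 (converted to the
   signedness the checks above determine), since the selected value
   is exactly A's sign bit. */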
13088 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13089 already handled above. */
13090 if (TREE_CODE (arg0) == BIT_AND_EXPR
13091 && integer_onep (TREE_OPERAND (arg0, 1))
13092 && integer_zerop (op2)
13093 && integer_pow2p (arg1))
13095 tree tem = TREE_OPERAND (arg0, 0);
13096 STRIP_NOPS (tem);
13097 if (TREE_CODE (tem) == RSHIFT_EXPR
13098 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13099 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13100 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13101 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13102 fold_convert_loc (loc, type,
13103 TREE_OPERAND (tem, 0)),
13104 op1);
13107 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13108 is probably obsolete because the first operand should be a
13109 truth value (that's why we have the two cases above), but let's
13110 leave it in until we can confirm this for all front-ends. */
13111 if (integer_zerop (op2)
13112 && TREE_CODE (arg0) == NE_EXPR
13113 && integer_zerop (TREE_OPERAND (arg0, 1))
13114 && integer_pow2p (arg1)
13115 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13116 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13117 arg1, OEP_ONLY_CONST)
13118 /* operand_equal_p compares just value, not precision, so e.g.
13119 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13120 second operand 32-bit -128, which is not a power of two (or vice
13121 versa). */
13122 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13123 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13125 /* Disable the transformations below for vectors, since
13126 fold_binary_op_with_conditional_arg may undo them immediately,
13127 yielding an infinite loop. */
13128 if (code == VEC_COND_EXPR)
13129 return NULL_TREE;
13131 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13132 if (integer_zerop (op2)
13133 && truth_value_p (TREE_CODE (arg0))
13134 && truth_value_p (TREE_CODE (arg1))
13135 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13136 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13137 : TRUTH_ANDIF_EXPR,
13138 type, fold_convert_loc (loc, type, arg0), op1);
13140 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13141 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13142 && truth_value_p (TREE_CODE (arg0))
13143 && truth_value_p (TREE_CODE (arg1))
13144 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13146 location_t loc0 = expr_location_or (arg0, loc);
13147 /* Only perform transformation if ARG0 is easily inverted. */
13148 tem = fold_invert_truthvalue (loc0, arg0);
13149 if (tem)
13150 return fold_build2_loc (loc, code == VEC_COND_EXPR
13151 ? BIT_IOR_EXPR
13152 : TRUTH_ORIF_EXPR,
13153 type, fold_convert_loc (loc, type, tem),
13154 op1);
13157 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13158 if (integer_zerop (arg1)
13159 && truth_value_p (TREE_CODE (arg0))
13160 && truth_value_p (TREE_CODE (op2))
13161 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13163 location_t loc0 = expr_location_or (arg0, loc);
13164 /* Only perform transformation if ARG0 is easily inverted. */
13165 tem = fold_invert_truthvalue (loc0, arg0);
13166 if (tem)
13167 return fold_build2_loc (loc, code == VEC_COND_EXPR
13168 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13169 type, fold_convert_loc (loc, type, tem),
13170 op2);
13173 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13174 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13175 && truth_value_p (TREE_CODE (arg0))
13176 && truth_value_p (TREE_CODE (op2))
13177 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13178 return fold_build2_loc (loc, code == VEC_COND_EXPR
13179 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13180 type, fold_convert_loc (loc, type, arg0), op2);
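/* Taken together, the four transforms above rewrite, e.g.,
   a ? b : 0 => a && b, a ? b : 1 => !a || b,
   a ? 0 : b => !a && b, a ? 1 : b => a || b,
   using BIT_AND/BIT_IOR instead of the TRUTH_* codes for
   VEC_COND_EXPR. */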
13182 return NULL_TREE;
13184 case CALL_EXPR:
13185 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13186 of fold_ternary on them. */
13187 gcc_unreachable ();
13189 case BIT_FIELD_REF:
13190 if (TREE_CODE (arg0) == VECTOR_CST
13191 && (type == TREE_TYPE (TREE_TYPE (arg0))
13192 || (VECTOR_TYPE_P (type)
13193 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13194 && tree_fits_uhwi_p (op1)
13195 && tree_fits_uhwi_p (op2))
13197 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13198 unsigned HOST_WIDE_INT width
13199 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13200 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13201 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13202 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13204 if (n != 0
13205 && (idx % width) == 0
13206 && (n % width) == 0
13207 && known_le ((idx + n) / width,
13208 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13210 idx = idx / width;
13211 n = n / width;
13213 if (TREE_CODE (arg0) == VECTOR_CST)
13215 if (n == 1)
13217 tem = VECTOR_CST_ELT (arg0, idx);
13218 if (VECTOR_TYPE_P (type))
13219 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13220 return tem;
13223 tree_vector_builder vals (type, n, 1);
13224 for (unsigned i = 0; i < n; ++i)
13225 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13226 return vals.build ();
13231 /* On constants we can use native encode/interpret to constant
13232 fold (nearly) all BIT_FIELD_REFs. */
13233 if (CONSTANT_CLASS_P (arg0)
13234 && can_native_interpret_type_p (type)
13235 && BITS_PER_UNIT == 8
13236 && tree_fits_uhwi_p (op1)
13237 && tree_fits_uhwi_p (op2))
13239 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13240 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13241 /* Limit us to a reasonable amount of work. To relax the
13242 other limitations we need bit-shifting of the buffer
13243 and rounding up the size. */
13244 if (bitpos % BITS_PER_UNIT == 0
13245 && bitsize % BITS_PER_UNIT == 0
13246 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13248 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13249 unsigned HOST_WIDE_INT len
13250 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13251 bitpos / BITS_PER_UNIT);
13252 if (len > 0
13253 && len * BITS_PER_UNIT >= bitsize)
13255 tree v = native_interpret_expr (type, b,
13256 bitsize / BITS_PER_UNIT);
13257 if (v)
13258 return v;
13263 return NULL_TREE;
13265 case VEC_PERM_EXPR:
13266 /* Perform constant folding of VEC_PERM_EXPR. */
13267 if (TREE_CODE (arg2) == VECTOR_CST
13268 && TREE_CODE (op0) == VECTOR_CST
13269 && TREE_CODE (op1) == VECTOR_CST)
13271 /* Build a vector of integers from the tree mask. */
13272 vec_perm_builder builder;
13273 if (!tree_to_vec_perm_builder (&builder, arg2))
13274 return NULL_TREE;
13276 /* Create a vec_perm_indices for the integer vector. */
13277 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13278 bool single_arg = (op0 == op1);
13279 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13280 return fold_vec_perm (type, op0, op1, sel);
13282 return NULL_TREE;
13284 case BIT_INSERT_EXPR:
13285 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13286 if (TREE_CODE (arg0) == INTEGER_CST
13287 && TREE_CODE (arg1) == INTEGER_CST)
13289 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13290 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13291 wide_int tem = (wi::to_wide (arg0)
13292 & wi::shifted_mask (bitpos, bitsize, true,
13293 TYPE_PRECISION (type)));
13294 wide_int tem2
13295 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13296 bitsize), bitpos);
13297 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13299 else if (TREE_CODE (arg0) == VECTOR_CST
13300 && CONSTANT_CLASS_P (arg1)
13301 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13302 TREE_TYPE (arg1)))
13304 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13305 unsigned HOST_WIDE_INT elsize
13306 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13307 if (bitpos % elsize == 0)
13309 unsigned k = bitpos / elsize;
13310 unsigned HOST_WIDE_INT nelts;
13311 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13312 return arg0;
13313 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13315 tree_vector_builder elts (type, nelts, 1);
13316 elts.quick_grow (nelts);
13317 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13318 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13319 return elts.build ();
13323 return NULL_TREE;
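/* Worked examples (illustrative): for integers,
   BIT_INSERT_EXPR <0x11223344, 0xab, 8> clears bits [8,16) giving
   0x11220044, shifts the zero-extended value to 0x0000ab00, and ORs
   them into 0x1122ab44; for a vector with 32-bit lanes,
   BIT_INSERT_EXPR <{1, 2, 3, 4}, 9, 32> replaces lane 1, giving
   {1, 9, 3, 4}. */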
13325 default:
13326 return NULL_TREE;
13327 } /* switch (code) */
13330 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13331 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13332 constructor element index of the value returned. If the element is
13333 not found NULL_TREE is returned and *CTOR_IDX is updated to
13334 the index of the element after the ACCESS_INDEX position (which
13335 may be outside of the CTOR array). */
13337 tree
13338 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13339 unsigned *ctor_idx)
13341 tree index_type = NULL_TREE;
13342 signop index_sgn = UNSIGNED;
13343 offset_int low_bound = 0;
13345 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13347 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13348 if (domain_type && TYPE_MIN_VALUE (domain_type))
13350 /* Static constructors for variably sized objects make no sense. */
13351 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13352 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13353 /* ??? When it is obvious that the range is signed, treat it so. */
13354 if (TYPE_UNSIGNED (index_type)
13355 && TYPE_MAX_VALUE (domain_type)
13356 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13357 TYPE_MIN_VALUE (domain_type)))
13359 index_sgn = SIGNED;
13360 low_bound
13361 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13362 SIGNED);
13364 else
13366 index_sgn = TYPE_SIGN (index_type);
13367 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13372 if (index_type)
13373 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13374 index_sgn);
13376 offset_int index = low_bound;
13377 if (index_type)
13378 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13380 offset_int max_index = index;
13381 unsigned cnt;
13382 tree cfield, cval;
13383 bool first_p = true;
13385 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13387 /* Array constructor might explicitly set index, or specify a range,
13388 or leave index NULL meaning that it is the next index after the
13389 previous one. */
13390 if (cfield)
13392 if (TREE_CODE (cfield) == INTEGER_CST)
13393 max_index = index
13394 = offset_int::from (wi::to_wide (cfield), index_sgn);
13395 else
13397 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13398 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13399 index_sgn);
13400 max_index
13401 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13402 index_sgn);
13403 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13406 else if (!first_p)
13408 index = max_index + 1;
13409 if (index_type)
13410 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13411 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13412 max_index = index;
13414 else
13415 first_p = false;
13417 /* Do we have a match? */
13418 if (wi::cmp (access_index, index, index_sgn) >= 0)
13420 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13422 if (ctor_idx)
13423 *ctor_idx = cnt;
13424 return cval;
13427 else if (in_gimple_form)
13428 /* We're past the element we're searching for. Note that during
13429 parsing the elements might not be sorted.
13430 ??? We should use a binary search and a flag on the
13431 CONSTRUCTOR as to whether elements are sorted in declaration
13432 order. */
13433 break;
13435 if (ctor_idx)
13436 *ctor_idx = cnt;
13437 return NULL_TREE;
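/* Usage sketch (illustrative): for a CONSTRUCTOR such as
   {[0 ... 3] = 7, 9}, the RANGE_EXPR covers indexes 0-3 and the
   unindexed 9 implicitly follows at index 4, so ACCESS_INDEX 2
   returns 7 and ACCESS_INDEX 4 returns 9. */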
13440 /* Perform constant folding and related simplification of EXPR.
13441 The related simplifications include x*1 => x, x*0 => 0, etc.,
13442 and application of the associative law.
13443 NOP_EXPR conversions may be removed freely (as long as we
13444 are careful not to change the type of the overall expression).
13445 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13446 but we can constant-fold them if they have constant operands. */
13448 #ifdef ENABLE_FOLD_CHECKING
13449 # define fold(x) fold_1 (x)
13450 static tree fold_1 (tree);
13451 static
13452 #endif
13453 tree
13454 fold (tree expr)
13456 const tree t = expr;
13457 enum tree_code code = TREE_CODE (t);
13458 enum tree_code_class kind = TREE_CODE_CLASS (code);
13459 tree tem;
13460 location_t loc = EXPR_LOCATION (expr);
13462 /* Return right away if a constant. */
13463 if (kind == tcc_constant)
13464 return t;
13466 /* CALL_EXPR-like objects with variable numbers of operands are
13467 treated specially. */
13468 if (kind == tcc_vl_exp)
13470 if (code == CALL_EXPR)
13472 tem = fold_call_expr (loc, expr, false);
13473 return tem ? tem : expr;
13475 return expr;
13478 if (IS_EXPR_CODE_CLASS (kind))
13480 tree type = TREE_TYPE (t);
13481 tree op0, op1, op2;
13483 switch (TREE_CODE_LENGTH (code))
13485 case 1:
13486 op0 = TREE_OPERAND (t, 0);
13487 tem = fold_unary_loc (loc, code, type, op0);
13488 return tem ? tem : expr;
13489 case 2:
13490 op0 = TREE_OPERAND (t, 0);
13491 op1 = TREE_OPERAND (t, 1);
13492 tem = fold_binary_loc (loc, code, type, op0, op1);
13493 return tem ? tem : expr;
13494 case 3:
13495 op0 = TREE_OPERAND (t, 0);
13496 op1 = TREE_OPERAND (t, 1);
13497 op2 = TREE_OPERAND (t, 2);
13498 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13499 return tem ? tem : expr;
13500 default:
13501 break;
13505 switch (code)
13507 case ARRAY_REF:
13509 tree op0 = TREE_OPERAND (t, 0);
13510 tree op1 = TREE_OPERAND (t, 1);
13512 if (TREE_CODE (op1) == INTEGER_CST
13513 && TREE_CODE (op0) == CONSTRUCTOR
13514 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13516 tree val = get_array_ctor_element_at_index (op0,
13517 wi::to_offset (op1));
13518 if (val)
13519 return val;
13522 return t;
13525 /* Return a VECTOR_CST if possible. */
13526 case CONSTRUCTOR:
13528 tree type = TREE_TYPE (t);
13529 if (TREE_CODE (type) != VECTOR_TYPE)
13530 return t;
13532 unsigned i;
13533 tree val;
13534 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13535 if (! CONSTANT_CLASS_P (val))
13536 return t;
13538 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13541 case CONST_DECL:
13542 return fold (DECL_INITIAL (t));
13544 default:
13545 return t;
13546 } /* switch (code) */
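/* Usage sketch (illustrative, with hypothetical operands):

     tree five = build_int_cst (integer_type_node, 5);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, five, five));

   SUM is then the INTEGER_CST 10 rather than a PLUS_EXPR node. */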
13549 #ifdef ENABLE_FOLD_CHECKING
13550 #undef fold
13552 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13553 hash_table<nofree_ptr_hash<const tree_node> > *);
13554 static void fold_check_failed (const_tree, const_tree);
13555 void print_fold_checksum (const_tree);
13557 /* When --enable-checking=fold, compute a digest of expr before
13558 and after the actual fold call, to verify that fold did not
13559 accidentally change the original expr. */
13561 tree
13562 fold (tree expr)
13564 tree ret;
13565 struct md5_ctx ctx;
13566 unsigned char checksum_before[16], checksum_after[16];
13567 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13569 md5_init_ctx (&ctx);
13570 fold_checksum_tree (expr, &ctx, &ht);
13571 md5_finish_ctx (&ctx, checksum_before);
13572 ht.empty ();
13574 ret = fold_1 (expr);
13576 md5_init_ctx (&ctx);
13577 fold_checksum_tree (expr, &ctx, &ht);
13578 md5_finish_ctx (&ctx, checksum_after);
13580 if (memcmp (checksum_before, checksum_after, 16))
13581 fold_check_failed (expr, ret);
13583 return ret;
13586 void
13587 print_fold_checksum (const_tree expr)
13589 struct md5_ctx ctx;
13590 unsigned char checksum[16], cnt;
13591 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13593 md5_init_ctx (&ctx);
13594 fold_checksum_tree (expr, &ctx, &ht);
13595 md5_finish_ctx (&ctx, checksum);
13596 for (cnt = 0; cnt < 16; ++cnt)
13597 fprintf (stderr, "%02x", checksum[cnt]);
13598 putc ('\n', stderr);
13601 static void
13602 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13604 internal_error ("fold check: original tree changed by fold");
13607 static void
13608 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13609 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13611 const tree_node **slot;
13612 enum tree_code code;
13613 union tree_node *buf;
13614 int i, len;
13616 recursive_label:
13617 if (expr == NULL)
13618 return;
13619 slot = ht->find_slot (expr, INSERT);
13620 if (*slot != NULL)
13621 return;
13622 *slot = expr;
13623 code = TREE_CODE (expr);
13624 if (TREE_CODE_CLASS (code) == tcc_declaration
13625 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13627 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13628 size_t sz = tree_size (expr);
13629 buf = XALLOCAVAR (union tree_node, sz);
13630 memcpy ((char *) buf, expr, sz);
13631 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13632 buf->decl_with_vis.symtab_node = NULL;
13633 buf->base.nowarning_flag = 0;
13634 expr = (tree) buf;
13636 else if (TREE_CODE_CLASS (code) == tcc_type
13637 && (TYPE_POINTER_TO (expr)
13638 || TYPE_REFERENCE_TO (expr)
13639 || TYPE_CACHED_VALUES_P (expr)
13640 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13641 || TYPE_NEXT_VARIANT (expr)
13642 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13644 /* Allow these fields to be modified. */
13645 tree tmp;
13646 size_t sz = tree_size (expr);
13647 buf = XALLOCAVAR (union tree_node, sz);
13648 memcpy ((char *) buf, expr, sz);
13649 expr = tmp = (tree) buf;
13650 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13651 TYPE_POINTER_TO (tmp) = NULL;
13652 TYPE_REFERENCE_TO (tmp) = NULL;
13653 TYPE_NEXT_VARIANT (tmp) = NULL;
13654 TYPE_ALIAS_SET (tmp) = -1;
13655 if (TYPE_CACHED_VALUES_P (tmp))
13657 TYPE_CACHED_VALUES_P (tmp) = 0;
13658 TYPE_CACHED_VALUES (tmp) = NULL;
13661 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13663 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13664 that and change builtins.cc etc. instead - see PR89543. */
13665 size_t sz = tree_size (expr);
13666 buf = XALLOCAVAR (union tree_node, sz);
13667 memcpy ((char *) buf, expr, sz);
13668 buf->base.nowarning_flag = 0;
13669 expr = (tree) buf;
13671 md5_process_bytes (expr, tree_size (expr), ctx);
13672 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13673 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13674 if (TREE_CODE_CLASS (code) != tcc_type
13675 && TREE_CODE_CLASS (code) != tcc_declaration
13676 && code != TREE_LIST
13677 && code != SSA_NAME
13678 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13679 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13680 switch (TREE_CODE_CLASS (code))
13682 case tcc_constant:
13683 switch (code)
13685 case STRING_CST:
13686 md5_process_bytes (TREE_STRING_POINTER (expr),
13687 TREE_STRING_LENGTH (expr), ctx);
13688 break;
13689 case COMPLEX_CST:
13690 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13691 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13692 break;
13693 case VECTOR_CST:
13694 len = vector_cst_encoded_nelts (expr);
13695 for (i = 0; i < len; ++i)
13696 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13697 break;
13698 default:
13699 break;
13701 break;
13702 case tcc_exceptional:
13703 switch (code)
13705 case TREE_LIST:
13706 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13707 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13708 expr = TREE_CHAIN (expr);
13709 goto recursive_label;
13710 break;
13711 case TREE_VEC:
13712 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13713 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13714 break;
13715 default:
13716 break;
13718 break;
13719 case tcc_expression:
13720 case tcc_reference:
13721 case tcc_comparison:
13722 case tcc_unary:
13723 case tcc_binary:
13724 case tcc_statement:
13725 case tcc_vl_exp:
13726 len = TREE_OPERAND_LENGTH (expr);
13727 for (i = 0; i < len; ++i)
13728 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13729 break;
13730 case tcc_declaration:
13731 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13732 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13733 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13735 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13736 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13737 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13738 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13739 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13742 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13744 if (TREE_CODE (expr) == FUNCTION_DECL)
13746 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13747 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13749 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13751 break;
13752 case tcc_type:
13753 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13754 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13755 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13756 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13757 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13758 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13759 if (INTEGRAL_TYPE_P (expr)
13760 || SCALAR_FLOAT_TYPE_P (expr))
13762 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13763 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13765 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13766 if (TREE_CODE (expr) == RECORD_TYPE
13767 || TREE_CODE (expr) == UNION_TYPE
13768 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13769 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13770 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13771 break;
13772 default:
13773 break;
13777 /* Helper function for outputting the checksum of a tree T. When
13778 debugging with gdb, you can "define mynext" to be "next" followed
13779 by "call debug_fold_checksum (op0)", then just trace down till the
13780 outputs differ. */
13782 DEBUG_FUNCTION void
13783 debug_fold_checksum (const_tree t)
13785 int i;
13786 unsigned char checksum[16];
13787 struct md5_ctx ctx;
13788 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13790 md5_init_ctx (&ctx);
13791 fold_checksum_tree (t, &ctx, &ht);
13792 md5_finish_ctx (&ctx, checksum);
13793 ht.empty ();
13795 for (i = 0; i < 16; i++)
13796 fprintf (stderr, "%d ", checksum[i]);
13798 fprintf (stderr, "\n");
13801 #endif
13803 /* Fold a unary tree expression with code CODE of type TYPE with an
13804 operand OP0. LOC is the location of the resulting expression.
13805 Return a folded expression if successful. Otherwise, return a tree
13806 expression with code CODE of type TYPE with an operand OP0. */
13808 tree
13809 fold_build1_loc (location_t loc,
13810 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13812 tree tem;
13813 #ifdef ENABLE_FOLD_CHECKING
13814 unsigned char checksum_before[16], checksum_after[16];
13815 struct md5_ctx ctx;
13816 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13818 md5_init_ctx (&ctx);
13819 fold_checksum_tree (op0, &ctx, &ht);
13820 md5_finish_ctx (&ctx, checksum_before);
13821 ht.empty ();
13822 #endif
13824 tem = fold_unary_loc (loc, code, type, op0);
13825 if (!tem)
13826 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13828 #ifdef ENABLE_FOLD_CHECKING
13829 md5_init_ctx (&ctx);
13830 fold_checksum_tree (op0, &ctx, &ht);
13831 md5_finish_ctx (&ctx, checksum_after);
13833 if (memcmp (checksum_before, checksum_after, 16))
13834 fold_check_failed (op0, tem);
13835 #endif
13836 return tem;
13839 /* Fold a binary tree expression with code CODE of type TYPE with
13840 operands OP0 and OP1. LOC is the location of the resulting
13841 expression. Return a folded expression if successful. Otherwise,
13842 return a tree expression with code CODE of type TYPE with operands
13843 OP0 and OP1. */
13845 tree
13846 fold_build2_loc (location_t loc,
13847 enum tree_code code, tree type, tree op0, tree op1
13848 MEM_STAT_DECL)
13850 tree tem;
13851 #ifdef ENABLE_FOLD_CHECKING
13852 unsigned char checksum_before_op0[16],
13853 checksum_before_op1[16],
13854 checksum_after_op0[16],
13855 checksum_after_op1[16];
13856 struct md5_ctx ctx;
13857 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13859 md5_init_ctx (&ctx);
13860 fold_checksum_tree (op0, &ctx, &ht);
13861 md5_finish_ctx (&ctx, checksum_before_op0);
13862 ht.empty ();
13864 md5_init_ctx (&ctx);
13865 fold_checksum_tree (op1, &ctx, &ht);
13866 md5_finish_ctx (&ctx, checksum_before_op1);
13867 ht.empty ();
13868 #endif
13870 tem = fold_binary_loc (loc, code, type, op0, op1);
13871 if (!tem)
13872 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13874 #ifdef ENABLE_FOLD_CHECKING
13875 md5_init_ctx (&ctx);
13876 fold_checksum_tree (op0, &ctx, &ht);
13877 md5_finish_ctx (&ctx, checksum_after_op0);
13878 ht.empty ();
13880 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13881 fold_check_failed (op0, tem);
13883 md5_init_ctx (&ctx);
13884 fold_checksum_tree (op1, &ctx, &ht);
13885 md5_finish_ctx (&ctx, checksum_after_op1);
13887 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13888 fold_check_failed (op1, tem);
13889 #endif
13890 return tem;
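/* Usage sketch (illustrative): callers prefer these wrappers over the
   plain buildN routines so that trees are simplified on construction:

     tree t = fold_build2_loc (UNKNOWN_LOCATION, MULT_EXPR, sizetype,
			       size_int (4), size_int (8));

   T is then the INTEGER_CST 32, not a MULT_EXPR. */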
13893 /* Fold a ternary tree expression with code CODE of type TYPE with
13894 operands OP0, OP1, and OP2. Return a folded expression if
13895 successful. Otherwise, return a tree expression with code CODE of
13896 type TYPE with operands OP0, OP1, and OP2. */
13898 tree
13899 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13900 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13902 tree tem;
13903 #ifdef ENABLE_FOLD_CHECKING
13904 unsigned char checksum_before_op0[16],
13905 checksum_before_op1[16],
13906 checksum_before_op2[16],
13907 checksum_after_op0[16],
13908 checksum_after_op1[16],
13909 checksum_after_op2[16];
13910 struct md5_ctx ctx;
13911 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13913 md5_init_ctx (&ctx);
13914 fold_checksum_tree (op0, &ctx, &ht);
13915 md5_finish_ctx (&ctx, checksum_before_op0);
13916 ht.empty ();
13918 md5_init_ctx (&ctx);
13919 fold_checksum_tree (op1, &ctx, &ht);
13920 md5_finish_ctx (&ctx, checksum_before_op1);
13921 ht.empty ();
13923 md5_init_ctx (&ctx);
13924 fold_checksum_tree (op2, &ctx, &ht);
13925 md5_finish_ctx (&ctx, checksum_before_op2);
13926 ht.empty ();
13927 #endif
13929 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13930 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13931 if (!tem)
13932 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13934 #ifdef ENABLE_FOLD_CHECKING
13935 md5_init_ctx (&ctx);
13936 fold_checksum_tree (op0, &ctx, &ht);
13937 md5_finish_ctx (&ctx, checksum_after_op0);
13938 ht.empty ();
13940 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13941 fold_check_failed (op0, tem);
13943 md5_init_ctx (&ctx);
13944 fold_checksum_tree (op1, &ctx, &ht);
13945 md5_finish_ctx (&ctx, checksum_after_op1);
13946 ht.empty ();
13948 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13949 fold_check_failed (op1, tem);
13951 md5_init_ctx (&ctx);
13952 fold_checksum_tree (op2, &ctx, &ht);
13953 md5_finish_ctx (&ctx, checksum_after_op2);
13955 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13956 fold_check_failed (op2, tem);
13957 #endif
13958 return tem;
13961 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
13962 arguments in ARGARRAY, and a null static chain.
13963 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13964 of type TYPE from the given operands as constructed by build_call_array. */
13966 tree
13967 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13968 int nargs, tree *argarray)
13970 tree tem;
13971 #ifdef ENABLE_FOLD_CHECKING
13972 unsigned char checksum_before_fn[16],
13973 checksum_before_arglist[16],
13974 checksum_after_fn[16],
13975 checksum_after_arglist[16];
13976 struct md5_ctx ctx;
13977 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13978 int i;
13980 md5_init_ctx (&ctx);
13981 fold_checksum_tree (fn, &ctx, &ht);
13982 md5_finish_ctx (&ctx, checksum_before_fn);
13983 ht.empty ();
13985 md5_init_ctx (&ctx);
13986 for (i = 0; i < nargs; i++)
13987 fold_checksum_tree (argarray[i], &ctx, &ht);
13988 md5_finish_ctx (&ctx, checksum_before_arglist);
13989 ht.empty ();
13990 #endif
13992 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13993 if (!tem)
13994 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13996 #ifdef ENABLE_FOLD_CHECKING
13997 md5_init_ctx (&ctx);
13998 fold_checksum_tree (fn, &ctx, &ht);
13999 md5_finish_ctx (&ctx, checksum_after_fn);
14000 ht.empty ();
14002 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14003 fold_check_failed (fn, tem);
14005 md5_init_ctx (&ctx);
14006 for (i = 0; i < nargs; i++)
14007 fold_checksum_tree (argarray[i], &ctx, &ht);
14008 md5_finish_ctx (&ctx, checksum_after_arglist);
14010 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14011 fold_check_failed (NULL_TREE, tem);
14012 #endif
14013 return tem;
14016 /* Perform constant folding and related simplification of initializer
14017 expression EXPR. These behave identically to "fold_buildN" but ignore
14018 potential run-time traps and exceptions that fold must preserve. */
14020 #define START_FOLD_INIT \
14021 int saved_signaling_nans = flag_signaling_nans;\
14022 int saved_trapping_math = flag_trapping_math;\
14023 int saved_rounding_math = flag_rounding_math;\
14024 int saved_trapv = flag_trapv;\
14025 int saved_folding_initializer = folding_initializer;\
14026 flag_signaling_nans = 0;\
14027 flag_trapping_math = 0;\
14028 flag_rounding_math = 0;\
14029 flag_trapv = 0;\
14030 folding_initializer = 1;
14032 #define END_FOLD_INIT \
14033 flag_signaling_nans = saved_signaling_nans;\
14034 flag_trapping_math = saved_trapping_math;\
14035 flag_rounding_math = saved_rounding_math;\
14036 flag_trapv = saved_trapv;\
14037 folding_initializer = saved_folding_initializer;
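/* E.g. fold_init can fold 1.0 / 3.0 even under -frounding-math,
   since an initializer is evaluated at translation time anyway
   (illustrative). */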
14039 tree
14040 fold_init (tree expr)
14042 tree result;
14043 START_FOLD_INIT;
14045 result = fold (expr);
14047 END_FOLD_INIT;
14048 return result;
14051 tree
14052 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14053 tree type, tree op)
14055 tree result;
14056 START_FOLD_INIT;
14058 result = fold_build1_loc (loc, code, type, op);
14060 END_FOLD_INIT;
14061 return result;
14064 tree
14065 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14066 tree type, tree op0, tree op1)
14068 tree result;
14069 START_FOLD_INIT;
14071 result = fold_build2_loc (loc, code, type, op0, op1);
14073 END_FOLD_INIT;
14074 return result;
14077 tree
14078 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14079 int nargs, tree *argarray)
14081 tree result;
14082 START_FOLD_INIT;
14084 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14086 END_FOLD_INIT;
14087 return result;
14090 tree
14091 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14092 tree lhs, tree rhs)
14094 tree result;
14095 START_FOLD_INIT;
14097 result = fold_binary_loc (loc, code, type, lhs, rhs);
14099 END_FOLD_INIT;
14100 return result;
14103 #undef START_FOLD_INIT
14104 #undef END_FOLD_INIT
14106 /* Determine if the first argument is a multiple of the second argument.
14107 Return 0 if it is not, or if we cannot easily determine it to be.
14109 An example of the sort of thing we care about (at this point; this routine
14110 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14111 fold cases do now) is discovering that
14113 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14115 is a multiple of
14117 SAVE_EXPR (J * 8)
14119 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14121 This code also handles discovering that
14123 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14125 is a multiple of 8 so we don't have to worry about dealing with a
14126 possible remainder.
14128 Note that we *look* inside a SAVE_EXPR only to determine how it was
14129 calculated; it is not safe for fold to do much of anything else with the
14130 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14131 at run time. For example, the latter example above *cannot* be implemented
14132 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14133 evaluation time of the original SAVE_EXPR is not necessarily the same at
14134 the time the new expression is evaluated. The only optimization of this
14135 sort that would be valid is changing
14137 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14139 divided by 8 to
14141 SAVE_EXPR (I) * SAVE_EXPR (J)
14143 (where the same SAVE_EXPR (J) is used in the original and the
14144 transformed version).
14146 NOWRAP specifies whether all outer operations in TYPE should
14147 be considered not wrapping. Any type conversion within TOP acts
14148 as a barrier and we will fall back to NOWRAP being false.
14149 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14150 as not wrapping even though they are generally using unsigned arithmetic. */
14152 int
14153 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14155 gimple *stmt;
14156 tree op1, op2;
14158 if (operand_equal_p (top, bottom, 0))
14159 return 1;
14161 if (TREE_CODE (type) != INTEGER_TYPE)
14162 return 0;
14164 switch (TREE_CODE (top))
14166 case BIT_AND_EXPR:
14167 /* Bitwise and provides a power of two multiple. If the mask is
14168 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14169 if (!integer_pow2p (bottom))
14170 return 0;
14171 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14172 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14174 case MULT_EXPR:
14175 /* If the multiplication can wrap we cannot recurse further unless
14176 the bottom is a power of two which is where wrapping does not
14177 matter. */
14178 if (!nowrap
14179 && !TYPE_OVERFLOW_UNDEFINED (type)
14180 && !integer_pow2p (bottom))
14181 return 0;
14182 if (TREE_CODE (bottom) == INTEGER_CST)
14184 op1 = TREE_OPERAND (top, 0);
14185 op2 = TREE_OPERAND (top, 1);
14186 if (TREE_CODE (op1) == INTEGER_CST)
14187 std::swap (op1, op2);
14188 if (TREE_CODE (op2) == INTEGER_CST)
14190 if (multiple_of_p (type, op2, bottom, nowrap))
14191 return 1;
14192 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14193 if (multiple_of_p (type, bottom, op2, nowrap))
14195 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14196 wi::to_widest (op2));
14197 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14199 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14200 return multiple_of_p (type, op1, op2, nowrap);
14203 return multiple_of_p (type, op1, bottom, nowrap);
14206 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14207 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14209 case LSHIFT_EXPR:
14210 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14211 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14213 op1 = TREE_OPERAND (top, 1);
14214 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14216 wide_int mul_op
14217 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14218 return multiple_of_p (type,
14219 wide_int_to_tree (type, mul_op), bottom,
14220 nowrap);
14223 return 0;
14225 case MINUS_EXPR:
14226 case PLUS_EXPR:
14227 /* If the addition or subtraction can wrap we cannot recurse further
14228 unless bottom is a power of two which is where wrapping does not
14229 matter. */
14230 if (!nowrap
14231 && !TYPE_OVERFLOW_UNDEFINED (type)
14232 && !integer_pow2p (bottom))
14233 return 0;
14235 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14236 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14237 but 0xfffffffd is not. */
14238 op1 = TREE_OPERAND (top, 1);
14239 if (TREE_CODE (top) == PLUS_EXPR
14240 && nowrap
14241 && TYPE_UNSIGNED (type)
14242 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14243 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14245 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14246 of bottom, so be conservative here and check whether both op0 and op1
14247 are multiples of bottom. Note we check the second operand first
14248 since it's usually simpler. */
14249 return (multiple_of_p (type, op1, bottom, nowrap)
14250 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14252 CASE_CONVERT:
14253 /* Can't handle conversions from non-integral or wider integral type. */
14254 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14255 || (TYPE_PRECISION (type)
14256 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14257 return 0;
14258 /* NOWRAP only extends to operations in the outermost type so
14259 make sure to strip it off here. */
14260 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14261 TREE_OPERAND (top, 0), bottom, false);
14263 case SAVE_EXPR:
14264 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14266 case COND_EXPR:
14267 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14268 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14270 case INTEGER_CST:
14271 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14272 return 0;
14273 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14274 SIGNED);
14276 case SSA_NAME:
14277 if (TREE_CODE (bottom) == INTEGER_CST
14278 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14279 && gimple_code (stmt) == GIMPLE_ASSIGN)
14281 enum tree_code code = gimple_assign_rhs_code (stmt);
14283 /* Check for special cases to see if top is defined as a multiple
14284 of bottom:
14286 top = (X & ~(bottom - 1)) ; bottom is a power of 2
14290 Y = X % bottom
14291 top = X - Y. */
14292 if (code == BIT_AND_EXPR
14293 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14294 && TREE_CODE (op2) == INTEGER_CST
14295 && integer_pow2p (bottom)
14296 && wi::multiple_of_p (wi::to_widest (op2),
14297 wi::to_widest (bottom), UNSIGNED))
14298 return 1;
14300 op1 = gimple_assign_rhs1 (stmt);
14301 if (code == MINUS_EXPR
14302 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14303 && TREE_CODE (op2) == SSA_NAME
14304 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14305 && gimple_code (stmt) == GIMPLE_ASSIGN
14306 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14307 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14308 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14309 return 1;
14312 /* fall through */
14314 default:
14315 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14316 return multiple_p (wi::to_poly_widest (top),
14317 wi::to_poly_widest (bottom));
14319 return 0;
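/* [Editorial sketch -- not part of fold-const.cc.]  A standalone
   illustration of two identities relied on above, on plain 64-bit
   unsigned values; the helper name `is_multiple' is invented.  */
#if 0
#include <cassert>
#include <cstdint>

static bool
is_multiple (uint64_t top, uint64_t bottom)
{
  return bottom != 0 && top % bottom == 0;
}

int
main ()
{
  /* X << CST is treated as X * (1 << CST): when 1 << CST is itself a
     multiple of bottom, so is the shift, whatever X holds.  */
  assert (is_multiple (uint64_t (5) << 4, 8));

  /* PLUS_EXPR is conservative: both operands being multiples is
     sufficient (24 + 16 is a multiple of 8) but not necessary
     (3 + 5 is one too, yet the code above would answer 0 for it).  */
  assert (is_multiple (24 + 16, 8));
  assert (is_multiple (3 + 5, 8) && !is_multiple (3, 8));
  return 0;
}
#endif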
14323 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14324 This function returns true for integer expressions, and returns
14325 false if uncertain. */
14327 bool
14328 tree_expr_finite_p (const_tree x)
14330 machine_mode mode = element_mode (x);
14331 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14332 return true;
14333 switch (TREE_CODE (x))
14335 case REAL_CST:
14336 return real_isfinite (TREE_REAL_CST_PTR (x));
14337 case COMPLEX_CST:
14338 return tree_expr_finite_p (TREE_REALPART (x))
14339 && tree_expr_finite_p (TREE_IMAGPART (x));
14340 case FLOAT_EXPR:
14341 return true;
14342 case ABS_EXPR:
14343 case CONVERT_EXPR:
14344 case NON_LVALUE_EXPR:
14345 case NEGATE_EXPR:
14346 case SAVE_EXPR:
14347 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14348 case MIN_EXPR:
14349 case MAX_EXPR:
14350 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14351 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14352 case COND_EXPR:
14353 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14354 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14355 case CALL_EXPR:
14356 switch (get_call_combined_fn (x))
14358 CASE_CFN_FABS:
14359 CASE_CFN_FABS_FN:
14360 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14361 CASE_CFN_FMAX:
14362 CASE_CFN_FMAX_FN:
14363 CASE_CFN_FMIN:
14364 CASE_CFN_FMIN_FN:
14365 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14366 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14367 default:
14368 return false;
14371 default:
14372 return false;
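/* [Editorial sketch -- not part of fold-const.cc.]  Why the cases above
   are safe: fabs and fmin/fmax of finite operands stay finite, whereas
   addition can overflow to infinity, so PLUS_EXPR is deliberately
   absent from the switch.  Standalone, using <cmath> analogues.  */
#if 0
#include <cassert>
#include <cmath>

int
main ()
{
  double fin = 1.0e308;
  assert (std::isfinite (std::fabs (-fin)));
  assert (std::isfinite (std::fmax (fin, 2.0)));
  assert (std::isinf (fin + fin));	/* finite + finite can be inf */
  return 0;
}
#endif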
14376 /* Return true if expression X evaluates to an infinity.
14377 This function returns false for integer expressions. */
14379 bool
14380 tree_expr_infinite_p (const_tree x)
14382 if (!HONOR_INFINITIES (x))
14383 return false;
14384 switch (TREE_CODE (x))
14386 case REAL_CST:
14387 return real_isinf (TREE_REAL_CST_PTR (x));
14388 case ABS_EXPR:
14389 case NEGATE_EXPR:
14390 case NON_LVALUE_EXPR:
14391 case SAVE_EXPR:
14392 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14393 case COND_EXPR:
14394 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14395 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14396 default:
14397 return false;
14401 /* Return true if expression X could evaluate to an infinity.
14402 This function returns false for integer expressions, and returns
14403 true if uncertain. */
14405 bool
14406 tree_expr_maybe_infinite_p (const_tree x)
14408 if (!HONOR_INFINITIES (x))
14409 return false;
14410 switch (TREE_CODE (x))
14412 case REAL_CST:
14413 return real_isinf (TREE_REAL_CST_PTR (x));
14414 case FLOAT_EXPR:
14415 return false;
14416 case ABS_EXPR:
14417 case NEGATE_EXPR:
14418 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14419 case COND_EXPR:
14420 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14421 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14422 default:
14423 return true;
14427 /* Return true if expression X evaluates to a signaling NaN.
14428 This function returns false for integer expressions. */
14430 bool
14431 tree_expr_signaling_nan_p (const_tree x)
14433 if (!HONOR_SNANS (x))
14434 return false;
14435 switch (TREE_CODE (x))
14437 case REAL_CST:
14438 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14439 case NON_LVALUE_EXPR:
14440 case SAVE_EXPR:
14441 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14442 case COND_EXPR:
14443 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14444 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14445 default:
14446 return false;
14450 /* Return true if expression X could evaluate to a signaling NaN.
14451 This function returns false for integer expressions, and returns
14452 true if uncertain. */
14454 bool
14455 tree_expr_maybe_signaling_nan_p (const_tree x)
14457 if (!HONOR_SNANS (x))
14458 return false;
14459 switch (TREE_CODE (x))
14461 case REAL_CST:
14462 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14463 case FLOAT_EXPR:
14464 return false;
14465 case ABS_EXPR:
14466 case CONVERT_EXPR:
14467 case NEGATE_EXPR:
14468 case NON_LVALUE_EXPR:
14469 case SAVE_EXPR:
14470 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14471 case MIN_EXPR:
14472 case MAX_EXPR:
14473 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14474 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14475 case COND_EXPR:
14476 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14477 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14478 case CALL_EXPR:
14479 switch (get_call_combined_fn (x))
14481 CASE_CFN_FABS:
14482 CASE_CFN_FABS_FN:
14483 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14484 CASE_CFN_FMAX:
14485 CASE_CFN_FMAX_FN:
14486 CASE_CFN_FMIN:
14487 CASE_CFN_FMIN_FN:
14488 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14489 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14490 default:
14491 return true;
14493 default:
14494 return true;
14498 /* Return true if expression X evaluates to a NaN.
14499 This function returns false for integer expressions. */
14501 bool
14502 tree_expr_nan_p (const_tree x)
14504 if (!HONOR_NANS (x))
14505 return false;
14506 switch (TREE_CODE (x))
14508 case REAL_CST:
14509 return real_isnan (TREE_REAL_CST_PTR (x));
14510 case NON_LVALUE_EXPR:
14511 case SAVE_EXPR:
14512 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14513 case COND_EXPR:
14514 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14515 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14516 default:
14517 return false;
14521 /* Return true if expression X could evaluate to a NaN.
14522 This function returns false for integer expressions, and returns
14523 true if uncertain. */
14525 bool
14526 tree_expr_maybe_nan_p (const_tree x)
14528 if (!HONOR_NANS (x))
14529 return false;
14530 switch (TREE_CODE (x))
14532 case REAL_CST:
14533 return real_isnan (TREE_REAL_CST_PTR (x));
14534 case FLOAT_EXPR:
14535 return false;
14536 case PLUS_EXPR:
14537 case MINUS_EXPR:
14538 case MULT_EXPR:
14539 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14540 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14541 case ABS_EXPR:
14542 case CONVERT_EXPR:
14543 case NEGATE_EXPR:
14544 case NON_LVALUE_EXPR:
14545 case SAVE_EXPR:
14546 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14547 case MIN_EXPR:
14548 case MAX_EXPR:
14549 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14550 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14551 case COND_EXPR:
14552 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14553 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14554 case CALL_EXPR:
14555 switch (get_call_combined_fn (x))
14557 CASE_CFN_FABS:
14558 CASE_CFN_FABS_FN:
14559 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14560 CASE_CFN_FMAX:
14561 CASE_CFN_FMAX_FN:
14562 CASE_CFN_FMIN:
14563 CASE_CFN_FMIN_FN:
14564 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14565 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14566 default:
14567 return true;
14569 default:
14570 return true;
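/* [Editorial sketch -- not part of fold-const.cc.]  The predicates above
   come in "must"/"may" pairs: tree_expr_nan_p errs toward false when
   uncertain, tree_expr_maybe_nan_p toward true.  A toy three-valued
   reduction of that contract; all names are invented.  */
#if 0
#include <cassert>

enum knowledge { NO, MAYBE, YES };

/* A sound "must" query answers false when unsure...  */
static bool must_be_nan (knowledge k) { return k == YES; }
/* ...while a sound "may" query answers true when unsure.  */
static bool may_be_nan (knowledge k) { return k != NO; }

int
main ()
{
  assert (!must_be_nan (MAYBE) && may_be_nan (MAYBE));
  /* "Must" implies "may" -- the invariant the pairing maintains.  */
  const knowledge all[] = { NO, MAYBE, YES };
  for (knowledge k : all)
    assert (!must_be_nan (k) || may_be_nan (k));
  return 0;
}
#endif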
14574 /* Return true if expression X could evaluate to -0.0.
14575 This function returns true if uncertain. */
14577 bool
14578 tree_expr_maybe_real_minus_zero_p (const_tree x)
14580 if (!HONOR_SIGNED_ZEROS (x))
14581 return false;
14582 switch (TREE_CODE (x))
14584 case REAL_CST:
14585 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14586 case INTEGER_CST:
14587 case FLOAT_EXPR:
14588 case ABS_EXPR:
14589 return false;
14590 case NON_LVALUE_EXPR:
14591 case SAVE_EXPR:
14592 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14593 case COND_EXPR:
14594 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14595 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14596 case CALL_EXPR:
14597 switch (get_call_combined_fn (x))
14599 CASE_CFN_FABS:
14600 CASE_CFN_FABS_FN:
14601 return false;
14602 default:
14603 break;
14605 default:
14606 break;
14608 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14609 but currently those predicates require tree and not const_tree. */
14610 return true;
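/* [Editorial sketch -- not part of fold-const.cc.]  -0.0 compares equal
   to 0.0, so only the sign bit distinguishes it -- what the REAL_CST
   case checks -- and fabs and integer->float conversion can never
   produce it, matching the ABS_EXPR / FLOAT_EXPR cases.  Standalone.  */
#if 0
#include <cassert>
#include <cmath>

int
main ()
{
  assert (-0.0 == 0.0 && std::signbit (-0.0));
  assert (!std::signbit (std::fabs (-0.0)));
  assert (!std::signbit (static_cast<double> (0)));
  return 0;
}
#endif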
14613 #define tree_expr_nonnegative_warnv_p(X, Y) \
14614 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14616 #define RECURSE(X) \
14617 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
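/* [Editorial sketch -- not part of fold-const.cc.]  The poisoning idiom
   above, reduced to a minimal invented example: the function-like macro
   turns any direct recursive call into a compile error, while the
   parenthesized name in RECURSE (and in the definition) suppresses
   macro expansion and keeps the DEPTH argument maintained.  */
#if 0
static bool my_pred (int x, int depth);

#define my_pred(X) _Pragma ("GCC error \"Use RECURSE\"") false
#define RECURSE(X) ((my_pred) (X, depth + 1))

static bool
(my_pred) (int x, int depth)	/* parentheses dodge the macro */
{
  if (depth > 8 || x <= 1)
    return true;
  return RECURSE (x / 2);	/* writing my_pred (x / 2) would not compile */
}
#endif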
14619 /* Return true if CODE or TYPE is known to be non-negative. */
14621 static bool
14622 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14624 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14625 && truth_value_p (code))
14626 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14627 have a signed:1 type (where the values are -1 and 0). */
14628 return true;
14629 return false;
14632 /* Return true if (CODE OP0) is known to be non-negative. If the return
14633 value is based on the assumption that signed overflow is undefined,
14634 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14635 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14637 bool
14638 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14639 bool *strict_overflow_p, int depth)
14641 if (TYPE_UNSIGNED (type))
14642 return true;
14644 switch (code)
14646 case ABS_EXPR:
14647 /* We can't return 1 if flag_wrapv is set because
14648 ABS_EXPR<INT_MIN> = INT_MIN. */
14649 if (!ANY_INTEGRAL_TYPE_P (type))
14650 return true;
14651 if (TYPE_OVERFLOW_UNDEFINED (type))
14653 *strict_overflow_p = true;
14654 return true;
14656 break;
14658 case NON_LVALUE_EXPR:
14659 case FLOAT_EXPR:
14660 case FIX_TRUNC_EXPR:
14661 return RECURSE (op0);
14663 CASE_CONVERT:
14665 tree inner_type = TREE_TYPE (op0);
14666 tree outer_type = type;
14668 if (TREE_CODE (outer_type) == REAL_TYPE)
14670 if (TREE_CODE (inner_type) == REAL_TYPE)
14671 return RECURSE (op0);
14672 if (INTEGRAL_TYPE_P (inner_type))
14674 if (TYPE_UNSIGNED (inner_type))
14675 return true;
14676 return RECURSE (op0);
14679 else if (INTEGRAL_TYPE_P (outer_type))
14681 if (TREE_CODE (inner_type) == REAL_TYPE)
14682 return RECURSE (op0);
14683 if (INTEGRAL_TYPE_P (inner_type))
14684 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14685 && TYPE_UNSIGNED (inner_type);
14688 break;
14690 default:
14691 return tree_simple_nonnegative_warnv_p (code, type);
14694 /* We don't know sign of `t', so be conservative and return false. */
14695 return false;
14698 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14699 value is based on the assumption that signed overflow is undefined,
14700 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14701 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14703 bool
14704 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14705 tree op1, bool *strict_overflow_p,
14706 int depth)
14708 if (TYPE_UNSIGNED (type))
14709 return true;
14711 switch (code)
14713 case POINTER_PLUS_EXPR:
14714 case PLUS_EXPR:
14715 if (FLOAT_TYPE_P (type))
14716 return RECURSE (op0) && RECURSE (op1);
14718 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14719 both unsigned and at least 2 bits shorter than the result. */
14720 if (TREE_CODE (type) == INTEGER_TYPE
14721 && TREE_CODE (op0) == NOP_EXPR
14722 && TREE_CODE (op1) == NOP_EXPR)
14724 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14725 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14726 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14727 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14729 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14730 TYPE_PRECISION (inner2)) + 1;
14731 return prec < TYPE_PRECISION (type);
14734 break;
14736 case MULT_EXPR:
14737 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14739 /* x * x is always non-negative for floating point x
14740 or when signed overflow is undefined. */
14741 if (operand_equal_p (op0, op1, 0)
14742 || (RECURSE (op0) && RECURSE (op1)))
14744 if (ANY_INTEGRAL_TYPE_P (type)
14745 && TYPE_OVERFLOW_UNDEFINED (type))
14746 *strict_overflow_p = true;
14747 return true;
14751 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14752 both unsigned and their combined precision is less than the result's. */
14753 if (TREE_CODE (type) == INTEGER_TYPE
14754 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14755 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14757 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14758 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14759 : TREE_TYPE (op0);
14760 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14761 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14762 : TREE_TYPE (op1);
14764 bool unsigned0 = TYPE_UNSIGNED (inner0);
14765 bool unsigned1 = TYPE_UNSIGNED (inner1);
14767 if (TREE_CODE (op0) == INTEGER_CST)
14768 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14770 if (TREE_CODE (op1) == INTEGER_CST)
14771 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14773 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14774 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14776 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14777 ? tree_int_cst_min_precision (op0, UNSIGNED)
14778 : TYPE_PRECISION (inner0);
14780 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14781 ? tree_int_cst_min_precision (op1, UNSIGNED)
14782 : TYPE_PRECISION (inner1);
14784 return precision0 + precision1 < TYPE_PRECISION (type);
14787 return false;
14789 case BIT_AND_EXPR:
14790 return RECURSE (op0) || RECURSE (op1);
14792 case MAX_EXPR:
14793 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14794 things. */
14795 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14796 return RECURSE (op0) && RECURSE (op1);
14797 return RECURSE (op0) || RECURSE (op1);
14799 case BIT_IOR_EXPR:
14800 case BIT_XOR_EXPR:
14801 case MIN_EXPR:
14802 case RDIV_EXPR:
14803 case TRUNC_DIV_EXPR:
14804 case CEIL_DIV_EXPR:
14805 case FLOOR_DIV_EXPR:
14806 case ROUND_DIV_EXPR:
14807 return RECURSE (op0) && RECURSE (op1);
14809 case TRUNC_MOD_EXPR:
14810 return RECURSE (op0);
14812 case FLOOR_MOD_EXPR:
14813 return RECURSE (op1);
14815 case CEIL_MOD_EXPR:
14816 case ROUND_MOD_EXPR:
14817 default:
14818 return tree_simple_nonnegative_warnv_p (code, type);
14821 /* We don't know sign of `t', so be conservative and return false. */
14822 return false;
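/* [Editorial sketch -- not part of fold-const.cc.]  The precision
   arguments above for PLUS and MULT, on concrete widths: MAX (8, 8) + 1
   = 9 < 32 keeps a widened 8-bit sum away from the sign bit, and
   8 + 8 = 16 < 32 does the same for a product.  Standalone.  */
#if 0
#include <cassert>
#include <cstdint>

int
main ()
{
  uint8_t x = 255, y = 255;
  int32_t sum = int32_t (x) + int32_t (y);
  int32_t prod = int32_t (x) * int32_t (y);
  assert (sum == 510 && sum >= 0);	/* needs at most 9 bits */
  assert (prod == 65025 && prod >= 0);	/* needs at most 16 bits */
  return 0;
}
#endif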
14825 /* Return true if T is known to be non-negative. If the return
14826 value is based on the assumption that signed overflow is undefined,
14827 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14828 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14830 bool
14831 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14833 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14834 return true;
14836 switch (TREE_CODE (t))
14838 case INTEGER_CST:
14839 return tree_int_cst_sgn (t) >= 0;
14841 case REAL_CST:
14842 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14844 case FIXED_CST:
14845 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14847 case COND_EXPR:
14848 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14850 case SSA_NAME:
14851 /* Limit the depth of recursion to avoid quadratic behavior.
14852 This is expected to catch almost all occurrences in practice.
14853 If this code misses important cases that unbounded recursion
14854 would not, passes that need this information could be revised
14855 to provide it through dataflow propagation. */
14856 return (!name_registered_for_update_p (t)
14857 && depth < param_max_ssa_name_query_depth
14858 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14859 strict_overflow_p, depth));
14861 default:
14862 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14866 /* Return true if T is known to be non-negative. If the return
14867 value is based on the assumption that signed overflow is undefined,
14868 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14869 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14871 bool
14872 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14873 bool *strict_overflow_p, int depth)
14875 switch (fn)
14877 CASE_CFN_ACOS:
14878 CASE_CFN_ACOS_FN:
14879 CASE_CFN_ACOSH:
14880 CASE_CFN_ACOSH_FN:
14881 CASE_CFN_CABS:
14882 CASE_CFN_CABS_FN:
14883 CASE_CFN_COSH:
14884 CASE_CFN_COSH_FN:
14885 CASE_CFN_ERFC:
14886 CASE_CFN_ERFC_FN:
14887 CASE_CFN_EXP:
14888 CASE_CFN_EXP_FN:
14889 CASE_CFN_EXP10:
14890 CASE_CFN_EXP2:
14891 CASE_CFN_EXP2_FN:
14892 CASE_CFN_FABS:
14893 CASE_CFN_FABS_FN:
14894 CASE_CFN_FDIM:
14895 CASE_CFN_FDIM_FN:
14896 CASE_CFN_HYPOT:
14897 CASE_CFN_HYPOT_FN:
14898 CASE_CFN_POW10:
14899 CASE_CFN_FFS:
14900 CASE_CFN_PARITY:
14901 CASE_CFN_POPCOUNT:
14902 CASE_CFN_CLZ:
14903 CASE_CFN_CLRSB:
14904 case CFN_BUILT_IN_BSWAP16:
14905 case CFN_BUILT_IN_BSWAP32:
14906 case CFN_BUILT_IN_BSWAP64:
14907 case CFN_BUILT_IN_BSWAP128:
14908 /* Always true. */
14909 return true;
14911 CASE_CFN_SQRT:
14912 CASE_CFN_SQRT_FN:
14913 /* sqrt(-0.0) is -0.0. */
14914 if (!HONOR_SIGNED_ZEROS (type))
14915 return true;
14916 return RECURSE (arg0);
14918 CASE_CFN_ASINH:
14919 CASE_CFN_ASINH_FN:
14920 CASE_CFN_ATAN:
14921 CASE_CFN_ATAN_FN:
14922 CASE_CFN_ATANH:
14923 CASE_CFN_ATANH_FN:
14924 CASE_CFN_CBRT:
14925 CASE_CFN_CBRT_FN:
14926 CASE_CFN_CEIL:
14927 CASE_CFN_CEIL_FN:
14928 CASE_CFN_ERF:
14929 CASE_CFN_ERF_FN:
14930 CASE_CFN_EXPM1:
14931 CASE_CFN_EXPM1_FN:
14932 CASE_CFN_FLOOR:
14933 CASE_CFN_FLOOR_FN:
14934 CASE_CFN_FMOD:
14935 CASE_CFN_FMOD_FN:
14936 CASE_CFN_FREXP:
14937 CASE_CFN_FREXP_FN:
14938 CASE_CFN_ICEIL:
14939 CASE_CFN_IFLOOR:
14940 CASE_CFN_IRINT:
14941 CASE_CFN_IROUND:
14942 CASE_CFN_LCEIL:
14943 CASE_CFN_LDEXP:
14944 CASE_CFN_LFLOOR:
14945 CASE_CFN_LLCEIL:
14946 CASE_CFN_LLFLOOR:
14947 CASE_CFN_LLRINT:
14948 CASE_CFN_LLRINT_FN:
14949 CASE_CFN_LLROUND:
14950 CASE_CFN_LLROUND_FN:
14951 CASE_CFN_LRINT:
14952 CASE_CFN_LRINT_FN:
14953 CASE_CFN_LROUND:
14954 CASE_CFN_LROUND_FN:
14955 CASE_CFN_MODF:
14956 CASE_CFN_MODF_FN:
14957 CASE_CFN_NEARBYINT:
14958 CASE_CFN_NEARBYINT_FN:
14959 CASE_CFN_RINT:
14960 CASE_CFN_RINT_FN:
14961 CASE_CFN_ROUND:
14962 CASE_CFN_ROUND_FN:
14963 CASE_CFN_ROUNDEVEN:
14964 CASE_CFN_ROUNDEVEN_FN:
14965 CASE_CFN_SCALB:
14966 CASE_CFN_SCALBLN:
14967 CASE_CFN_SCALBLN_FN:
14968 CASE_CFN_SCALBN:
14969 CASE_CFN_SCALBN_FN:
14970 CASE_CFN_SIGNBIT:
14971 CASE_CFN_SIGNIFICAND:
14972 CASE_CFN_SINH:
14973 CASE_CFN_SINH_FN:
14974 CASE_CFN_TANH:
14975 CASE_CFN_TANH_FN:
14976 CASE_CFN_TRUNC:
14977 CASE_CFN_TRUNC_FN:
14978 /* True if the 1st argument is nonnegative. */
14979 return RECURSE (arg0);
14981 CASE_CFN_FMAX:
14982 CASE_CFN_FMAX_FN:
14983 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14984 things. In the presence of sNaNs, we're only guaranteed to be
14985 non-negative if both operands are non-negative. In the presence
14986 of qNaNs, we're non-negative if either operand is non-negative
14987 and can't be a qNaN, or if both operands are non-negative. */
14988 if (tree_expr_maybe_signaling_nan_p (arg0)
14989 || tree_expr_maybe_signaling_nan_p (arg1))
14990 return RECURSE (arg0) && RECURSE (arg1);
14991 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14992 || RECURSE (arg1))
14993 : (RECURSE (arg1)
14994 && !tree_expr_maybe_nan_p (arg1));
14996 CASE_CFN_FMIN:
14997 CASE_CFN_FMIN_FN:
14998 /* True if the 1st AND 2nd arguments are nonnegative. */
14999 return RECURSE (arg0) && RECURSE (arg1);
15001 CASE_CFN_COPYSIGN:
15002 CASE_CFN_COPYSIGN_FN:
15003 /* True if the 2nd argument is nonnegative. */
15004 return RECURSE (arg1);
15006 CASE_CFN_POWI:
15007 /* True if the 1st argument is nonnegative or the second
15008 argument is an even integer. */
15009 if (TREE_CODE (arg1) == INTEGER_CST
15010 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15011 return true;
15012 return RECURSE (arg0);
15014 CASE_CFN_POW:
15015 CASE_CFN_POW_FN:
15016 /* True if the 1st argument is nonnegative or the second
15017 argument is an even integer valued real. */
15018 if (TREE_CODE (arg1) == REAL_CST)
15020 REAL_VALUE_TYPE c;
15021 HOST_WIDE_INT n;
15023 c = TREE_REAL_CST (arg1);
15024 n = real_to_integer (&c);
15025 if ((n & 1) == 0)
15027 REAL_VALUE_TYPE cint;
15028 real_from_integer (&cint, VOIDmode, n, SIGNED);
15029 if (real_identical (&c, &cint))
15030 return true;
15033 return RECURSE (arg0);
15035 default:
15036 break;
15038 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
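/* [Editorial sketch -- not part of fold-const.cc.]  The REAL_CST test in
   the POW case above, restated on doubles: the exponent must be an even
   integer *and* round-trip through integer conversion, so 2.5 (or a
   value whose integer image is truncated) is not misclassified.  The
   helper is invented; out-of-range inputs are ignored for brevity.  */
#if 0
#include <cassert>
#include <cmath>

static bool
even_integer_valued (double c)
{
  long long n = static_cast<long long> (c);
  return (n & 1) == 0 && static_cast<double> (n) == c;
}

int
main ()
{
  assert (even_integer_valued (2.0));		/* pow (x, 2.0) >= 0 */
  assert (!even_integer_valued (3.0));
  assert (!even_integer_valued (2.5));
  assert (std::pow (-3.0, 2.0) >= 0);
  return 0;
}
#endif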
15041 /* Return true if T is known to be non-negative. If the return
15042 value is based on the assumption that signed overflow is undefined,
15043 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15044 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15046 static bool
15047 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15049 enum tree_code code = TREE_CODE (t);
15050 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15051 return true;
15053 switch (code)
15055 case TARGET_EXPR:
15057 tree temp = TARGET_EXPR_SLOT (t);
15058 t = TARGET_EXPR_INITIAL (t);
15060 /* If the initializer is non-void, then it's a normal expression
15061 that will be assigned to the slot. */
15062 if (!VOID_TYPE_P (TREE_TYPE (t)))
15063 return RECURSE (t);
15065 /* Otherwise, the initializer sets the slot in some way. One common
15066 way is an assignment statement at the end of the initializer. */
15067 while (1)
15069 if (TREE_CODE (t) == BIND_EXPR)
15070 t = expr_last (BIND_EXPR_BODY (t));
15071 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15072 || TREE_CODE (t) == TRY_CATCH_EXPR)
15073 t = expr_last (TREE_OPERAND (t, 0));
15074 else if (TREE_CODE (t) == STATEMENT_LIST)
15075 t = expr_last (t);
15076 else
15077 break;
15079 if (TREE_CODE (t) == MODIFY_EXPR
15080 && TREE_OPERAND (t, 0) == temp)
15081 return RECURSE (TREE_OPERAND (t, 1));
15083 return false;
15086 case CALL_EXPR:
15088 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15089 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15091 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15092 get_call_combined_fn (t),
15093 arg0,
15094 arg1,
15095 strict_overflow_p, depth);
15097 case COMPOUND_EXPR:
15098 case MODIFY_EXPR:
15099 return RECURSE (TREE_OPERAND (t, 1));
15101 case BIND_EXPR:
15102 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15104 case SAVE_EXPR:
15105 return RECURSE (TREE_OPERAND (t, 0));
15107 default:
15108 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15112 #undef RECURSE
15113 #undef tree_expr_nonnegative_warnv_p
15115 /* Return true if T is known to be non-negative. If the return
15116 value is based on the assumption that signed overflow is undefined,
15117 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15118 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15120 bool
15121 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15123 enum tree_code code;
15124 if (t == error_mark_node)
15125 return false;
15127 code = TREE_CODE (t);
15128 switch (TREE_CODE_CLASS (code))
15130 case tcc_binary:
15131 case tcc_comparison:
15132 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15133 TREE_TYPE (t),
15134 TREE_OPERAND (t, 0),
15135 TREE_OPERAND (t, 1),
15136 strict_overflow_p, depth);
15138 case tcc_unary:
15139 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15140 TREE_TYPE (t),
15141 TREE_OPERAND (t, 0),
15142 strict_overflow_p, depth);
15144 case tcc_constant:
15145 case tcc_declaration:
15146 case tcc_reference:
15147 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15149 default:
15150 break;
15153 switch (code)
15155 case TRUTH_AND_EXPR:
15156 case TRUTH_OR_EXPR:
15157 case TRUTH_XOR_EXPR:
15158 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15159 TREE_TYPE (t),
15160 TREE_OPERAND (t, 0),
15161 TREE_OPERAND (t, 1),
15162 strict_overflow_p, depth);
15163 case TRUTH_NOT_EXPR:
15164 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15165 TREE_TYPE (t),
15166 TREE_OPERAND (t, 0),
15167 strict_overflow_p, depth);
15169 case COND_EXPR:
15170 case CONSTRUCTOR:
15171 case OBJ_TYPE_REF:
15172 case ADDR_EXPR:
15173 case WITH_SIZE_EXPR:
15174 case SSA_NAME:
15175 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15177 default:
15178 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15182 /* Return true if `t' is known to be non-negative. Handle warnings
15183 about undefined signed overflow. */
15185 bool
15186 tree_expr_nonnegative_p (tree t)
15188 bool ret, strict_overflow_p;
15190 strict_overflow_p = false;
15191 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15192 if (strict_overflow_p)
15193 fold_overflow_warning (("assuming signed overflow does not occur when "
15194 "determining that expression is always "
15195 "non-negative"),
15196 WARN_STRICT_OVERFLOW_MISC);
15197 return ret;
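/* [Editorial sketch -- not part of fold-const.cc.]  The caller-side
   protocol of the *_warnv_p family, assuming EXPR is some tree in
   scope: the flag only moves from false to true, so one bool can
   accumulate the answer over several queries before warning once.  */
#if 0
static bool
nonnegative_with_warning (tree expr)
{
  bool strict_overflow_p = false;
  bool nonneg = tree_expr_nonnegative_warnv_p (expr, &strict_overflow_p);
  if (nonneg && strict_overflow_p)
    /* Mirror what tree_expr_nonnegative_p does just above.  */
    fold_overflow_warning ("assuming signed overflow does not occur",
			   WARN_STRICT_OVERFLOW_MISC);
  return nonneg;
}
#endif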
15201 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15202 For floating point we further ensure that T is not denormal.
15203 Similar logic is present in nonzero_address in rtlanal.cc.
15205 If the return value is based on the assumption that signed overflow
15206 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15207 change *STRICT_OVERFLOW_P. */
15209 bool
15210 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15211 bool *strict_overflow_p)
15213 switch (code)
15215 case ABS_EXPR:
15216 return tree_expr_nonzero_warnv_p (op0,
15217 strict_overflow_p);
15219 case NOP_EXPR:
15221 tree inner_type = TREE_TYPE (op0);
15222 tree outer_type = type;
15224 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15225 && tree_expr_nonzero_warnv_p (op0,
15226 strict_overflow_p));
15228 break;
15230 case NON_LVALUE_EXPR:
15231 return tree_expr_nonzero_warnv_p (op0,
15232 strict_overflow_p);
15234 default:
15235 break;
15238 return false;
15241 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15242 For floating point we further ensure that T is not denormal.
15243 Similar logic is present in nonzero_address in rtlanal.cc.
15245 If the return value is based on the assumption that signed overflow
15246 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15247 change *STRICT_OVERFLOW_P. */
15249 bool
15250 tree_binary_nonzero_warnv_p (enum tree_code code,
15251 tree type,
15252 tree op0,
15253 tree op1, bool *strict_overflow_p)
15255 bool sub_strict_overflow_p;
15256 switch (code)
15258 case POINTER_PLUS_EXPR:
15259 case PLUS_EXPR:
15260 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15262 /* With the presence of negative values it is hard
15263 to say something. */
15264 sub_strict_overflow_p = false;
15265 if (!tree_expr_nonnegative_warnv_p (op0,
15266 &sub_strict_overflow_p)
15267 || !tree_expr_nonnegative_warnv_p (op1,
15268 &sub_strict_overflow_p))
15269 return false;
15270 /* One of the operands must be positive and the other non-negative. */
15271 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15272 overflows, on a twos-complement machine the sum of two
15273 nonnegative numbers can never be zero. */
15274 return (tree_expr_nonzero_warnv_p (op0,
15275 strict_overflow_p)
15276 || tree_expr_nonzero_warnv_p (op1,
15277 strict_overflow_p));
15279 break;
15281 case MULT_EXPR:
15282 if (TYPE_OVERFLOW_UNDEFINED (type))
15284 if (tree_expr_nonzero_warnv_p (op0,
15285 strict_overflow_p)
15286 && tree_expr_nonzero_warnv_p (op1,
15287 strict_overflow_p))
15289 *strict_overflow_p = true;
15290 return true;
15293 break;
15295 case MIN_EXPR:
15296 sub_strict_overflow_p = false;
15297 if (tree_expr_nonzero_warnv_p (op0,
15298 &sub_strict_overflow_p)
15299 && tree_expr_nonzero_warnv_p (op1,
15300 &sub_strict_overflow_p))
15302 if (sub_strict_overflow_p)
15303 *strict_overflow_p = true;
15305 break;
15307 case MAX_EXPR:
15308 sub_strict_overflow_p = false;
15309 if (tree_expr_nonzero_warnv_p (op0,
15310 &sub_strict_overflow_p))
15312 if (sub_strict_overflow_p)
15313 *strict_overflow_p = true;
15315 /* When both operands are nonzero, then MAX must be too. */
15316 if (tree_expr_nonzero_warnv_p (op1,
15317 strict_overflow_p))
15318 return true;
15320 /* MAX where operand 0 is positive is positive. */
15321 return tree_expr_nonnegative_warnv_p (op0,
15322 strict_overflow_p);
15324 /* MAX where operand 1 is positive is positive. */
15325 else if (tree_expr_nonzero_warnv_p (op1,
15326 &sub_strict_overflow_p)
15327 && tree_expr_nonnegative_warnv_p (op1,
15328 &sub_strict_overflow_p))
15330 if (sub_strict_overflow_p)
15331 *strict_overflow_p = true;
15332 return true;
15334 break;
15336 case BIT_IOR_EXPR:
15337 return (tree_expr_nonzero_warnv_p (op1,
15338 strict_overflow_p)
15339 || tree_expr_nonzero_warnv_p (op0,
15340 strict_overflow_p));
15342 default:
15343 break;
15346 return false;
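/* [Editorial sketch -- not part of fold-const.cc.]  The two's-complement
   argument in the PLUS_EXPR case above, made concrete: two values below
   2^31 sum to at most 2^32 - 2, so even the wrapped 32-bit sum is zero
   only when both operands are.  Standalone.  */
#if 0
#include <cassert>
#include <cstdint>

int
main ()
{
  uint32_t a = 0x7fffffff, b = 0x7fffffff;	/* both "nonnegative" */
  assert (a + b == 0xfffffffeu && a + b != 0);
  return 0;
}
#endif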
15349 /* Return true when T is an address and is known to be nonzero.
15350 For floating point we further ensure that T is not denormal.
15351 Similar logic is present in nonzero_address in rtlanal.cc.
15353 If the return value is based on the assumption that signed overflow
15354 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15355 change *STRICT_OVERFLOW_P. */
15357 bool
15358 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15360 bool sub_strict_overflow_p;
15361 switch (TREE_CODE (t))
15363 case INTEGER_CST:
15364 return !integer_zerop (t);
15366 case ADDR_EXPR:
15368 tree base = TREE_OPERAND (t, 0);
15370 if (!DECL_P (base))
15371 base = get_base_address (base);
15373 if (base && TREE_CODE (base) == TARGET_EXPR)
15374 base = TARGET_EXPR_SLOT (base);
15376 if (!base)
15377 return false;
15379 /* For objects in symbol table check if we know they are non-zero.
15380 Don't do anything for variables and functions before symtab is built;
15381 it is quite possible that they will be declared weak later. */
15382 int nonzero_addr = maybe_nonzero_address (base);
15383 if (nonzero_addr >= 0)
15384 return nonzero_addr;
15386 /* Constants are never weak. */
15387 if (CONSTANT_CLASS_P (base))
15388 return true;
15390 return false;
15393 case COND_EXPR:
15394 sub_strict_overflow_p = false;
15395 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15396 &sub_strict_overflow_p)
15397 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15398 &sub_strict_overflow_p))
15400 if (sub_strict_overflow_p)
15401 *strict_overflow_p = true;
15402 return true;
15404 break;
15406 case SSA_NAME:
15407 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15408 break;
15409 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15411 default:
15412 break;
15414 return false;
15417 #define integer_valued_real_p(X) \
15418 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15420 #define RECURSE(X) \
15421 ((integer_valued_real_p) (X, depth + 1))
15423 /* Return true if the floating point result of (CODE OP0) has an
15424 integer value. We also allow +Inf, -Inf and NaN to be considered
15425 integer values. Return false for signaling NaN.
15427 DEPTH is the current nesting depth of the query. */
15429 bool
15430 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15432 switch (code)
15434 case FLOAT_EXPR:
15435 return true;
15437 case ABS_EXPR:
15438 return RECURSE (op0);
15440 CASE_CONVERT:
15442 tree type = TREE_TYPE (op0);
15443 if (TREE_CODE (type) == INTEGER_TYPE)
15444 return true;
15445 if (TREE_CODE (type) == REAL_TYPE)
15446 return RECURSE (op0);
15447 break;
15450 default:
15451 break;
15453 return false;
15456 /* Return true if the floating point result of (CODE OP0 OP1) has an
15457 integer value. We also allow +Inf, -Inf and NaN to be considered
15458 integer values. Return false for signaling NaN.
15460 DEPTH is the current nesting depth of the query. */
15462 bool
15463 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15465 switch (code)
15467 case PLUS_EXPR:
15468 case MINUS_EXPR:
15469 case MULT_EXPR:
15470 case MIN_EXPR:
15471 case MAX_EXPR:
15472 return RECURSE (op0) && RECURSE (op1);
15474 default:
15475 break;
15477 return false;
15480 /* Return true if the floating point result of calling FNDECL with arguments
15481 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15482 considered integer values. Return false for signaling NaN. If FNDECL
15483 takes fewer than 2 arguments, the remaining ARGn are null.
15485 DEPTH is the current nesting depth of the query. */
15487 bool
15488 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15490 switch (fn)
15492 CASE_CFN_CEIL:
15493 CASE_CFN_CEIL_FN:
15494 CASE_CFN_FLOOR:
15495 CASE_CFN_FLOOR_FN:
15496 CASE_CFN_NEARBYINT:
15497 CASE_CFN_NEARBYINT_FN:
15498 CASE_CFN_RINT:
15499 CASE_CFN_RINT_FN:
15500 CASE_CFN_ROUND:
15501 CASE_CFN_ROUND_FN:
15502 CASE_CFN_ROUNDEVEN:
15503 CASE_CFN_ROUNDEVEN_FN:
15504 CASE_CFN_TRUNC:
15505 CASE_CFN_TRUNC_FN:
15506 return true;
15508 CASE_CFN_FMIN:
15509 CASE_CFN_FMIN_FN:
15510 CASE_CFN_FMAX:
15511 CASE_CFN_FMAX_FN:
15512 return RECURSE (arg0) && RECURSE (arg1);
15514 default:
15515 break;
15517 return false;
15520 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15521 has an integer value. We also allow +Inf, -Inf and NaN to be
15522 considered integer values. Return false for signaling NaN.
15524 DEPTH is the current nesting depth of the query. */
15526 bool
15527 integer_valued_real_single_p (tree t, int depth)
15529 switch (TREE_CODE (t))
15531 case REAL_CST:
15532 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15534 case COND_EXPR:
15535 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15537 case SSA_NAME:
15538 /* Limit the depth of recursion to avoid quadratic behavior.
15539 This is expected to catch almost all occurrences in practice.
15540 If this code misses important cases that unbounded recursion
15541 would not, passes that need this information could be revised
15542 to provide it through dataflow propagation. */
15543 return (!name_registered_for_update_p (t)
15544 && depth < param_max_ssa_name_query_depth
15545 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15546 depth));
15548 default:
15549 break;
15551 return false;
15554 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15555 has an integer value. We also allow +Inf, -Inf and NaN to be
15556 considered integer values. Return false for signaling NaN.
15558 DEPTH is the current nesting depth of the query. */
15560 static bool
15561 integer_valued_real_invalid_p (tree t, int depth)
15563 switch (TREE_CODE (t))
15565 case COMPOUND_EXPR:
15566 case MODIFY_EXPR:
15567 case BIND_EXPR:
15568 return RECURSE (TREE_OPERAND (t, 1));
15570 case SAVE_EXPR:
15571 return RECURSE (TREE_OPERAND (t, 0));
15573 default:
15574 break;
15576 return false;
15579 #undef RECURSE
15580 #undef integer_valued_real_p
15582 /* Return true if the floating point expression T has an integer value.
15583 We also allow +Inf, -Inf and NaN to be considered integer values.
15584 Return false for signaling NaN.
15586 DEPTH is the current nesting depth of the query. */
15588 bool
15589 integer_valued_real_p (tree t, int depth)
15591 if (t == error_mark_node)
15592 return false;
15594 STRIP_ANY_LOCATION_WRAPPER (t);
15596 tree_code code = TREE_CODE (t);
15597 switch (TREE_CODE_CLASS (code))
15599 case tcc_binary:
15600 case tcc_comparison:
15601 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15602 TREE_OPERAND (t, 1), depth);
15604 case tcc_unary:
15605 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15607 case tcc_constant:
15608 case tcc_declaration:
15609 case tcc_reference:
15610 return integer_valued_real_single_p (t, depth);
15612 default:
15613 break;
15616 switch (code)
15618 case COND_EXPR:
15619 case SSA_NAME:
15620 return integer_valued_real_single_p (t, depth);
15622 case CALL_EXPR:
15624 tree arg0 = (call_expr_nargs (t) > 0
15625 ? CALL_EXPR_ARG (t, 0)
15626 : NULL_TREE);
15627 tree arg1 = (call_expr_nargs (t) > 1
15628 ? CALL_EXPR_ARG (t, 1)
15629 : NULL_TREE);
15630 return integer_valued_real_call_p (get_call_combined_fn (t),
15631 arg0, arg1, depth);
15634 default:
15635 return integer_valued_real_invalid_p (t, depth);
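/* [Editorial sketch -- not part of fold-const.cc.]  The facts the
   integer_valued_real_* routines propagate, checked on doubles: the
   rounding functions always yield integer values, and sums/products of
   integer values stay integral while exactly representable.  The
   helper is invented; Inf and NaN count as integer values here too.  */
#if 0
#include <cassert>
#include <cmath>

static bool
integer_valued (double x)
{
  return !std::isfinite (x) || std::trunc (x) == x;
}

int
main ()
{
  assert (integer_valued (std::floor (2.7)));
  assert (integer_valued (std::round (-1.5)));
  assert (integer_valued (3.0 + 4.0) && integer_valued (3.0 * 4.0));
  return 0;
}
#endif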
15639 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15640 attempt to fold the expression to a constant without modifying TYPE,
15641 OP0 or OP1.
15643 If the expression could be simplified to a constant, then return
15644 the constant. If the expression would not be simplified to a
15645 constant, then return NULL_TREE. */
15647 tree
15648 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15650 tree tem = fold_binary (code, type, op0, op1);
15651 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15654 /* Given the components of a unary expression CODE, TYPE and OP0,
15655 attempt to fold the expression to a constant without modifying
15656 TYPE or OP0.
15658 If the expression could be simplified to a constant, then return
15659 the constant. If the expression would not be simplified to a
15660 constant, then return NULL_TREE. */
15662 tree
15663 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15665 tree tem = fold_unary (code, type, op0);
15666 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15669 /* If EXP represents referencing an element in a constant string
15670 (either via pointer arithmetic or array indexing), return the
15671 tree representing the value accessed, otherwise return NULL. */
15673 tree
15674 fold_read_from_constant_string (tree exp)
15676 if ((TREE_CODE (exp) == INDIRECT_REF
15677 || TREE_CODE (exp) == ARRAY_REF)
15678 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15680 tree exp1 = TREE_OPERAND (exp, 0);
15681 tree index;
15682 tree string;
15683 location_t loc = EXPR_LOCATION (exp);
15685 if (TREE_CODE (exp) == INDIRECT_REF)
15686 string = string_constant (exp1, &index, NULL, NULL);
15687 else
15689 tree low_bound = array_ref_low_bound (exp);
15690 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15692 /* Optimize the special case of a zero lower bound.
15694 We convert the low_bound to sizetype to avoid some problems
15695 with constant folding. (E.g. suppose the lower bound is 1,
15696 and its mode is QI. Without the conversion, (ARRAY
15697 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15698 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15699 if (! integer_zerop (low_bound))
15700 index = size_diffop_loc (loc, index,
15701 fold_convert_loc (loc, sizetype, low_bound));
15703 string = exp1;
15706 scalar_int_mode char_mode;
15707 if (string
15708 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15709 && TREE_CODE (string) == STRING_CST
15710 && tree_fits_uhwi_p (index)
15711 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15712 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15713 &char_mode)
15714 && GET_MODE_SIZE (char_mode) == 1)
15715 return build_int_cst_type (TREE_TYPE (exp),
15716 (TREE_STRING_POINTER (string)
15717 [TREE_INT_CST_LOW (index)]));
15719 return NULL;
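/* [Editorial sketch -- not part of fold-const.cc.]  The two source-level
   shapes the routine above folds: both denote element 1 of a STRING_CST
   and reduce to the character constant 'b' at compile time, which the
   static_asserts below demonstrate.  */
#if 0
int
main ()
{
  static_assert ("abc"[1] == 'b', "array indexing into a string constant");
  static_assert (*("abc" + 1) == 'b', "pointer arithmetic on a string constant");
  return 0;
}
#endif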
15722 /* Folds a read from vector element at IDX of vector ARG. */
15724 tree
15725 fold_read_from_vector (tree arg, poly_uint64 idx)
15727 unsigned HOST_WIDE_INT i;
15728 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15729 && known_ge (idx, 0u)
15730 && idx.is_constant (&i))
15732 if (TREE_CODE (arg) == VECTOR_CST)
15733 return VECTOR_CST_ELT (arg, i);
15734 else if (TREE_CODE (arg) == CONSTRUCTOR)
15736 if (CONSTRUCTOR_NELTS (arg)
15737 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15738 return NULL_TREE;
15739 if (i >= CONSTRUCTOR_NELTS (arg))
15740 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15741 return CONSTRUCTOR_ELT (arg, i)->value;
15744 return NULL_TREE;
15747 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15748 an integer constant, real, or fixed-point constant.
15750 TYPE is the type of the result. */
15752 static tree
15753 fold_negate_const (tree arg0, tree type)
15755 tree t = NULL_TREE;
15757 switch (TREE_CODE (arg0))
15759 case REAL_CST:
15760 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15761 break;
15763 case FIXED_CST:
15765 FIXED_VALUE_TYPE f;
15766 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15767 &(TREE_FIXED_CST (arg0)), NULL,
15768 TYPE_SATURATING (type));
15769 t = build_fixed (type, f);
15770 /* Propagate overflow flags. */
15771 if (overflow_p | TREE_OVERFLOW (arg0))
15772 TREE_OVERFLOW (t) = 1;
15773 break;
15776 default:
15777 if (poly_int_tree_p (arg0))
15779 wi::overflow_type overflow;
15780 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15781 t = force_fit_type (type, res, 1,
15782 (overflow && ! TYPE_UNSIGNED (type))
15783 || TREE_OVERFLOW (arg0));
15784 break;
15787 gcc_unreachable ();
15790 return t;
15793 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15794 an integer constant or real constant.
15796 TYPE is the type of the result. */
15798 tree
15799 fold_abs_const (tree arg0, tree type)
15801 tree t = NULL_TREE;
15803 switch (TREE_CODE (arg0))
15805 case INTEGER_CST:
15807 /* If the value is unsigned or non-negative, then the absolute value
15808 is the same as the ordinary value. */
15809 wide_int val = wi::to_wide (arg0);
15810 wi::overflow_type overflow = wi::OVF_NONE;
15811 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15814 /* If the value is negative, then the absolute value is
15815 its negation. */
15816 else
15817 val = wi::neg (val, &overflow);
15819 /* Force to the destination type, set TREE_OVERFLOW for signed
15820 TYPE only. */
15821 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15823 break;
15825 case REAL_CST:
15826 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15827 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15828 else
15829 t = arg0;
15830 break;
15832 default:
15833 gcc_unreachable ();
15836 return t;
15839 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15840 constant. TYPE is the type of the result. */
15842 static tree
15843 fold_not_const (const_tree arg0, tree type)
15845 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15847 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15850 /* Given CODE, a relational operator, the target type, TYPE and two
15851 constant operands OP0 and OP1, return the result of the
15852 relational operation. If the result is not a compile time
15853 constant, then return NULL_TREE. */
15855 static tree
15856 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15858 int result, invert;
15860 /* From here on, the only cases we handle are when the result is
15861 known to be a constant. */
15863 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15865 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15866 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15868 /* Handle the cases where either operand is a NaN. */
15869 if (real_isnan (c0) || real_isnan (c1))
15871 switch (code)
15873 case EQ_EXPR:
15874 case ORDERED_EXPR:
15875 result = 0;
15876 break;
15878 case NE_EXPR:
15879 case UNORDERED_EXPR:
15880 case UNLT_EXPR:
15881 case UNLE_EXPR:
15882 case UNGT_EXPR:
15883 case UNGE_EXPR:
15884 case UNEQ_EXPR:
15885 result = 1;
15886 break;
15888 case LT_EXPR:
15889 case LE_EXPR:
15890 case GT_EXPR:
15891 case GE_EXPR:
15892 case LTGT_EXPR:
15893 if (flag_trapping_math)
15894 return NULL_TREE;
15895 result = 0;
15896 break;
15898 default:
15899 gcc_unreachable ();
15902 return constant_boolean_node (result, type);
15905 return constant_boolean_node (real_compare (code, c0, c1), type);
15908 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15910 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15911 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15912 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15915 /* Handle equality/inequality of complex constants. */
15916 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15918 tree rcond = fold_relational_const (code, type,
15919 TREE_REALPART (op0),
15920 TREE_REALPART (op1));
15921 tree icond = fold_relational_const (code, type,
15922 TREE_IMAGPART (op0),
15923 TREE_IMAGPART (op1));
15924 if (code == EQ_EXPR)
15925 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15926 else if (code == NE_EXPR)
15927 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15928 else
15929 return NULL_TREE;
15932 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15934 if (!VECTOR_TYPE_P (type))
15936 /* Have vector comparison with scalar boolean result. */
15937 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15938 && known_eq (VECTOR_CST_NELTS (op0),
15939 VECTOR_CST_NELTS (op1)));
15940 unsigned HOST_WIDE_INT nunits;
15941 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15942 return NULL_TREE;
15943 for (unsigned i = 0; i < nunits; i++)
15945 tree elem0 = VECTOR_CST_ELT (op0, i);
15946 tree elem1 = VECTOR_CST_ELT (op1, i);
15947 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15948 if (tmp == NULL_TREE)
15949 return NULL_TREE;
15950 if (integer_zerop (tmp))
15951 return constant_boolean_node (code == NE_EXPR, type);
15953 return constant_boolean_node (code == EQ_EXPR, type);
15955 tree_vector_builder elts;
15956 if (!elts.new_binary_operation (type, op0, op1, false))
15957 return NULL_TREE;
15958 unsigned int count = elts.encoded_nelts ();
15959 for (unsigned i = 0; i < count; i++)
15961 tree elem_type = TREE_TYPE (type);
15962 tree elem0 = VECTOR_CST_ELT (op0, i);
15963 tree elem1 = VECTOR_CST_ELT (op1, i);
15965 tree tem = fold_relational_const (code, elem_type,
15966 elem0, elem1);
15968 if (tem == NULL_TREE)
15969 return NULL_TREE;
15971 elts.quick_push (build_int_cst (elem_type,
15972 integer_zerop (tem) ? 0 : -1));
15975 return elts.build ();
15978 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15980 To compute GT, swap the arguments and do LT.
15981 To compute GE, do LT and invert the result.
15982 To compute LE, swap the arguments, do LT and invert the result.
15983 To compute NE, do EQ and invert the result.
15985 Therefore, the code below must handle only EQ and LT. */
15987 if (code == LE_EXPR || code == GT_EXPR)
15989 std::swap (op0, op1);
15990 code = swap_tree_comparison (code);
15993 /* Note that it is safe to invert for real values here because we
15994 have already handled the one case where it matters. */
15996 invert = 0;
15997 if (code == NE_EXPR || code == GE_EXPR)
15999 invert = 1;
16000 code = invert_tree_comparison (code, false);
16003 /* Compute a result for LT or EQ if args permit;
16004 otherwise return NULL_TREE. */
16005 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16007 if (code == EQ_EXPR)
16008 result = tree_int_cst_equal (op0, op1);
16009 else
16010 result = tree_int_cst_lt (op0, op1);
16012 else
16013 return NULL_TREE;
16015 if (invert)
16016 result ^= 1;
16017 return constant_boolean_node (result, type);
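/* [Editorial sketch -- not part of fold-const.cc.]  The NaN rows of the
   folding table above, observed at run time: == is false and != is true
   for a NaN operand, while the ordered comparisons are false but raise
   INVALID -- hence the flag_trapping_math bail-out.  Standalone.  */
#if 0
#include <cassert>
#include <cmath>

int
main ()
{
  double nan = std::nan ("");
  assert (!(nan == nan) && (nan != nan));
  assert (!(nan < 1.0) && !(nan >= 1.0));	/* false, but raises INVALID */
  return 0;
}
#endif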
16020 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16021 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16022 itself. */
16024 tree
16025 fold_build_cleanup_point_expr (tree type, tree expr)
16027 /* If the expression does not have side effects then we don't have to wrap
16028 it with a cleanup point expression. */
16029 if (!TREE_SIDE_EFFECTS (expr))
16030 return expr;
16032 /* If the expression is a return, check whether the expression inside the
16033 return, or the right-hand side of the modify expression it contains, has
16034 side effects. If either has none, we don't need to
16035 wrap the expression in a cleanup point expression. Note we don't check the
16036 left hand side of the modify because it should always be a return decl. */
16037 if (TREE_CODE (expr) == RETURN_EXPR)
16039 tree op = TREE_OPERAND (expr, 0);
16040 if (!op || !TREE_SIDE_EFFECTS (op))
16041 return expr;
16042 op = TREE_OPERAND (op, 1);
16043 if (!TREE_SIDE_EFFECTS (op))
16044 return expr;
16047 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16050 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16051 of an indirection through OP0, or NULL_TREE if no simplification is
16052 possible. */
16054 tree
16055 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16057 tree sub = op0;
16058 tree subtype;
16059 poly_uint64 const_op01;
16061 STRIP_NOPS (sub);
16062 subtype = TREE_TYPE (sub);
16063 if (!POINTER_TYPE_P (subtype)
16064 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16065 return NULL_TREE;
16067 if (TREE_CODE (sub) == ADDR_EXPR)
16069 tree op = TREE_OPERAND (sub, 0);
16070 tree optype = TREE_TYPE (op);
16072 /* *&CONST_DECL -> to the value of the const decl. */
16073 if (TREE_CODE (op) == CONST_DECL)
16074 return DECL_INITIAL (op);
16075 /* *&p => p; make sure to handle *&"str"[cst] here. */
16076 if (type == optype)
16078 tree fop = fold_read_from_constant_string (op);
16079 if (fop)
16080 return fop;
16081 else
16082 return op;
16084 /* *(foo *)&fooarray => fooarray[0] */
16085 else if (TREE_CODE (optype) == ARRAY_TYPE
16086 && type == TREE_TYPE (optype)
16087 && (!in_gimple_form
16088 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16090 tree type_domain = TYPE_DOMAIN (optype);
16091 tree min_val = size_zero_node;
16092 if (type_domain && TYPE_MIN_VALUE (type_domain))
16093 min_val = TYPE_MIN_VALUE (type_domain);
16094 if (in_gimple_form
16095 && TREE_CODE (min_val) != INTEGER_CST)
16096 return NULL_TREE;
16097 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16098 NULL_TREE, NULL_TREE);
16100 /* *(foo *)&complexfoo => __real__ complexfoo */
16101 else if (TREE_CODE (optype) == COMPLEX_TYPE
16102 && type == TREE_TYPE (optype))
16103 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16104 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16105 else if (VECTOR_TYPE_P (optype)
16106 && type == TREE_TYPE (optype))
16108 tree part_width = TYPE_SIZE (type);
16109 tree index = bitsize_int (0);
16110 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16111 index);
16115 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16116 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16118 tree op00 = TREE_OPERAND (sub, 0);
16119 tree op01 = TREE_OPERAND (sub, 1);
16121 STRIP_NOPS (op00);
16122 if (TREE_CODE (op00) == ADDR_EXPR)
16124 tree op00type;
16125 op00 = TREE_OPERAND (op00, 0);
16126 op00type = TREE_TYPE (op00);
16128 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16129 if (VECTOR_TYPE_P (op00type)
16130 && type == TREE_TYPE (op00type)
16131 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16132 but we want to treat offsets with MSB set as negative.
16133 For the code below negative offsets are invalid and
16134 TYPE_SIZE of the element is something unsigned, so
16135 check whether op01 fits into poly_int64, which implies
16136 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16137 then just use poly_uint64 because we want to treat the
16138 value as unsigned. */
16139 && tree_fits_poly_int64_p (op01))
16141 tree part_width = TYPE_SIZE (type);
16142 poly_uint64 max_offset
16143 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16144 * TYPE_VECTOR_SUBPARTS (op00type));
16145 if (known_lt (const_op01, max_offset))
16147 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16148 return fold_build3_loc (loc,
16149 BIT_FIELD_REF, type, op00,
16150 part_width, index);
16153 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16154 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16155 && type == TREE_TYPE (op00type))
16157 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16158 const_op01))
16159 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16161 /* ((foo *)&fooarray)[1] => fooarray[1] */
16162 else if (TREE_CODE (op00type) == ARRAY_TYPE
16163 && type == TREE_TYPE (op00type))
16165 tree type_domain = TYPE_DOMAIN (op00type);
16166 tree min_val = size_zero_node;
16167 if (type_domain && TYPE_MIN_VALUE (type_domain))
16168 min_val = TYPE_MIN_VALUE (type_domain);
16169 poly_uint64 type_size, index;
16170 if (poly_int_tree_p (min_val)
16171 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16172 && multiple_p (const_op01, type_size, &index))
16174 poly_offset_int off = index + wi::to_poly_offset (min_val);
16175 op01 = wide_int_to_tree (sizetype, off);
16176 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16177 NULL_TREE, NULL_TREE);
16183 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16184 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16185 && type == TREE_TYPE (TREE_TYPE (subtype))
16186 && (!in_gimple_form
16187 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16189 tree type_domain;
16190 tree min_val = size_zero_node;
16191 sub = build_fold_indirect_ref_loc (loc, sub);
16192 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16193 if (type_domain && TYPE_MIN_VALUE (type_domain))
16194 min_val = TYPE_MIN_VALUE (type_domain);
16195 if (in_gimple_form
16196 && TREE_CODE (min_val) != INTEGER_CST)
16197 return NULL_TREE;
16198 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16199 NULL_TREE);
16202 return NULL_TREE;
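/* [Editorial sketch -- not part of fold-const.cc.]  Source-level
   counterparts of two rewrites above: *(foo *)&fooarray becomes
   fooarray[0], and ((foo *)&fooarray)[1] becomes fooarray[1].
   Standalone.  */
#if 0
#include <cassert>

int
main ()
{
  int arr[2] = { 10, 20 };
  assert (*(int *) &arr == arr[0]);
  assert (((int *) &arr)[1] == arr[1]);
  return 0;
}
#endif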
16205 /* Builds an expression for an indirection through T, simplifying some
16206 cases. */
16208 tree
16209 build_fold_indirect_ref_loc (location_t loc, tree t)
16211 tree type = TREE_TYPE (TREE_TYPE (t));
16212 tree sub = fold_indirect_ref_1 (loc, type, t);
16214 if (sub)
16215 return sub;
16217 return build1_loc (loc, INDIRECT_REF, type, t);
16220 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16222 tree
16223 fold_indirect_ref_loc (location_t loc, tree t)
16225 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16227 if (sub)
16228 return sub;
16229 else
16230 return t;
16233 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16234 whose result is ignored. The type of the returned tree need not be
16235 the same as the original expression. */
16237 tree
16238 fold_ignored_result (tree t)
16240 if (!TREE_SIDE_EFFECTS (t))
16241 return integer_zero_node;
16243 for (;;)
16244 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16246 case tcc_unary:
16247 t = TREE_OPERAND (t, 0);
16248 break;
16250 case tcc_binary:
16251 case tcc_comparison:
16252 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16253 t = TREE_OPERAND (t, 0);
16254 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16255 t = TREE_OPERAND (t, 1);
16256 else
16257 return t;
16258 break;
16260 case tcc_expression:
16261 switch (TREE_CODE (t))
16263 case COMPOUND_EXPR:
16264 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16265 return t;
16266 t = TREE_OPERAND (t, 0);
16267 break;
16269 case COND_EXPR:
16270 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16271 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16272 return t;
16273 t = TREE_OPERAND (t, 0);
16274 break;
16276 default:
16277 return t;
16279 break;
16281 default:
16282 return t;
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

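/* Worked example (editorial): with a power-of-two divisor, a constant
   is rounded by bit manipulation.  Assuming a sizetype constant:

     tree ten = build_int_cst (sizetype, 10);
     tree r = round_up_loc (input_location, ten, 8);

   R folds to the INTEGER_CST 16, computed as (10 + 7) & -8.  */
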
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

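/* Editorial sketch: for a non-constant VALUE and divisor 8, the
   power-of-two path above emits VALUE & -8, so e.g. 13 rounds down
   to 8.  */
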
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == SSA_NAME)
    if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
      if (gimple_assign_rhs_code (def) == ADDR_EXPR)
	exp = gimple_assign_rhs1 (def);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

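/* Editorial sketch (hypothetical inputs): for EXP = &s.f where field
   F sits at byte offset 4 within S, the ADDR_EXPR arm returns &s as
   the core and sets *PBITPOS to 32 with *POFFSET = NULL_TREE.  For
   EXP = p p+ o with a variable offset O, the POINTER_PLUS_EXPR arm
   returns P with *PBITPOS = 0 and *POFFSET = o.  */
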
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}

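/* Usage sketch (editorial): two addresses into the same object yield
   their constant byte difference.  Assuming E1 = &a[3] and E2 = &a[1]
   for a char array A:

     poly_int64 diff;
     if (ptr_difference_const (e1, e2, &diff))
       gcc_assert (known_eq (diff, 2));

   Different bases, or a non-constant offset on only one side, make
   the function return false.  */
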
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}

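/* Usage sketch (editorial): both wrappers convert the offset to the
   pointer offset type first, so

     tree p4 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);

   builds PTR p+ 4 with a sizetype offset, the same tree that
   fold_build_pointer_plus_loc would produce for an INTEGER_CST
   offset of 4.  */
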
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   If STRSIZE is non-null, set *STRSIZE to the number of bytes in the
   constant sequence including the terminating NUL byte.  *STRSIZE is
   equal to sizeof(A) - OFFSET where A is the array that stores the
   constant sequence that SRC points to and OFFSET is the byte offset
   of SRC from the beginning of A.  SRC need not point to a string or
   even an array of characters but may point to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}

/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR doesn't reference
   a constant string or if it involves a nonconstant offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}

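/* Editorial example, reusing the array from the getbyterep comment
   above: given

     const char a[7] = "abc\0d";

   c_getstr on &a[1] returns a pointer reading "bc" (the embedded NUL
   terminates it), while getbyterep with a non-null STRSIZE returns
   the bytes "bc\0d" and sets *STRSIZE to 5, i.e. INIT_BYTES (6)
   minus OFFSET (1).  */
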
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}

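/* Editorial example (hypothetical trees): for T = (x & 12) | 1 with X
   of unknown value, the BIT_AND_EXPR case masks the conservative
   all-ones estimate for X down to 12 and the BIT_IOR_EXPR case adds
   bit 0, giving 13 (binary 1101): only those bits may be nonzero.  */
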
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  if (TREE_CODE (op0) == SSA_NAME)
    op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
  if (TREE_CODE (op1) == SSA_NAME)
    op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
		 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
	equal = 2;
      else
	size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
	equal = 2;
      else
	size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
	equal = 2;
      else
	size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
	equal = 2;
      else
	size1 = tree_to_poly_int64 (sz1);
    }

  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are nonzero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to start of its object and the
     other points to end of its object.  This is unspecified behavior
     e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
	  || ioff1 < 0
	  || ioff0 > TREE_STRING_LENGTH (base0)
	  || ioff1 > TREE_STRING_LENGTH (base1))
	return 2;

      /* If the bytes in the string literals starting at the pointers
	 differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
		  TREE_STRING_POINTER (base1) + ioff1,
		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
	{
	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
		      ioffmin) == 0)
	    /* If even the bytes in the string literal before the
	       pointers are the same, the string literals could be
	       tail merged.  */
	    return 2;
	}
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}

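/* Editorial sketch of the tri-state result: comparing &a[0] == &b[0]
   for two distinct global arrays of known nonzero size returns 0
   (known unequal), since both offsets are zero.  Comparing &"abc"[0]
   with &"xabc"[1] returns 2 instead: the bytes from those offsets on
   are identical, so the literals could be tail merged into one.  */
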
/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */
tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
	if (elt)
	  return NULL_TREE;
	elt = ce->value;
      }
  return elt;
}

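/* Editorial example: for the CONSTRUCTOR {0, 5, 0} this returns the
   element 5; for {1, 2} it returns NULL_TREE because two elements are
   nonzero, as it does for any tree that is not a CONSTRUCTOR.  */
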
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */