c++: only cache constexpr calls that are constant exprs
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
   otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
                                            tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
        {
          /* X <= Y - 1 equals to X < Y.  */
          if (cmp == LE_EXPR)
            code = LT_EXPR;
          /* X > Y - 1 equals to X >= Y.  */
          if (cmp == GT_EXPR)
            code = GE_EXPR;
          /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
          if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
            {
              value_range r;
              get_range_query (cfun)->range_of_expr (r, exp0);
              if (r.undefined_p ())
                r.set_varying (TREE_TYPE (exp0));

              widest_int min = widest_int::from (r.lower_bound (),
                                                 TYPE_SIGN (TREE_TYPE (exp0)));
              if (min == wi::to_widest (exp1))
                code = MAX_EXPR;
            }
        }
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
        {
          /* X < Y + 1 equals to X <= Y.  */
          if (cmp == LT_EXPR)
            code = LE_EXPR;
          /* X >= Y + 1 equals to X > Y.  */
          if (cmp == GE_EXPR)
            code = GT_EXPR;
          /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
          if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
            {
              value_range r;
              get_range_query (cfun)->range_of_expr (r, exp0);
              if (r.undefined_p ())
                r.set_varying (TREE_TYPE (exp0));

              widest_int max = widest_int::from (r.upper_bound (),
                                                 TYPE_SIGN (TREE_TYPE (exp0)));
              if (max == wi::to_widest (exp1))
                code = MIN_EXPR;
            }
        }
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
        code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
        code = MAX_EXPR;
    }
  return code;
}
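/* Editorial usage sketch (not part of the original source): for a
   COND_EXPR such as "(x <= y - 1) ? x : y" with INTEGER_CST operands,
   a caller would pass CMP = LE_EXPR, EXP0 = x, EXP1 = the constant
   y - 1, EXP2 = x and EXP3 = the constant y.  The "Y - 1" rewrite above
   turns LE into LT, and the final block then reports MIN_EXPR:

     tree_code k = minmax_from_comparison (LE_EXPR, x, ym1, x, y);
     // k == MIN_EXPR when ym1 and y are INTEGER_CSTs with ym1 == y - 1.
*/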
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
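/* Editorial example (assumes two INTEGER_CST arguments):

     tree a = build_int_cst (integer_type_node, 12);
     tree b = build_int_cst (integer_type_node, 4);
     tree q = div_if_zero_remainder (a, b);  // the INTEGER_CST 3
     tree r = div_if_zero_remainder (a, build_int_cst (integer_type_node, 5));
     // r is NULL_TREE because 12 % 5 != 0.
*/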
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
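/* Editorial usage sketch of the deferral protocol (not in the original
   source): callers bracket speculative folding with a defer/undefer pair
   and only release the warning if the folded result is actually used:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, NULL, 0);
*/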
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        /* Steps don't prevent negation.  */
        unsigned int count = vector_cst_encoded_nelts (t);
        for (unsigned int i = 0; i < count; ++i)
          if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
          || HONOR_SIGNED_ZEROS (type)
          || (ANY_INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
             && !HONOR_SIGNED_ZEROS (type)
             && (! ANY_INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
         if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && (wi::popcount
                     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && (wi::popcount
                        (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
          && negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == element_precision (type) - 1)
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        tree_vector_builder elts;
        elts.new_unary_operation (type, t, true);
        unsigned int count = elts.encoded_nelts ();
        for (unsigned int i = 0; i < count; ++i)
          {
            tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elt == NULL_TREE)
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
          && !HONOR_SIGNED_ZEROS (type))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
          && !HONOR_SIGNED_ZEROS (type))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
          && negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == element_precision (type) - 1)
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
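/* Editorial note with a minimal sketch (not in the original source):
   negate_expr always produces a tree, falling back to building an
   explicit NEGATE_EXPR node, while fold_negate_expr returns NULL_TREE
   when no simplification exists:

     tree neg = negate_expr (x);             // never NULL for non-null X
     tree simp = fold_negate_expr (loc, x);  // may be NULL_TREE
*/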
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
            tree *minus_varp, tree *conp, tree *minus_conp,
            tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
                   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR
                       && (TREE_CODE (in) == PLUS_EXPR
                           || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
        *minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
        *minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      if (*conp)
        *minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
        *conp = *minus_conp, *minus_conp = 0;
      if (var)
        *minus_varp = var, var = 0;
      else if (*minus_varp)
        var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
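/* Editorial illustration (not in the original source): splitting an
   expression like "v + 4" with CODE == PLUS_EXPR returns the variable
   part "v" and stores the INTEGER_CST 4 in *LITP; for "v - 4" the
   literal lands in *MINUS_LITP instead.  A sketch of the call:

     tree minus_var, con, minus_con, lit, minus_lit;
     tree var = split_tree (in, type, PLUS_EXPR, &minus_var, &con,
                            &minus_con, &lit, &minus_lit, 0);

   associate_trees below is the inverse step, recombining the pieces.  */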
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
                enum tree_code code, const wide_int &arg1, const wide_int &arg2,
                signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
        return false;
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          tmp = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, tmp);
      else
        res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
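/* Editorial example: constant-folding 6 + 7 at 32-bit precision through
   wide_int_binop (values and precision here are illustrative only):

     wide_int a = wi::shwi (6, 32), b = wi::shwi (7, 32), r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, PLUS_EXPR, a, b, SIGNED, &ovf))
       gcc_assert (r == 13 && ovf == wi::OVF_NONE);
*/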
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
                const_tree arg1, const_tree arg2,
                signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
                     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
                     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
        res = wi::mul (wi::to_poly_wide (arg1),
                       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        res = wi::mul (wi::to_poly_wide (arg2),
                       wi::to_wide (arg1), sign, overflow);
      else
        return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
        res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
        return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
          || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
                         &res))
        return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
                 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
        return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
           || !poly_int_tree_p (arg2)
           || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
                         (((sign == SIGNED || overflowable == -1)
                           && overflow)
                          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
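/* Editorial example: folding two INTEGER_CSTs.  Note that overflow is
   recorded via TREE_OVERFLOW on the result (through force_fit_type)
   rather than by failing:

     tree a = build_int_cst (integer_type_node, 100);
     tree b = build_int_cst (integer_type_node, 25);
     tree s = int_const_binop (PLUS_EXPR, a, b);  // the INTEGER_CST 125
*/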
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISSIGNALING_NAN (d1)
              || REAL_VALUE_ISSIGNALING_NAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        {
          /* Make resulting NaN value to be qNaN when flag_signaling_nans
             is off.  */
          d1.signalling = 0;
          t = build_real (type, d1);
          return t;
        }
      else if (REAL_VALUE_ISNAN (d2))
        {
          /* Make resulting NaN value to be qNaN when flag_signaling_nans
             is off.  */
          d2.signalling = 0;
          t = build_real (type, d2);
          return t;
        }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         both operands are not NaN but the result is NaN, and
         flag_trapping_math.  Such operations should raise an
         invalid operation exception.  */
      if (flag_trapping_math
          && MODE_HAS_NANS (mode)
          && REAL_VALUE_ISNAN (result)
          && !REAL_VALUE_ISNAN (d1)
          && !REAL_VALUE_ISNAN (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
            f2.data.high = w2.elt (1);
            f2.data.low = w2.ulow ();
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.cc:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.cc:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
          && VECTOR_CST_STEPPED_P (arg2))
        /* We can operate directly on the encoding if:

             a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
           implies
             (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

           Addition and subtraction are the supported operators
           for which this is true.  */
        step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
        /* We can operate directly on stepped encodings if:

             a3 - a2 == a2 - a1
           implies:
             (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

           which is true if (x -> x op c) distributes over addition.  */
        step_ok_p = distributes_over_addition_p (code, 1);
      else
        /* Similarly in reverse.  */
        step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          tree elt = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          tree elt = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }
  return NULL_TREE;
}
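/* Editorial example: const_binop dispatches on the kinds of constants it
   is given; a sketch with REAL_CSTs (dconst1/dconst2 are GCC's cached
   real constants):

     tree one = build_real (double_type_node, dconst1);
     tree two = build_real (double_type_node, dconst2);
     tree three = const_binop (PLUS_EXPR, one, two);
     // NULL_TREE is returned instead when folding is unsafe, e.g. for
     // a signaling-NaN operand under HONOR_SNANS.
*/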
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
          && CONSTANT_CLASS_P (arg2))
        return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
        {
          poly_offset_int res = (wi::to_poly_offset (arg1)
                                 - wi::to_poly_offset (arg2));
          return force_fit_type (type, res, 1,
                                 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
        }
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
          return NULL_TREE;

        out_nelts = in_nelts * 2;
        gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
                    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = (i < in_nelts
                        ? VECTOR_CST_ELT (arg1, i)
                        : VECTOR_CST_ELT (arg2, i - in_nelts));
            elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                      ? NOP_EXPR
                                      : code == VEC_PACK_FLOAT_EXPR
                                      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
                                      TREE_TYPE (type), elt);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
          return NULL_TREE;
        out_nelts = in_nelts / 2;
        gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
                    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        tree_vector_builder elts (type, out_nelts, 1);
        for (out = 0; out < out_nelts; out++)
          {
            unsigned int in = (out << scale) + ofs;
            tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg1, in));
            tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg2, in));

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            tree elt = const_binop (MULT_EXPR, t1, t2);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
        return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elem;

          /* This can cope with stepped encodings because ~x == -1 - x.  */
          tree_vector_builder elements;
          elements.new_unary_operation (type, arg0, true);
          unsigned int i, count = elements.encoded_nelts ();
          for (i = 0; i < count; ++i)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements.quick_push (elem);
            }
          if (i == count)
            return elements.build ();
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
          return NULL_TREE;
        out_nelts = in_nelts / 2;
        gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        unsigned int offset = 0;
        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR
                                   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
          offset = out_nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else if (code == VEC_UNPACK_FLOAT_LO_EXPR
                 || code == VEC_UNPACK_FLOAT_HI_EXPR)
          subcode = FLOAT_EXPR;
        else
          subcode = FIX_TRUNC_EXPR;

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = fold_convert_const (subcode, TREE_TYPE (type),
                                           VECTOR_CST_ELT (arg0, i + offset));
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
        return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
1995 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1996 indicates which particular sizetype to create. */
1998 tree
1999 size_int_kind (poly_int64 number, enum size_type_kind kind)
2001 return build_int_cst (sizetype_tab[(int) kind], number);
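/* Illustrative sketch, not in the original source: size_int is the
   usual entry point, a tree.h macro assumed here to expand to
   size_int_kind (N, stk_sizetype).  The example function name is
   hypothetical.  */
#if 0
static void
example_size_int (void)
{
  /* Build the byte count 16 as a sizetype INTEGER_CST.  */
  tree bytes = size_int (16);
  gcc_assert (tree_fits_uhwi_p (bytes) && tree_to_uhwi (bytes) == 16);
}
#endif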
2004 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2005 is a tree code. The type of the result is taken from the operands.
2006 Both must be equivalent integer types, as per int_binop_types_match_p.
2007 If the operands are constant, so is the result. */
2009 tree
2010 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2012 tree type = TREE_TYPE (arg0);
2014 if (arg0 == error_mark_node || arg1 == error_mark_node)
2015 return error_mark_node;
2017 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2018 TREE_TYPE (arg1)));
2020 /* Handle the special case of two poly_int constants faster. */
2021 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2023 /* And some specific cases even faster than that. */
2024 if (code == PLUS_EXPR)
2026 if (integer_zerop (arg0)
2027 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2028 return arg1;
2029 if (integer_zerop (arg1)
2030 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2031 return arg0;
2033 else if (code == MINUS_EXPR)
2035 if (integer_zerop (arg1)
2036 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2037 return arg0;
2039 else if (code == MULT_EXPR)
2041 if (integer_onep (arg0)
2042 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2043 return arg1;
2046 /* Handle general case of two integer constants. For sizetype
2047 constant calculations we always want to know about overflow,
2048 even in the unsigned case. */
2049 tree res = int_const_binop (code, arg0, arg1, -1);
2050 if (res != NULL_TREE)
2051 return res;
2054 return fold_build2_loc (loc, code, type, arg0, arg1);
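/* Illustrative sketch, not in the original source: when both operands
   are constants, size_binop folds to a constant immediately; otherwise
   it builds a (possibly simplified) expression via fold_build2_loc.
   The example function name is hypothetical.  */
#if 0
static void
example_size_binop (void)
{
  tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));
  /* Both operands were INTEGER_CSTs, so SUM is the sizetype constant
     12, not a PLUS_EXPR node.  */
  gcc_assert (TREE_CODE (sum) == INTEGER_CST && tree_to_uhwi (sum) == 12);
}
#endif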
2057 /* Given two values, either both of sizetype or both of bitsizetype,
2058 compute the difference between the two values. Return the value
2059 in signed type corresponding to the type of the operands. */
2061 tree
2062 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2064 tree type = TREE_TYPE (arg0);
2065 tree ctype;
2067 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2068 TREE_TYPE (arg1)));
2070 /* If the type is already signed, just do the simple thing. */
2071 if (!TYPE_UNSIGNED (type))
2072 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2074 if (type == sizetype)
2075 ctype = ssizetype;
2076 else if (type == bitsizetype)
2077 ctype = sbitsizetype;
2078 else
2079 ctype = signed_type_for (type);
2081 /* If either operand is not a constant, do the conversions to the signed
2082 type and subtract. The hardware will do the right thing with any
2083 overflow in the subtraction. */
2084 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2085 return size_binop_loc (loc, MINUS_EXPR,
2086 fold_convert_loc (loc, ctype, arg0),
2087 fold_convert_loc (loc, ctype, arg1));
2089 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2090 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2091 overflow) and negate (which can't either). Special-case a result
2092 of zero while we're here. */
2093 if (tree_int_cst_equal (arg0, arg1))
2094 return build_int_cst (ctype, 0);
2095 else if (tree_int_cst_lt (arg1, arg0))
2096 return fold_convert_loc (loc, ctype,
2097 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2098 else
2099 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2100 fold_convert_loc (loc, ctype,
2101 size_binop_loc (loc,
2102 MINUS_EXPR,
2103 arg1, arg0)));
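/* Illustrative sketch, not in the original source: size_diffop returns
   the difference in the signed counterpart type, so subtracting
   unsigned sizetype values can safely go negative.  The example
   function name is hypothetical.  */
#if 0
static void
example_size_diffop (void)
{
  tree d = size_diffop (size_int (4), size_int (8));
  /* D has type ssizetype and value -4.  */
  gcc_assert (TREE_TYPE (d) == ssizetype && tree_to_shwi (d) == -4);
}
#endif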
2106 /* A subroutine of fold_convert_const handling conversions of an
2107 INTEGER_CST to another integer type. */
2109 static tree
2110 fold_convert_const_int_from_int (tree type, const_tree arg1)
2112 /* Given an integer constant, make new constant with new type,
2113 appropriately sign-extended or truncated. Use widest_int
2114 so that any extension is done according to ARG1's type. */
2115 return force_fit_type (type, wi::to_widest (arg1),
2116 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2117 TREE_OVERFLOW (arg1));
2120 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2121 to an integer type. */
2123 static tree
2124 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2126 bool overflow = false;
2127 tree t;
2129 /* The following code implements the floating point to integer
2130 conversion rules required by the Java Language Specification,
2131 that IEEE NaNs are mapped to zero and values that overflow
2132 the target precision saturate, i.e. values greater than
2133 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2134 are mapped to INT_MIN. These semantics are allowed by the
2135 C and C++ standards that simply state that the behavior of
2136 FP-to-integer conversion is unspecified upon overflow. */
2138 wide_int val;
2139 REAL_VALUE_TYPE r;
2140 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2142 switch (code)
2144 case FIX_TRUNC_EXPR:
2145 real_trunc (&r, VOIDmode, &x);
2146 break;
2148 default:
2149 gcc_unreachable ();
2152 /* If R is NaN, return zero and show we have an overflow. */
2153 if (REAL_VALUE_ISNAN (r))
2155 overflow = true;
2156 val = wi::zero (TYPE_PRECISION (type));
2159 /* See if R is less than the lower bound or greater than the
2160 upper bound. */
2162 if (! overflow)
2164 tree lt = TYPE_MIN_VALUE (type);
2165 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2166 if (real_less (&r, &l))
2168 overflow = true;
2169 val = wi::to_wide (lt);
2173 if (! overflow)
2175 tree ut = TYPE_MAX_VALUE (type);
2176 if (ut)
2178 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2179 if (real_less (&u, &r))
2181 overflow = true;
2182 val = wi::to_wide (ut);
2187 if (! overflow)
2188 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2190 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2191 return t;
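/* Illustrative sketch, not in the original source: the folding above
   truncates toward zero, saturates out-of-range values at the type's
   min/max, and maps NaN to zero, setting TREE_OVERFLOW in the overflow
   cases so callers can diagnose them.  The example function name is
   hypothetical.  */
#if 0
static void
example_fix_trunc (void)
{
  tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node,
			       build_real (double_type_node, dconst2));
  /* 2.0 is exactly representable, so no overflow is recorded.  */
  gcc_assert (t && tree_to_shwi (t) == 2 && !TREE_OVERFLOW (t));
}
#endif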
2194 /* A subroutine of fold_convert_const handling conversions of a
2195 FIXED_CST to an integer type. */
2197 static tree
2198 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2200 tree t;
2201 double_int temp, temp_trunc;
2202 scalar_mode mode;
2204 /* Right shift FIXED_CST to temp by fbit. */
2205 temp = TREE_FIXED_CST (arg1).data;
2206 mode = TREE_FIXED_CST (arg1).mode;
2207 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2209 temp = temp.rshift (GET_MODE_FBIT (mode),
2210 HOST_BITS_PER_DOUBLE_INT,
2211 SIGNED_FIXED_POINT_MODE_P (mode));
2213 /* Left shift temp to temp_trunc by fbit. */
2214 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2215 HOST_BITS_PER_DOUBLE_INT,
2216 SIGNED_FIXED_POINT_MODE_P (mode));
2218 else
2220 temp = double_int_zero;
2221 temp_trunc = double_int_zero;
2224 /* If FIXED_CST is negative, we need to round the value toward 0.
2225 We do this by adding 1 to temp when the fractional bits are nonzero. */
2226 if (SIGNED_FIXED_POINT_MODE_P (mode)
2227 && temp_trunc.is_negative ()
2228 && TREE_FIXED_CST (arg1).data != temp_trunc)
2229 temp += double_int_one;
2231 /* Given a fixed-point constant, make new constant with new type,
2232 appropriately sign-extended or truncated. */
2233 t = force_fit_type (type, temp, -1,
2234 (temp.is_negative ()
2235 && (TYPE_UNSIGNED (type)
2236 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2237 | TREE_OVERFLOW (arg1));
2239 return t;
2242 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2243 to another floating point type. */
2245 static tree
2246 fold_convert_const_real_from_real (tree type, const_tree arg1)
2248 REAL_VALUE_TYPE value;
2249 tree t;
2251 /* If the underlying modes are the same, simply treat it as
2252 copy and rebuild with TREE_REAL_CST information and the
2253 given type. */
2254 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2256 t = build_real (type, TREE_REAL_CST (arg1));
2257 return t;
2260 /* Don't perform the operation if flag_signaling_nans is on
2261 and the operand is a signaling NaN. */
2262 if (HONOR_SNANS (arg1)
2263 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2264 return NULL_TREE;
2266 /* With flag_rounding_math we should respect the current rounding mode
2267 unless the conversion is exact. */
2268 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2269 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2270 return NULL_TREE;
2272 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2273 t = build_real (type, value);
2275 /* If converting an infinity or NAN to a representation that doesn't
2276 have one, set the overflow bit so that we can produce some kind of
2277 error message at the appropriate point if necessary. It's not the
2278 most user-friendly message, but it's better than nothing. */
2279 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2280 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2281 TREE_OVERFLOW (t) = 1;
2282 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2283 && !MODE_HAS_NANS (TYPE_MODE (type)))
2284 TREE_OVERFLOW (t) = 1;
2285 /* Regular overflow, conversion produced an infinity in a mode that
2286 can't represent them. */
2287 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2288 && REAL_VALUE_ISINF (value)
2289 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2290 TREE_OVERFLOW (t) = 1;
2291 else
2292 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2293 return t;
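/* Illustrative sketch, not in the original source: an exact narrowing
   keeps TREE_OVERFLOW clear, while converting an infinity or NaN to a
   mode lacking them sets it.  The example function name is
   hypothetical.  */
#if 0
static void
example_real_narrow (void)
{
  tree f = fold_convert_const (NOP_EXPR, float_type_node,
			       build_real (double_type_node, dconst1));
  /* 1.0 converts exactly, so the result is a clean REAL_CST.  */
  gcc_assert (f && TREE_CODE (f) == REAL_CST && !TREE_OVERFLOW (f));
}
#endif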
2296 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2297 to a floating point type. */
2299 static tree
2300 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2302 REAL_VALUE_TYPE value;
2303 tree t;
2305 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2306 &TREE_FIXED_CST (arg1));
2307 t = build_real (type, value);
2309 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2310 return t;
2313 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2314 to another fixed-point type. */
2316 static tree
2317 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2319 FIXED_VALUE_TYPE value;
2320 tree t;
2321 bool overflow_p;
2323 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2324 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2325 t = build_fixed (type, value);
2327 /* Propagate overflow flags. */
2328 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 return t;
2333 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2334 to a fixed-point type. */
2336 static tree
2337 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2339 FIXED_VALUE_TYPE value;
2340 tree t;
2341 bool overflow_p;
2342 double_int di;
2344 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2346 di.low = TREE_INT_CST_ELT (arg1, 0);
2347 if (TREE_INT_CST_NUNITS (arg1) == 1)
2348 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2349 else
2350 di.high = TREE_INT_CST_ELT (arg1, 1);
2352 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2353 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2354 TYPE_SATURATING (type));
2355 t = build_fixed (type, value);
2357 /* Propagate overflow flags. */
2358 if (overflow_p | TREE_OVERFLOW (arg1))
2359 TREE_OVERFLOW (t) = 1;
2360 return t;
2363 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2364 to a fixed-point type. */
2366 static tree
2367 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2369 FIXED_VALUE_TYPE value;
2370 tree t;
2371 bool overflow_p;
2373 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2374 &TREE_REAL_CST (arg1),
2375 TYPE_SATURATING (type));
2376 t = build_fixed (type, value);
2378 /* Propagate overflow flags. */
2379 if (overflow_p | TREE_OVERFLOW (arg1))
2380 TREE_OVERFLOW (t) = 1;
2381 return t;
2384 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2385 type TYPE. If no simplification can be done return NULL_TREE. */
2387 static tree
2388 fold_convert_const (enum tree_code code, tree type, tree arg1)
2390 tree arg_type = TREE_TYPE (arg1);
2391 if (arg_type == type)
2392 return arg1;
2394 /* We can't widen types, since the runtime value could overflow the
2395 original type before being extended to the new type. */
2396 if (POLY_INT_CST_P (arg1)
2397 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2398 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2399 return build_poly_int_cst (type,
2400 poly_wide_int::from (poly_int_cst_value (arg1),
2401 TYPE_PRECISION (type),
2402 TYPE_SIGN (arg_type)));
2404 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2405 || TREE_CODE (type) == OFFSET_TYPE)
2407 if (TREE_CODE (arg1) == INTEGER_CST)
2408 return fold_convert_const_int_from_int (type, arg1);
2409 else if (TREE_CODE (arg1) == REAL_CST)
2410 return fold_convert_const_int_from_real (code, type, arg1);
2411 else if (TREE_CODE (arg1) == FIXED_CST)
2412 return fold_convert_const_int_from_fixed (type, arg1);
2414 else if (SCALAR_FLOAT_TYPE_P (type))
2416 if (TREE_CODE (arg1) == INTEGER_CST)
2418 tree res = build_real_from_int_cst (type, arg1);
2419 /* Avoid the folding if flag_rounding_math is on and the
2420 conversion is not exact. */
2421 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2423 bool fail = false;
2424 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2425 TYPE_PRECISION (TREE_TYPE (arg1)));
2426 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2427 return NULL_TREE;
2429 return res;
2431 else if (TREE_CODE (arg1) == REAL_CST)
2432 return fold_convert_const_real_from_real (type, arg1);
2433 else if (TREE_CODE (arg1) == FIXED_CST)
2434 return fold_convert_const_real_from_fixed (type, arg1);
2436 else if (FIXED_POINT_TYPE_P (type))
2438 if (TREE_CODE (arg1) == FIXED_CST)
2439 return fold_convert_const_fixed_from_fixed (type, arg1);
2440 else if (TREE_CODE (arg1) == INTEGER_CST)
2441 return fold_convert_const_fixed_from_int (type, arg1);
2442 else if (TREE_CODE (arg1) == REAL_CST)
2443 return fold_convert_const_fixed_from_real (type, arg1);
2445 else if (VECTOR_TYPE_P (type))
2447 if (TREE_CODE (arg1) == VECTOR_CST
2448 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2450 tree elttype = TREE_TYPE (type);
2451 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2452 /* We can't handle steps directly when extending, since the
2453 values need to wrap at the original precision first. */
2454 bool step_ok_p
2455 = (INTEGRAL_TYPE_P (elttype)
2456 && INTEGRAL_TYPE_P (arg1_elttype)
2457 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2458 tree_vector_builder v;
2459 if (!v.new_unary_operation (type, arg1, step_ok_p))
2460 return NULL_TREE;
2461 unsigned int len = v.encoded_nelts ();
2462 for (unsigned int i = 0; i < len; ++i)
2464 tree elt = VECTOR_CST_ELT (arg1, i);
2465 tree cvt = fold_convert_const (code, elttype, elt);
2466 if (cvt == NULL_TREE)
2467 return NULL_TREE;
2468 v.quick_push (cvt);
2470 return v.build ();
2473 return NULL_TREE;
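/* Illustrative sketch, not in the original source: the dispatch above
   picks a helper from the pair (target type kind, constant kind); an
   INTEGER_CST converted to a float type, for instance, goes through
   build_real_from_int_cst.  The example function name is
   hypothetical.  */
#if 0
static void
example_fold_convert_const (void)
{
  tree r = fold_convert_const (FLOAT_EXPR, double_type_node,
			       build_int_cst (integer_type_node, 3));
  gcc_assert (r && TREE_CODE (r) == REAL_CST);
}
#endif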
2476 /* Construct a vector of zero elements of vector type TYPE. */
2478 static tree
2479 build_zero_vector (tree type)
2481 tree t;
2483 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2484 return build_vector_from_val (type, t);
2487 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2489 bool
2490 fold_convertible_p (const_tree type, const_tree arg)
2492 const_tree orig = TREE_TYPE (arg);
2494 if (type == orig)
2495 return true;
2497 if (TREE_CODE (arg) == ERROR_MARK
2498 || TREE_CODE (type) == ERROR_MARK
2499 || TREE_CODE (orig) == ERROR_MARK)
2500 return false;
2502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2503 return true;
2505 switch (TREE_CODE (type))
2507 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2508 case POINTER_TYPE: case REFERENCE_TYPE:
2509 case OFFSET_TYPE:
2510 return (INTEGRAL_TYPE_P (orig)
2511 || (POINTER_TYPE_P (orig)
2512 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2513 || TREE_CODE (orig) == OFFSET_TYPE);
2515 case REAL_TYPE:
2516 case FIXED_POINT_TYPE:
2517 case VOID_TYPE:
2518 return TREE_CODE (type) == TREE_CODE (orig);
2520 case VECTOR_TYPE:
2521 return (VECTOR_TYPE_P (orig)
2522 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2523 TYPE_VECTOR_SUBPARTS (orig))
2524 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2526 default:
2527 return false;
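/* Illustrative sketch, not in the original source: a typical caller
   checks fold_convertible_p before asking fold_convert to build the
   conversion.  The example function name is hypothetical.  */
#if 0
static tree
example_maybe_convert (tree type, tree arg)
{
  /* Only convert when a NOP_EXPR-style conversion is valid.  */
  if (fold_convertible_p (type, arg))
    return fold_convert (type, arg);
  return NULL_TREE;
}
#endif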
2531 /* Convert expression ARG to type TYPE. Used by the middle-end for
2532 simple conversions in preference to calling the front-end's convert. */
2534 tree
2535 fold_convert_loc (location_t loc, tree type, tree arg)
2537 tree orig = TREE_TYPE (arg);
2538 tree tem;
2540 if (type == orig)
2541 return arg;
2543 if (TREE_CODE (arg) == ERROR_MARK
2544 || TREE_CODE (type) == ERROR_MARK
2545 || TREE_CODE (orig) == ERROR_MARK)
2546 return error_mark_node;
2548 switch (TREE_CODE (type))
2550 case POINTER_TYPE:
2551 case REFERENCE_TYPE:
2552 /* Handle conversions between pointers to different address spaces. */
2553 if (POINTER_TYPE_P (orig)
2554 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2555 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2556 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2557 /* fall through */
2559 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2560 case OFFSET_TYPE:
2561 if (TREE_CODE (arg) == INTEGER_CST)
2563 tem = fold_convert_const (NOP_EXPR, type, arg);
2564 if (tem != NULL_TREE)
2565 return tem;
2567 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2568 || TREE_CODE (orig) == OFFSET_TYPE)
2569 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2570 if (TREE_CODE (orig) == COMPLEX_TYPE)
2571 return fold_convert_loc (loc, type,
2572 fold_build1_loc (loc, REALPART_EXPR,
2573 TREE_TYPE (orig), arg));
2574 gcc_assert (VECTOR_TYPE_P (orig)
2575 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2576 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2578 case REAL_TYPE:
2579 if (TREE_CODE (arg) == INTEGER_CST)
2581 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2582 if (tem != NULL_TREE)
2583 return tem;
2585 else if (TREE_CODE (arg) == REAL_CST)
2587 tem = fold_convert_const (NOP_EXPR, type, arg);
2588 if (tem != NULL_TREE)
2589 return tem;
2591 else if (TREE_CODE (arg) == FIXED_CST)
2593 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2594 if (tem != NULL_TREE)
2595 return tem;
2598 switch (TREE_CODE (orig))
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2605 case REAL_TYPE:
2606 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2608 case FIXED_POINT_TYPE:
2609 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2611 case COMPLEX_TYPE:
2612 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2613 return fold_convert_loc (loc, type, tem);
2615 default:
2616 gcc_unreachable ();
2619 case FIXED_POINT_TYPE:
2620 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2621 || TREE_CODE (arg) == REAL_CST)
2623 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2624 if (tem != NULL_TREE)
2625 goto fold_convert_exit;
2628 switch (TREE_CODE (orig))
2630 case FIXED_POINT_TYPE:
2631 case INTEGER_TYPE:
2632 case ENUMERAL_TYPE:
2633 case BOOLEAN_TYPE:
2634 case REAL_TYPE:
2635 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2637 case COMPLEX_TYPE:
2638 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2639 return fold_convert_loc (loc, type, tem);
2641 default:
2642 gcc_unreachable ();
2645 case COMPLEX_TYPE:
2646 switch (TREE_CODE (orig))
2648 case INTEGER_TYPE:
2649 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2650 case POINTER_TYPE: case REFERENCE_TYPE:
2651 case REAL_TYPE:
2652 case FIXED_POINT_TYPE:
2653 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2654 fold_convert_loc (loc, TREE_TYPE (type), arg),
2655 fold_convert_loc (loc, TREE_TYPE (type),
2656 integer_zero_node));
2657 case COMPLEX_TYPE:
2659 tree rpart, ipart;
2661 if (TREE_CODE (arg) == COMPLEX_EXPR)
2663 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2664 TREE_OPERAND (arg, 0));
2665 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2666 TREE_OPERAND (arg, 1));
2667 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2670 arg = save_expr (arg);
2671 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2673 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2674 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2675 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2678 default:
2679 gcc_unreachable ();
2682 case VECTOR_TYPE:
2683 if (integer_zerop (arg))
2684 return build_zero_vector (type);
2685 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2686 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2687 || VECTOR_TYPE_P (orig));
2688 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2690 case VOID_TYPE:
2691 tem = fold_ignored_result (arg);
2692 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2694 default:
2695 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2696 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2697 gcc_unreachable ();
2699 fold_convert_exit:
2700 tem = protected_set_expr_location_unshare (tem, loc);
2701 return tem;
2704 /* Return false if expr can be assumed not to be an lvalue, true
2705 otherwise. */
2707 static bool
2708 maybe_lvalue_p (const_tree x)
2710 /* We only need to wrap lvalue tree codes. */
2711 switch (TREE_CODE (x))
2713 case VAR_DECL:
2714 case PARM_DECL:
2715 case RESULT_DECL:
2716 case LABEL_DECL:
2717 case FUNCTION_DECL:
2718 case SSA_NAME:
2719 case COMPOUND_LITERAL_EXPR:
2721 case COMPONENT_REF:
2722 case MEM_REF:
2723 case INDIRECT_REF:
2724 case ARRAY_REF:
2725 case ARRAY_RANGE_REF:
2726 case BIT_FIELD_REF:
2727 case OBJ_TYPE_REF:
2729 case REALPART_EXPR:
2730 case IMAGPART_EXPR:
2731 case PREINCREMENT_EXPR:
2732 case PREDECREMENT_EXPR:
2733 case SAVE_EXPR:
2734 case TRY_CATCH_EXPR:
2735 case WITH_CLEANUP_EXPR:
2736 case COMPOUND_EXPR:
2737 case MODIFY_EXPR:
2738 case TARGET_EXPR:
2739 case COND_EXPR:
2740 case BIND_EXPR:
2741 case VIEW_CONVERT_EXPR:
2742 break;
2744 default:
2745 /* Assume the worst for front-end tree codes. */
2746 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2747 break;
2748 return false;
2751 return true;
2754 /* Return an expr equal to X but certainly not valid as an lvalue. */
2756 tree
2757 non_lvalue_loc (location_t loc, tree x)
2759 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2760 us. */
2761 if (in_gimple_form)
2762 return x;
2764 if (! maybe_lvalue_p (x))
2765 return x;
2766 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2769 /* Given a tree comparison code, return the code that is the logical inverse.
2770 It is generally not safe to do this for floating-point comparisons, except
2771 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2772 ERROR_MARK in this case. */
2774 enum tree_code
2775 invert_tree_comparison (enum tree_code code, bool honor_nans)
2777 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2778 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2779 return ERROR_MARK;
2781 switch (code)
2783 case EQ_EXPR:
2784 return NE_EXPR;
2785 case NE_EXPR:
2786 return EQ_EXPR;
2787 case GT_EXPR:
2788 return honor_nans ? UNLE_EXPR : LE_EXPR;
2789 case GE_EXPR:
2790 return honor_nans ? UNLT_EXPR : LT_EXPR;
2791 case LT_EXPR:
2792 return honor_nans ? UNGE_EXPR : GE_EXPR;
2793 case LE_EXPR:
2794 return honor_nans ? UNGT_EXPR : GT_EXPR;
2795 case LTGT_EXPR:
2796 return UNEQ_EXPR;
2797 case UNEQ_EXPR:
2798 return LTGT_EXPR;
2799 case UNGT_EXPR:
2800 return LE_EXPR;
2801 case UNGE_EXPR:
2802 return LT_EXPR;
2803 case UNLT_EXPR:
2804 return GE_EXPR;
2805 case UNLE_EXPR:
2806 return GT_EXPR;
2807 case ORDERED_EXPR:
2808 return UNORDERED_EXPR;
2809 case UNORDERED_EXPR:
2810 return ORDERED_EXPR;
2811 default:
2812 gcc_unreachable ();
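/* Illustrative sketch, not in the original source: without NaNs the
   inverse of < is >=, but with NaNs it is the unordered variant UNGE,
   and under -ftrapping-math the inversion is refused entirely.  The
   example function name is hypothetical.  */
#if 0
static void
example_invert (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  /* With honored NaNs the result depends on flag_trapping_math.  */
  enum tree_code c = invert_tree_comparison (LT_EXPR, true);
  gcc_assert (c == UNGE_EXPR || c == ERROR_MARK);
}
#endif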
2816 /* Similar, but return the comparison that results if the operands are
2817 swapped. This is safe for floating-point. */
2819 enum tree_code
2820 swap_tree_comparison (enum tree_code code)
2822 switch (code)
2824 case EQ_EXPR:
2825 case NE_EXPR:
2826 case ORDERED_EXPR:
2827 case UNORDERED_EXPR:
2828 case LTGT_EXPR:
2829 case UNEQ_EXPR:
2830 return code;
2831 case GT_EXPR:
2832 return LT_EXPR;
2833 case GE_EXPR:
2834 return LE_EXPR;
2835 case LT_EXPR:
2836 return GT_EXPR;
2837 case LE_EXPR:
2838 return GE_EXPR;
2839 case UNGT_EXPR:
2840 return UNLT_EXPR;
2841 case UNGE_EXPR:
2842 return UNLE_EXPR;
2843 case UNLT_EXPR:
2844 return UNGT_EXPR;
2845 case UNLE_EXPR:
2846 return UNGE_EXPR;
2847 default:
2848 gcc_unreachable ();
2853 /* Convert a comparison tree code from an enum tree_code representation
2854 into a compcode bit-based encoding. This function is the inverse of
2855 compcode_to_comparison. */
2857 static enum comparison_code
2858 comparison_to_compcode (enum tree_code code)
2860 switch (code)
2862 case LT_EXPR:
2863 return COMPCODE_LT;
2864 case EQ_EXPR:
2865 return COMPCODE_EQ;
2866 case LE_EXPR:
2867 return COMPCODE_LE;
2868 case GT_EXPR:
2869 return COMPCODE_GT;
2870 case NE_EXPR:
2871 return COMPCODE_NE;
2872 case GE_EXPR:
2873 return COMPCODE_GE;
2874 case ORDERED_EXPR:
2875 return COMPCODE_ORD;
2876 case UNORDERED_EXPR:
2877 return COMPCODE_UNORD;
2878 case UNLT_EXPR:
2879 return COMPCODE_UNLT;
2880 case UNEQ_EXPR:
2881 return COMPCODE_UNEQ;
2882 case UNLE_EXPR:
2883 return COMPCODE_UNLE;
2884 case UNGT_EXPR:
2885 return COMPCODE_UNGT;
2886 case LTGT_EXPR:
2887 return COMPCODE_LTGT;
2888 case UNGE_EXPR:
2889 return COMPCODE_UNGE;
2890 default:
2891 gcc_unreachable ();
2895 /* Convert a compcode bit-based encoding of a comparison operator back
2896 to GCC's enum tree_code representation. This function is the
2897 inverse of comparison_to_compcode. */
2899 static enum tree_code
2900 compcode_to_comparison (enum comparison_code code)
2902 switch (code)
2904 case COMPCODE_LT:
2905 return LT_EXPR;
2906 case COMPCODE_EQ:
2907 return EQ_EXPR;
2908 case COMPCODE_LE:
2909 return LE_EXPR;
2910 case COMPCODE_GT:
2911 return GT_EXPR;
2912 case COMPCODE_NE:
2913 return NE_EXPR;
2914 case COMPCODE_GE:
2915 return GE_EXPR;
2916 case COMPCODE_ORD:
2917 return ORDERED_EXPR;
2918 case COMPCODE_UNORD:
2919 return UNORDERED_EXPR;
2920 case COMPCODE_UNLT:
2921 return UNLT_EXPR;
2922 case COMPCODE_UNEQ:
2923 return UNEQ_EXPR;
2924 case COMPCODE_UNLE:
2925 return UNLE_EXPR;
2926 case COMPCODE_UNGT:
2927 return UNGT_EXPR;
2928 case COMPCODE_LTGT:
2929 return LTGT_EXPR;
2930 case COMPCODE_UNGE:
2931 return UNGE_EXPR;
2932 default:
2933 gcc_unreachable ();
2937 /* Return true if COND1 tests the opposite condition of COND2. */
2939 bool
2940 inverse_conditions_p (const_tree cond1, const_tree cond2)
2942 return (COMPARISON_CLASS_P (cond1)
2943 && COMPARISON_CLASS_P (cond2)
2944 && (invert_tree_comparison
2945 (TREE_CODE (cond1),
2946 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2947 && operand_equal_p (TREE_OPERAND (cond1, 0),
2948 TREE_OPERAND (cond2, 0), 0)
2949 && operand_equal_p (TREE_OPERAND (cond1, 1),
2950 TREE_OPERAND (cond2, 1), 0));
2953 /* Return a tree for the comparison which is the combination of
2954 doing the AND or OR (depending on CODE) of the two operations LCODE
2955 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2956 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2957 if this makes the transformation invalid. */
2959 tree
2960 combine_comparisons (location_t loc,
2961 enum tree_code code, enum tree_code lcode,
2962 enum tree_code rcode, tree truth_type,
2963 tree ll_arg, tree lr_arg)
2965 bool honor_nans = HONOR_NANS (ll_arg);
2966 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2967 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2968 int compcode;
2970 switch (code)
2972 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2973 compcode = lcompcode & rcompcode;
2974 break;
2976 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2977 compcode = lcompcode | rcompcode;
2978 break;
2980 default:
2981 return NULL_TREE;
2984 if (!honor_nans)
2986 /* Eliminate unordered comparisons, as well as LTGT and ORD
2987 which are not used unless the mode has NaNs. */
2988 compcode &= ~COMPCODE_UNORD;
2989 if (compcode == COMPCODE_LTGT)
2990 compcode = COMPCODE_NE;
2991 else if (compcode == COMPCODE_ORD)
2992 compcode = COMPCODE_TRUE;
2994 else if (flag_trapping_math)
2996 /* Check that the original operation and the optimized ones will trap
2997 under the same condition. */
2998 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2999 && (lcompcode != COMPCODE_EQ)
3000 && (lcompcode != COMPCODE_ORD);
3001 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3002 && (rcompcode != COMPCODE_EQ)
3003 && (rcompcode != COMPCODE_ORD);
3004 bool trap = (compcode & COMPCODE_UNORD) == 0
3005 && (compcode != COMPCODE_EQ)
3006 && (compcode != COMPCODE_ORD);
3008 /* In a short-circuited boolean expression the LHS might be
3009 such that the RHS, if evaluated, will never trap. For
3010 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3011 if neither x nor y is NaN. (This is a mixed blessing: for
3012 example, the expression above will never trap, hence
3013 optimizing it to x < y would be invalid). */
3014 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3015 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3016 rtrap = false;
3018 /* If the comparison was short-circuited, and only the RHS
3019 trapped, we may now generate a spurious trap. */
3020 if (rtrap && !ltrap
3021 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3022 return NULL_TREE;
3024 /* If we changed the conditions that cause a trap, we lose. */
3025 if ((ltrap || rtrap) != trap)
3026 return NULL_TREE;
3029 if (compcode == COMPCODE_TRUE)
3030 return constant_boolean_node (true, truth_type);
3031 else if (compcode == COMPCODE_FALSE)
3032 return constant_boolean_node (false, truth_type);
3033 else
3035 enum tree_code tcode;
3037 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3038 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
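/* Illustrative sketch, not in the original source: for integer
   operands (no NaNs), OR-ing the compcodes of < and == yields <=.
   The example function name is hypothetical.  */
#if 0
static tree
example_combine (location_t loc, tree x, tree y)
{
  /* Yields the fold of x <= y, or NULL_TREE if the combination
     would change trapping behavior.  */
  return combine_comparisons (loc, TRUTH_OR_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, x, y);
}
#endif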
3042 /* Return nonzero if two operands (typically of the same tree node)
3043 are necessarily equal. FLAGS modifies behavior as follows:
3045 If OEP_ONLY_CONST is set, only return nonzero for constants.
3046 This function tests whether the operands are indistinguishable;
3047 it does not test whether they are equal using C's == operation.
3048 The distinction is important for IEEE floating point, because
3049 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3050 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3052 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3053 even though it may hold multiple values during a function.
3054 This is because a GCC tree node guarantees that nothing else is
3055 executed between the evaluation of its "operands" (which may often
3056 be evaluated in arbitrary order). Hence if the operands themselves
3057 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3058 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3059 unset means assuming isochronic (or instantaneous) tree equivalence.
3060 Unless comparing arbitrary expression trees, such as from different
3061 statements, this flag can usually be left unset.
3063 If OEP_PURE_SAME is set, then pure functions with identical arguments
3064 are considered the same. It is used when the caller has other ways
3065 to ensure that global memory is unchanged in between.
3067 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3068 not values of expressions.
3070 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3071 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3073 If OEP_BITWISE is set, then require the values to be bitwise identical
3074 rather than simply numerically equal. Do not take advantage of things
3075 like math-related flags or undefined behavior; only return true for
3076 values that are provably bitwise identical in all circumstances.
3078 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3079 any operand with side effect. This is unnecessarily conservative in the
3080 case we know that arg0 and arg1 are in disjoint code paths (such as in
3081 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3082 addresses with TREE_CONSTANT flag set so we know that &var == &var
3083 even if var is volatile. */
3085 bool
3086 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3087 unsigned int flags)
3089 bool r;
3090 if (verify_hash_value (arg0, arg1, flags, &r))
3091 return r;
3093 STRIP_ANY_LOCATION_WRAPPER (arg0);
3094 STRIP_ANY_LOCATION_WRAPPER (arg1);
3096 /* If either is ERROR_MARK, they aren't equal. */
3097 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3098 || TREE_TYPE (arg0) == error_mark_node
3099 || TREE_TYPE (arg1) == error_mark_node)
3100 return false;
3102 /* Similarly, if either does not have a type (like a template id),
3103 they aren't equal. */
3104 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3105 return false;
3107 /* Bitwise identity makes no sense if the values have different layouts. */
3108 if ((flags & OEP_BITWISE)
3109 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3110 return false;
3112 /* We cannot consider pointers to different address space equal. */
3113 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3114 && POINTER_TYPE_P (TREE_TYPE (arg1))
3115 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3116 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3117 return false;
3119 /* Check equality of integer constants before bailing out due to
3120 precision differences. */
3121 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3123 /* Address of INTEGER_CST is not defined; check that we did not forget
3124 to drop the OEP_ADDRESS_OF flags. */
3125 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3126 return tree_int_cst_equal (arg0, arg1);
3129 if (!(flags & OEP_ADDRESS_OF))
3131 /* If both types don't have the same signedness, then we can't consider
3132 them equal. We must check this before the STRIP_NOPS calls
3133 because they may change the signedness of the arguments. As pointers
3134 strictly don't have a signedness, require either two pointers or
3135 two non-pointers as well. */
3136 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3137 || POINTER_TYPE_P (TREE_TYPE (arg0))
3138 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3139 return false;
3141 /* If both types don't have the same precision, then it is not safe
3142 to strip NOPs. */
3143 if (element_precision (TREE_TYPE (arg0))
3144 != element_precision (TREE_TYPE (arg1)))
3145 return false;
3147 STRIP_NOPS (arg0);
3148 STRIP_NOPS (arg1);
3150 #if 0
3151 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3152 sanity check once the issue is solved. */
3153 else
3154 /* Addresses of conversions and SSA_NAMEs (and many other things)
3155 are not defined. Check that we did not forget to drop the
3156 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3157 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3158 && TREE_CODE (arg0) != SSA_NAME);
3159 #endif
3161 /* In case both args are comparisons but with different comparison
3162 code, try to swap the comparison operands of one arg to produce
3163 a match and compare that variant. */
3164 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3165 && COMPARISON_CLASS_P (arg0)
3166 && COMPARISON_CLASS_P (arg1))
3168 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3170 if (TREE_CODE (arg0) == swap_code)
3171 return operand_equal_p (TREE_OPERAND (arg0, 0),
3172 TREE_OPERAND (arg1, 1), flags)
3173 && operand_equal_p (TREE_OPERAND (arg0, 1),
3174 TREE_OPERAND (arg1, 0), flags);
3177 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3179 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3180 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3182 else if (flags & OEP_ADDRESS_OF)
3184 /* If we are interested in comparing addresses ignore
3185 MEM_REF wrappings of the base that can appear just for
3186 TBAA reasons. */
3187 if (TREE_CODE (arg0) == MEM_REF
3188 && DECL_P (arg1)
3189 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3190 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3191 && integer_zerop (TREE_OPERAND (arg0, 1)))
3192 return true;
3193 else if (TREE_CODE (arg1) == MEM_REF
3194 && DECL_P (arg0)
3195 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3196 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3197 && integer_zerop (TREE_OPERAND (arg1, 1)))
3198 return true;
3199 return false;
3201 else
3202 return false;
3205 /* When not checking addresses, this is needed for conversions and for
3206 COMPONENT_REF. Might as well play it safe and always test this. */
3207 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3208 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3209 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3210 && !(flags & OEP_ADDRESS_OF)))
3211 return false;
3213 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3214 We don't care about side effects in that case because the SAVE_EXPR
3215 takes care of that for us. In all other cases, two expressions are
3216 equal if they have no side effects. If we have two identical
3217 expressions with side effects that should be treated the same due
3218 to the only side effects being identical SAVE_EXPR's, that will
3219 be detected in the recursive calls below.
3220 If we are taking an invariant address of two identical objects
3221 they are necessarily equal as well. */
3222 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3223 && (TREE_CODE (arg0) == SAVE_EXPR
3224 || (flags & OEP_MATCH_SIDE_EFFECTS)
3225 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3226 return true;
3228 /* Next handle constant cases, those for which we can return 1 even
3229 if ONLY_CONST is set. */
3230 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3231 switch (TREE_CODE (arg0))
3233 case INTEGER_CST:
3234 return tree_int_cst_equal (arg0, arg1);
3236 case FIXED_CST:
3237 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3238 TREE_FIXED_CST (arg1));
3240 case REAL_CST:
3241 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3242 return true;
3244 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3246 /* If we do not distinguish between signed and unsigned zero,
3247 consider them equal. */
3248 if (real_zerop (arg0) && real_zerop (arg1))
3249 return true;
3251 return false;
3253 case VECTOR_CST:
3255 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3256 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3257 return false;
3259 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3260 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3261 return false;
3263 unsigned int count = vector_cst_encoded_nelts (arg0);
3264 for (unsigned int i = 0; i < count; ++i)
3265 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3266 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3267 return false;
3268 return true;
3271 case COMPLEX_CST:
3272 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3273 flags)
3274 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3275 flags));
3277 case STRING_CST:
3278 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3279 && ! memcmp (TREE_STRING_POINTER (arg0),
3280 TREE_STRING_POINTER (arg1),
3281 TREE_STRING_LENGTH (arg0)));
3283 case ADDR_EXPR:
3284 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3285 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3286 flags | OEP_ADDRESS_OF
3287 | OEP_MATCH_SIDE_EFFECTS);
3288 case CONSTRUCTOR:
3289 /* In GIMPLE empty constructors are allowed in initializers of
3290 aggregates. */
3291 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3292 default:
3293 break;
3296 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3297 two instances of undefined behavior will give identical results. */
3298 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3299 return false;
3301 /* Define macros to test an operand from arg0 and arg1 for equality and a
3302 variant that allows null and views null as being different from any
3303 non-null value. In the latter case, if either is null, both
3304 must be; otherwise, do the normal comparison. */
3305 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3306 TREE_OPERAND (arg1, N), flags)
3308 #define OP_SAME_WITH_NULL(N) \
3309 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3310 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3312 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3314 case tcc_unary:
3315 /* Two conversions are equal only if signedness and modes match. */
3316 switch (TREE_CODE (arg0))
3318 CASE_CONVERT:
3319 case FIX_TRUNC_EXPR:
3320 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3321 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3322 return false;
3323 break;
3324 default:
3325 break;
3328 return OP_SAME (0);
3331 case tcc_comparison:
3332 case tcc_binary:
3333 if (OP_SAME (0) && OP_SAME (1))
3334 return true;
3336 /* For commutative ops, allow the other order. */
3337 return (commutative_tree_code (TREE_CODE (arg0))
3338 && operand_equal_p (TREE_OPERAND (arg0, 0),
3339 TREE_OPERAND (arg1, 1), flags)
3340 && operand_equal_p (TREE_OPERAND (arg0, 1),
3341 TREE_OPERAND (arg1, 0), flags));
3343 case tcc_reference:
3344 /* If either of the pointer (or reference) expressions we are
3345 dereferencing contain a side effect, these cannot be equal,
3346 but their addresses can be. */
3347 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3348 && (TREE_SIDE_EFFECTS (arg0)
3349 || TREE_SIDE_EFFECTS (arg1)))
3350 return false;
3352 switch (TREE_CODE (arg0))
3354 case INDIRECT_REF:
3355 if (!(flags & OEP_ADDRESS_OF))
3357 if (TYPE_ALIGN (TREE_TYPE (arg0))
3358 != TYPE_ALIGN (TREE_TYPE (arg1)))
3359 return false;
3360 /* Verify that the access types are compatible. */
3361 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3362 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3363 return false;
3365 flags &= ~OEP_ADDRESS_OF;
3366 return OP_SAME (0);
3368 case IMAGPART_EXPR:
3369 /* Require the same offset. */
3370 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3371 TYPE_SIZE (TREE_TYPE (arg1)),
3372 flags & ~OEP_ADDRESS_OF))
3373 return false;
3375 /* Fallthru. */
3376 case REALPART_EXPR:
3377 case VIEW_CONVERT_EXPR:
3378 return OP_SAME (0);
3380 case TARGET_MEM_REF:
3381 case MEM_REF:
3382 if (!(flags & OEP_ADDRESS_OF))
3384 /* Require equal access sizes */
3385 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3386 && (!TYPE_SIZE (TREE_TYPE (arg0))
3387 || !TYPE_SIZE (TREE_TYPE (arg1))
3388 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3389 TYPE_SIZE (TREE_TYPE (arg1)),
3390 flags)))
3391 return false;
3392 /* Verify that access happens in similar types. */
3393 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3394 return false;
3395 /* Verify that accesses are TBAA compatible. */
3396 if (!alias_ptr_types_compatible_p
3397 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3398 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3399 || (MR_DEPENDENCE_CLIQUE (arg0)
3400 != MR_DEPENDENCE_CLIQUE (arg1))
3401 || (MR_DEPENDENCE_BASE (arg0)
3402 != MR_DEPENDENCE_BASE (arg1)))
3403 return false;
3404 /* Verify that alignment is compatible. */
3405 if (TYPE_ALIGN (TREE_TYPE (arg0))
3406 != TYPE_ALIGN (TREE_TYPE (arg1)))
3407 return false;
3409 flags &= ~OEP_ADDRESS_OF;
3410 return (OP_SAME (0) && OP_SAME (1)
3411 /* TARGET_MEM_REF require equal extra operands. */
3412 && (TREE_CODE (arg0) != TARGET_MEM_REF
3413 || (OP_SAME_WITH_NULL (2)
3414 && OP_SAME_WITH_NULL (3)
3415 && OP_SAME_WITH_NULL (4))));
3417 case ARRAY_REF:
3418 case ARRAY_RANGE_REF:
3419 if (!OP_SAME (0))
3420 return false;
3421 flags &= ~OEP_ADDRESS_OF;
3422 /* Compare the array index by value first if it is constant, as we
3423 may have different types but the same value here. */
3424 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3425 TREE_OPERAND (arg1, 1))
3426 || OP_SAME (1))
3427 && OP_SAME_WITH_NULL (2)
3428 && OP_SAME_WITH_NULL (3)
3429 /* Compare low bound and element size as with OEP_ADDRESS_OF
3430 we have to account for the offset of the ref. */
3431 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3432 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3433 || (operand_equal_p (array_ref_low_bound
3434 (CONST_CAST_TREE (arg0)),
3435 array_ref_low_bound
3436 (CONST_CAST_TREE (arg1)), flags)
3437 && operand_equal_p (array_ref_element_size
3438 (CONST_CAST_TREE (arg0)),
3439 array_ref_element_size
3440 (CONST_CAST_TREE (arg1)),
3441 flags))));
3443 case COMPONENT_REF:
3444 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3445 may be NULL when we're called to compare MEM_EXPRs. */
3446 if (!OP_SAME_WITH_NULL (0))
3447 return false;
3449 bool compare_address = flags & OEP_ADDRESS_OF;
3451 /* Most of the time we only need to compare FIELD_DECLs for equality.
3452 However, when determining an address, look into the actual offsets.
3453 These may match for unions and unshared record types. */
3454 flags &= ~OEP_ADDRESS_OF;
3455 if (!OP_SAME (1))
3457 if (compare_address
3458 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3460 tree field0 = TREE_OPERAND (arg0, 1);
3461 tree field1 = TREE_OPERAND (arg1, 1);
3463 /* Non-FIELD_DECL operands can appear in C++ templates. */
3464 if (TREE_CODE (field0) != FIELD_DECL
3465 || TREE_CODE (field1) != FIELD_DECL
3466 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3467 DECL_FIELD_OFFSET (field1), flags)
3468 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3469 DECL_FIELD_BIT_OFFSET (field1),
3470 flags))
3471 return false;
3473 else
3474 return false;
3477 return OP_SAME_WITH_NULL (2);
3479 case BIT_FIELD_REF:
3480 if (!OP_SAME (0))
3481 return false;
3482 flags &= ~OEP_ADDRESS_OF;
3483 return OP_SAME (1) && OP_SAME (2);
3485 default:
3486 return false;
3489 case tcc_expression:
3490 switch (TREE_CODE (arg0))
3492 case ADDR_EXPR:
3493 /* Be sure we pass right ADDRESS_OF flag. */
3494 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3495 return operand_equal_p (TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0),
3497 flags | OEP_ADDRESS_OF);
3499 case TRUTH_NOT_EXPR:
3500 return OP_SAME (0);
3502 case TRUTH_ANDIF_EXPR:
3503 case TRUTH_ORIF_EXPR:
3504 return OP_SAME (0) && OP_SAME (1);
3506 case WIDEN_MULT_PLUS_EXPR:
3507 case WIDEN_MULT_MINUS_EXPR:
3508 if (!OP_SAME (2))
3509 return false;
3510 /* The multiplication operands are commutative. */
3511 /* FALLTHRU */
3513 case TRUTH_AND_EXPR:
3514 case TRUTH_OR_EXPR:
3515 case TRUTH_XOR_EXPR:
3516 if (OP_SAME (0) && OP_SAME (1))
3517 return true;
3519 /* Otherwise take into account this is a commutative operation. */
3520 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3521 TREE_OPERAND (arg1, 1), flags)
3522 && operand_equal_p (TREE_OPERAND (arg0, 1),
3523 TREE_OPERAND (arg1, 0), flags));
3525 case COND_EXPR:
3526 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3527 return false;
3528 flags &= ~OEP_ADDRESS_OF;
3529 return OP_SAME (0);
3531 case BIT_INSERT_EXPR:
3532 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3533 of op1; check that it is the same for both arguments. */
3534 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3535 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3536 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3537 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3538 return false;
3539 /* FALLTHRU */
3541 case VEC_COND_EXPR:
3542 case DOT_PROD_EXPR:
3543 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3545 case MODIFY_EXPR:
3546 case INIT_EXPR:
3547 case COMPOUND_EXPR:
3548 case PREDECREMENT_EXPR:
3549 case PREINCREMENT_EXPR:
3550 case POSTDECREMENT_EXPR:
3551 case POSTINCREMENT_EXPR:
3552 if (flags & OEP_LEXICOGRAPHIC)
3553 return OP_SAME (0) && OP_SAME (1);
3554 return false;
3556 case CLEANUP_POINT_EXPR:
3557 case EXPR_STMT:
3558 case SAVE_EXPR:
3559 if (flags & OEP_LEXICOGRAPHIC)
3560 return OP_SAME (0);
3561 return false;
3563 case OBJ_TYPE_REF:
3564 /* Virtual table reference. */
3565 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3566 OBJ_TYPE_REF_EXPR (arg1), flags))
3567 return false;
3568 flags &= ~OEP_ADDRESS_OF;
3569 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3570 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3571 return false;
3572 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3573 OBJ_TYPE_REF_OBJECT (arg1), flags))
3574 return false;
3575 if (virtual_method_call_p (arg0))
3577 if (!virtual_method_call_p (arg1))
3578 return false;
3579 return types_same_for_odr (obj_type_ref_class (arg0),
3580 obj_type_ref_class (arg1));
3582 return false;
3584 default:
3585 return false;
3588 case tcc_vl_exp:
3589 switch (TREE_CODE (arg0))
3591 case CALL_EXPR:
3592 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3593 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3594 /* If the two CALL_EXPRs are not both internal or both normal
3595 function calls, then they are not equal. */
3596 return false;
3597 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3599 /* If the CALL_EXPRs call different internal functions, then they
3600 are not equal. */
3601 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3602 return false;
3604 else
3606 /* If the CALL_EXPRs call different functions, then they are not
3607 equal. */
3608 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3609 flags))
3610 return false;
3613 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3615 unsigned int cef = call_expr_flags (arg0);
3616 if (flags & OEP_PURE_SAME)
3617 cef &= ECF_CONST | ECF_PURE;
3618 else
3619 cef &= ECF_CONST;
3620 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3621 return false;
3624 /* Now see if all the arguments are the same. */
3626 const_call_expr_arg_iterator iter0, iter1;
3627 const_tree a0, a1;
3628 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3629 a1 = first_const_call_expr_arg (arg1, &iter1);
3630 a0 && a1;
3631 a0 = next_const_call_expr_arg (&iter0),
3632 a1 = next_const_call_expr_arg (&iter1))
3633 if (! operand_equal_p (a0, a1, flags))
3634 return false;
3636 /* If we get here and both argument lists are exhausted
3637 then the CALL_EXPRs are equal. */
3638 return ! (a0 || a1);
3640 default:
3641 return false;
3644 case tcc_declaration:
3645 /* Consider __builtin_sqrt equal to sqrt. */
3646 if (TREE_CODE (arg0) == FUNCTION_DECL)
3647 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3648 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3649 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3650 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3652 if (DECL_P (arg0)
3653 && (flags & OEP_DECL_NAME)
3654 && (flags & OEP_LEXICOGRAPHIC))
3656 /* Consider decls with the same name equal. The caller needs
3657 to make sure they refer to the same entity (such as a function
3658 formal parameter). */
3659 tree a0name = DECL_NAME (arg0);
3660 tree a1name = DECL_NAME (arg1);
3661 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3662 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3663 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3665 return false;
3667 case tcc_exceptional:
3668 if (TREE_CODE (arg0) == CONSTRUCTOR)
3670 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3671 return false;
3673 /* In GIMPLE constructors are used only to build vectors from
3674 elements. Individual elements in the constructor must be
3675 indexed in increasing order and form an initial sequence.
3677 We make no effort to compare constructors in generic.
3678 (see sem_variable::equals in ipa-icf which can do so for
3679 constants). */
3680 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3681 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3682 return false;
3684 /* Be sure that vectors constructed have the same representation.
3685 We only tested that element precision and modes match.
3686 Vectors may be BLKmode, so also check that the number of
3687 parts matches. */
3688 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3689 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3690 return false;
3692 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3693 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3694 unsigned int len = vec_safe_length (v0);
3696 if (len != vec_safe_length (v1))
3697 return false;
3699 for (unsigned int i = 0; i < len; i++)
3701 constructor_elt *c0 = &(*v0)[i];
3702 constructor_elt *c1 = &(*v1)[i];
3704 if (!operand_equal_p (c0->value, c1->value, flags)
3705 /* In GIMPLE the indexes can be either NULL or matching i.
3706 Double check this so we won't get false
3707 positives for GENERIC. */
3708 || (c0->index
3709 && (TREE_CODE (c0->index) != INTEGER_CST
3710 || compare_tree_int (c0->index, i)))
3711 || (c1->index
3712 && (TREE_CODE (c1->index) != INTEGER_CST
3713 || compare_tree_int (c1->index, i))))
3714 return false;
3716 return true;
3718 else if (TREE_CODE (arg0) == STATEMENT_LIST
3719 && (flags & OEP_LEXICOGRAPHIC))
3721 /* Compare the STATEMENT_LISTs. */
3722 tree_stmt_iterator tsi1, tsi2;
3723 tree body1 = CONST_CAST_TREE (arg0);
3724 tree body2 = CONST_CAST_TREE (arg1);
3725 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3726 tsi_next (&tsi1), tsi_next (&tsi2))
3728 /* The lists don't have the same number of statements. */
3729 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3730 return false;
3731 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3732 return true;
3733 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3734 flags & (OEP_LEXICOGRAPHIC
3735 | OEP_NO_HASH_CHECK)))
3736 return false;
3739 return false;
3741 case tcc_statement:
3742 switch (TREE_CODE (arg0))
3744 case RETURN_EXPR:
3745 if (flags & OEP_LEXICOGRAPHIC)
3746 return OP_SAME_WITH_NULL (0);
3747 return false;
3748 case DEBUG_BEGIN_STMT:
3749 if (flags & OEP_LEXICOGRAPHIC)
3750 return true;
3751 return false;
3752 default:
3753 return false;
3756 default:
3757 return false;
3760 #undef OP_SAME
3761 #undef OP_SAME_WITH_NULL
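/* Illustrative sketch, not in the original source: the FLAGS argument
   changes what "equal" means; comparing addresses rather than values
   must pass OEP_ADDRESS_OF.  The example function name is
   hypothetical.  */
#if 0
static bool
example_same_address (tree a, tree b)
{
  return operand_equal_p (a, b, OEP_ADDRESS_OF);
}
#endif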
3764 /* Generate a hash value for an expression. This can be used iteratively
3765 by passing a previous result as the HSTATE argument. */
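/* For instance (illustrative, not from the original source), callers
   typically combine several operands into one hash via the public
   inchash::add_expr wrapper:

     inchash::hash h;
     inchash::add_expr (op0, h, 0);
     inchash::add_expr (op1, h, 0);
     hashval_t v = h.end ();  */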
3767 void
3768 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3769 unsigned int flags)
3771 int i;
3772 enum tree_code code;
3773 enum tree_code_class tclass;
3775 if (t == NULL_TREE || t == error_mark_node)
3777 hstate.merge_hash (0);
3778 return;
3781 STRIP_ANY_LOCATION_WRAPPER (t);
3783 if (!(flags & OEP_ADDRESS_OF))
3784 STRIP_NOPS (t);
3786 code = TREE_CODE (t);
3788 switch (code)
3790 /* Alas, constants aren't shared, so we can't rely on pointer
3791 identity. */
3792 case VOID_CST:
3793 hstate.merge_hash (0);
3794 return;
3795 case INTEGER_CST:
3796 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3797 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3798 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3799 return;
3800 case REAL_CST:
3802 unsigned int val2;
3803 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3804 val2 = rvc_zero;
3805 else
3806 val2 = real_hash (TREE_REAL_CST_PTR (t));
3807 hstate.merge_hash (val2);
3808 return;
3810 case FIXED_CST:
3812 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3813 hstate.merge_hash (val2);
3814 return;
3816 case STRING_CST:
3817 hstate.add ((const void *) TREE_STRING_POINTER (t),
3818 TREE_STRING_LENGTH (t));
3819 return;
3820 case COMPLEX_CST:
3821 hash_operand (TREE_REALPART (t), hstate, flags);
3822 hash_operand (TREE_IMAGPART (t), hstate, flags);
3823 return;
3824 case VECTOR_CST:
3826 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3827 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3828 unsigned int count = vector_cst_encoded_nelts (t);
3829 for (unsigned int i = 0; i < count; ++i)
3830 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3831 return;
3833 case SSA_NAME:
3834 /* We can just compare by pointer. */
3835 hstate.add_hwi (SSA_NAME_VERSION (t));
3836 return;
3837 case PLACEHOLDER_EXPR:
3838 /* The node itself doesn't matter. */
3839 return;
3840 case BLOCK:
3841 case OMP_CLAUSE:
3842 /* Ignore. */
3843 return;
3844 case TREE_LIST:
3845 /* A list of expressions, for a CALL_EXPR or as the elements of a
3846 VECTOR_CST. */
3847 for (; t; t = TREE_CHAIN (t))
3848 hash_operand (TREE_VALUE (t), hstate, flags);
3849 return;
3850 case CONSTRUCTOR:
3852 unsigned HOST_WIDE_INT idx;
3853 tree field, value;
3854 flags &= ~OEP_ADDRESS_OF;
3855 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3858 /* In GIMPLE the indexes can be either NULL or matching i. */
3859 if (field == NULL_TREE)
3860 field = bitsize_int (idx);
3861 hash_operand (field, hstate, flags);
3862 hash_operand (value, hstate, flags);
3864 return;
3866 case STATEMENT_LIST:
3868 tree_stmt_iterator i;
3869 for (i = tsi_start (CONST_CAST_TREE (t));
3870 !tsi_end_p (i); tsi_next (&i))
3871 hash_operand (tsi_stmt (i), hstate, flags);
3872 return;
3874 case TREE_VEC:
3875 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3876 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3877 return;
3878 case IDENTIFIER_NODE:
3879 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3880 return;
3881 case FUNCTION_DECL:
3882 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3883 Otherwise nodes that compare equal according to operand_equal_p might
3884 get different hash codes. However, don't do this for machine specific
3885 or front end builtins, since the function code is overloaded in those
3886 cases. */
3887 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3888 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3890 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3891 code = TREE_CODE (t);
3893 /* FALL THROUGH */
3894 default:
3895 if (POLY_INT_CST_P (t))
3897 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3898 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3899 return;
3901 tclass = TREE_CODE_CLASS (code);
3903 if (tclass == tcc_declaration)
3905 /* DECLs have a unique ID. */
3906 hstate.add_hwi (DECL_UID (t));
3908 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3910 /* For comparisons that can be swapped, use the lower
3911 tree code. */
3912 enum tree_code ccode = swap_tree_comparison (code);
3913 if (code < ccode)
3914 ccode = code;
3915 hstate.add_object (ccode);
3916 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3917 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3919 else if (CONVERT_EXPR_CODE_P (code))
3921 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3922 operand_equal_p. */
3923 enum tree_code ccode = NOP_EXPR;
3924 hstate.add_object (ccode);
3926 /* Don't hash the type, as that can lead to having nodes which
3927 compare equal according to operand_equal_p, but which
3928 have different hash codes. Make sure to include signedness
3929 in the hash computation. */
3930 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3931 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3933 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
3934 else if (code == MEM_REF
3935 && (flags & OEP_ADDRESS_OF) != 0
3936 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3937 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3938 && integer_zerop (TREE_OPERAND (t, 1)))
3939 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3940 hstate, flags);
3941 /* Don't ICE on FE specific trees, or their arguments etc.
3942 during operand_equal_p hash verification. */
3943 else if (!IS_EXPR_CODE_CLASS (tclass))
3944 gcc_assert (flags & OEP_HASH_CHECK);
3945 else
3947 unsigned int sflags = flags;
3949 hstate.add_object (code);
3951 switch (code)
3953 case ADDR_EXPR:
3954 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3955 flags |= OEP_ADDRESS_OF;
3956 sflags = flags;
3957 break;
3959 case INDIRECT_REF:
3960 case MEM_REF:
3961 case TARGET_MEM_REF:
3962 flags &= ~OEP_ADDRESS_OF;
3963 sflags = flags;
3964 break;
3966 case COMPONENT_REF:
3967 if (sflags & OEP_ADDRESS_OF)
3969 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3970 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3971 hstate, flags & ~OEP_ADDRESS_OF);
3972 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3973 hstate, flags & ~OEP_ADDRESS_OF);
3974 return;
3976 break;
3977 case ARRAY_REF:
3978 case ARRAY_RANGE_REF:
3979 case BIT_FIELD_REF:
3980 sflags &= ~OEP_ADDRESS_OF;
3981 break;
3983 case COND_EXPR:
3984 flags &= ~OEP_ADDRESS_OF;
3985 break;
3987 case WIDEN_MULT_PLUS_EXPR:
3988 case WIDEN_MULT_MINUS_EXPR:
3990 /* The multiplication operands are commutative. */
3991 inchash::hash one, two;
3992 hash_operand (TREE_OPERAND (t, 0), one, flags);
3993 hash_operand (TREE_OPERAND (t, 1), two, flags);
3994 hstate.add_commutative (one, two);
3995 hash_operand (TREE_OPERAND (t, 2), two, flags);
3996 return;
3999 case CALL_EXPR:
4000 if (CALL_EXPR_FN (t) == NULL_TREE)
4001 hstate.add_int (CALL_EXPR_IFN (t));
4002 break;
4004 case TARGET_EXPR:
4005 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4006 Usually different TARGET_EXPRs should just use
4007 different temporaries in their slots. */
4008 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4009 return;
4011 case OBJ_TYPE_REF:
4012 /* Virtual table reference. */
4013 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4014 flags &= ~OEP_ADDRESS_OF;
4015 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4016 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4017 if (!virtual_method_call_p (t))
4018 return;
4019 if (tree c = obj_type_ref_class (t))
4021 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4022 /* We compute mangled names only when free_lang_data is run.
4023 In that case we can hash precisely. */
4024 if (TREE_CODE (c) == TYPE_DECL
4025 && DECL_ASSEMBLER_NAME_SET_P (c))
4026 hstate.add_object
4027 (IDENTIFIER_HASH_VALUE
4028 (DECL_ASSEMBLER_NAME (c)));
4030 return;
4031 default:
4032 break;
4035 /* Don't hash the type, as that can lead to having nodes which
4036 compare equal according to operand_equal_p, but which
4037 have different hash codes. */
4038 if (code == NON_LVALUE_EXPR)
4041 /* Make sure to include signedness in the hash computation. */
4041 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4042 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4045 else if (commutative_tree_code (code))
4047 /* It's a commutative expression. We want to hash it the same
4048 however it appears. We do this by first hashing both operands
4049 and then rehashing based on the order of their independent
4050 hashes. */
4051 inchash::hash one, two;
4052 hash_operand (TREE_OPERAND (t, 0), one, flags);
4053 hash_operand (TREE_OPERAND (t, 1), two, flags);
4054 hstate.add_commutative (one, two);
4056 else
4057 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4058 hash_operand (TREE_OPERAND (t, i), hstate,
4059 i == 0 ? flags : sflags);
4061 return;
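/* A minimal standalone sketch, in plain C++ with unsigned values
   standing in for inchash state, of the order-insensitive combination
   used for commutative codes above (the helper name is illustrative,
   not part of this file): hash each operand separately, then mix the
   two sub-hashes symmetrically so that a + b and b + a hash alike.  */

static inline unsigned
sketch_commutative_hash (unsigned op0_hash, unsigned op1_hash)
{
  /* Both + and ^ are commutative, so swapping the arguments cannot
     change the result.  */
  return (op0_hash + op1_hash) ^ (op0_hash * op1_hash);
}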
4065 bool
4066 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4067 unsigned int flags, bool *ret)
4069 /* When checking and unless comparing DECL names, verify that if
4070 the outermost operand_equal_p call returns non-zero then ARG0
4071 and ARG1 have the same hash value. */
4072 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4074 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4076 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4078 inchash::hash hstate0 (0), hstate1 (0);
4079 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4080 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4081 hashval_t h0 = hstate0.end ();
4082 hashval_t h1 = hstate1.end ();
4083 gcc_assert (h0 == h1);
4085 *ret = true;
4087 else
4088 *ret = false;
4090 return true;
4093 return false;
4097 static operand_compare default_compare_instance;
4099 /* Convenience wrapper around the operand_compare class, since we usually
4100 do not need to play with the valueizer. */
4102 bool
4103 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4105 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4108 namespace inchash
4111 /* Generate a hash value for an expression. This can be used iteratively
4112 by passing a previous result as the HSTATE argument.
4114 This function is intended to produce the same hash for expressions which
4115 would compare equal using operand_equal_p. */
4116 void
4117 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4119 default_compare_instance.hash_operand (t, hstate, flags);
4124 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4125 with a different signedness or a narrower precision. */
4127 static bool
4128 operand_equal_for_comparison_p (tree arg0, tree arg1)
4130 if (operand_equal_p (arg0, arg1, 0))
4131 return true;
4133 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4134 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4135 return false;
4137 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4138 and see if the inner values are the same. This removes any
4139 signedness comparison, which doesn't matter here. */
4140 tree op0 = arg0;
4141 tree op1 = arg1;
4142 STRIP_NOPS (op0);
4143 STRIP_NOPS (op1);
4144 if (operand_equal_p (op0, op1, 0))
4145 return true;
4147 /* Discard a single widening conversion from ARG1 and see if the inner
4148 value is the same as ARG0. */
4149 if (CONVERT_EXPR_P (arg1)
4150 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4151 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4152 < TYPE_PRECISION (TREE_TYPE (arg1))
4153 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4154 return true;
4156 return false;
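/* For instance, operand_equal_for_comparison_p accepts (int) C as a
   variant of C for signed char C: stripping the widening conversion
   exposes the same value, so C < N and (int) C < N test the same
   quantity.  */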
4159 /* See if ARG is an expression that is either a comparison or is performing
4160 arithmetic on comparisons. The comparisons must only be comparing
4161 two different values, which will be stored in *CVAL1 and *CVAL2; if
4162 they are nonzero it means that some operands have already been found.
4163 No variables may be used anywhere else in the expression except in the
4164 comparisons.
4166 If this is true, return true. Otherwise, return false. */
4168 static bool
4169 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4171 enum tree_code code = TREE_CODE (arg);
4172 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4174 /* We can handle some of the tcc_expression cases here. */
4175 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4176 tclass = tcc_unary;
4177 else if (tclass == tcc_expression
4178 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4179 || code == COMPOUND_EXPR))
4180 tclass = tcc_binary;
4182 switch (tclass)
4184 case tcc_unary:
4185 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4187 case tcc_binary:
4188 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4189 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4191 case tcc_constant:
4192 return true;
4194 case tcc_expression:
4195 if (code == COND_EXPR)
4196 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4197 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4198 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4199 return false;
4201 case tcc_comparison:
4202 /* First see if we can handle the first operand, then the second. For
4203 the second operand, we know *CVAL1 can't be zero. It must be that
4204 one side of the comparison is each of the values; test for the
4205 case where this isn't true by failing if the two operands
4206 are the same. */
4208 if (operand_equal_p (TREE_OPERAND (arg, 0),
4209 TREE_OPERAND (arg, 1), 0))
4210 return false;
4212 if (*cval1 == 0)
4213 *cval1 = TREE_OPERAND (arg, 0);
4214 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4216 else if (*cval2 == 0)
4217 *cval2 = TREE_OPERAND (arg, 0);
4218 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4220 else
4221 return false;
4223 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4225 else if (*cval2 == 0)
4226 *cval2 = TREE_OPERAND (arg, 1);
4227 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4229 else
4230 return false;
4232 return true;
4234 default:
4235 return false;
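/* For instance, A == B || A < B satisfies twoval_comparison_p: the
   first comparison records *CVAL1 = A and *CVAL2 = B, and the second
   matches both.  A < B && C < D fails as soon as the third distinct
   operand C is seen.  */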
4239 /* ARG is a tree that is known to contain just arithmetic operations and
4240 comparisons. Evaluate the operations in the tree substituting NEW0 for
4241 any occurrence of OLD0 as an operand of a comparison and likewise for
4242 NEW1 and OLD1. */
4244 static tree
4245 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4246 tree old1, tree new1)
4248 tree type = TREE_TYPE (arg);
4249 enum tree_code code = TREE_CODE (arg);
4250 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4252 /* We can handle some of the tcc_expression cases here. */
4253 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4254 tclass = tcc_unary;
4255 else if (tclass == tcc_expression
4256 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4257 tclass = tcc_binary;
4259 switch (tclass)
4261 case tcc_unary:
4262 return fold_build1_loc (loc, code, type,
4263 eval_subst (loc, TREE_OPERAND (arg, 0),
4264 old0, new0, old1, new1));
4266 case tcc_binary:
4267 return fold_build2_loc (loc, code, type,
4268 eval_subst (loc, TREE_OPERAND (arg, 0),
4269 old0, new0, old1, new1),
4270 eval_subst (loc, TREE_OPERAND (arg, 1),
4271 old0, new0, old1, new1));
4273 case tcc_expression:
4274 switch (code)
4276 case SAVE_EXPR:
4277 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4278 old1, new1);
4280 case COMPOUND_EXPR:
4281 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4282 old1, new1);
4284 case COND_EXPR:
4285 return fold_build3_loc (loc, code, type,
4286 eval_subst (loc, TREE_OPERAND (arg, 0),
4287 old0, new0, old1, new1),
4288 eval_subst (loc, TREE_OPERAND (arg, 1),
4289 old0, new0, old1, new1),
4290 eval_subst (loc, TREE_OPERAND (arg, 2),
4291 old0, new0, old1, new1));
4292 default:
4293 break;
4295 /* Fall through - ??? */
4297 case tcc_comparison:
4299 tree arg0 = TREE_OPERAND (arg, 0);
4300 tree arg1 = TREE_OPERAND (arg, 1);
4302 /* We need to check both for exact equality and tree equality. The
4303 former will be true if the operand has a side-effect. In that
4304 case, we know the operand occurred exactly once. */
4306 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4307 arg0 = new0;
4308 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4309 arg0 = new1;
4311 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4312 arg1 = new0;
4313 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4314 arg1 = new1;
4316 return fold_build2_loc (loc, code, type, arg0, arg1);
4319 default:
4320 return arg;
4324 /* Return a tree for the case when the result of an expression is RESULT
4325 converted to TYPE and OMITTED was previously an operand of the expression
4326 but is now not needed (e.g., we folded OMITTED * 0).
4328 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4329 the conversion of RESULT to TYPE. */
4331 tree
4332 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4334 tree t = fold_convert_loc (loc, type, result);
4336 /* If the resulting operand is an empty statement, just return the omitted
4337 statement cast to void. */
4338 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4339 return build1_loc (loc, NOP_EXPR, void_type_node,
4340 fold_ignored_result (omitted));
4342 if (TREE_SIDE_EFFECTS (omitted))
4343 return build2_loc (loc, COMPOUND_EXPR, type,
4344 fold_ignored_result (omitted), t);
4346 return non_lvalue_loc (loc, t);
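/* For instance, folding F () * 0 reaches this with RESULT 0 and
   OMITTED F (); because the call has side effects, the result is the
   COMPOUND_EXPR (F (), 0), which still evaluates the call but yields
   zero.  */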
4349 /* Return a tree for the case when the result of an expression is RESULT
4350 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4351 of the expression but are now not needed.
4353 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4354 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4355 evaluated before OMITTED2. Otherwise, if neither has side effects,
4356 just do the conversion of RESULT to TYPE. */
4358 tree
4359 omit_two_operands_loc (location_t loc, tree type, tree result,
4360 tree omitted1, tree omitted2)
4362 tree t = fold_convert_loc (loc, type, result);
4364 if (TREE_SIDE_EFFECTS (omitted2))
4365 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4366 if (TREE_SIDE_EFFECTS (omitted1))
4367 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4369 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4373 /* Return a simplified tree node for the truth-negation of ARG. This
4374 never alters ARG itself. We assume that ARG is an operation that
4375 returns a truth value (0 or 1).
4377 FIXME: one would think we would fold the result, but it causes
4378 problems with the dominator optimizer. */
4380 static tree
4381 fold_truth_not_expr (location_t loc, tree arg)
4383 tree type = TREE_TYPE (arg);
4384 enum tree_code code = TREE_CODE (arg);
4385 location_t loc1, loc2;
4387 /* If this is a comparison, we can simply invert it, except for
4388 floating-point non-equality comparisons, in which case we just
4389 enclose a TRUTH_NOT_EXPR around what we have. */
4391 if (TREE_CODE_CLASS (code) == tcc_comparison)
4393 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4394 if (FLOAT_TYPE_P (op_type)
4395 && flag_trapping_math
4396 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4397 && code != NE_EXPR && code != EQ_EXPR)
4398 return NULL_TREE;
4400 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4401 if (code == ERROR_MARK)
4402 return NULL_TREE;
4404 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4405 TREE_OPERAND (arg, 1));
4406 copy_warning (ret, arg);
4407 return ret;
4410 switch (code)
4412 case INTEGER_CST:
4413 return constant_boolean_node (integer_zerop (arg), type);
4415 case TRUTH_AND_EXPR:
4416 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4417 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4418 return build2_loc (loc, TRUTH_OR_EXPR, type,
4419 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4420 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4422 case TRUTH_OR_EXPR:
4423 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4424 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4425 return build2_loc (loc, TRUTH_AND_EXPR, type,
4426 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4427 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4429 case TRUTH_XOR_EXPR:
4430 /* Here we can invert either operand. We invert the first operand
4431 unless the second operand is a TRUTH_NOT_EXPR in which case our
4432 result is the XOR of the first operand with the inside of the
4433 negation of the second operand. */
4435 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4436 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4437 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4438 else
4439 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4440 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4441 TREE_OPERAND (arg, 1));
4443 case TRUTH_ANDIF_EXPR:
4444 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4445 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4446 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4447 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4448 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4450 case TRUTH_ORIF_EXPR:
4451 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4452 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4453 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4454 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4455 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4457 case TRUTH_NOT_EXPR:
4458 return TREE_OPERAND (arg, 0);
4460 case COND_EXPR:
4462 tree arg1 = TREE_OPERAND (arg, 1);
4463 tree arg2 = TREE_OPERAND (arg, 2);
4465 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4466 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4468 /* A COND_EXPR may have a throw as one operand, which
4469 then has void type. Just leave void operands
4470 as they are. */
4471 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4472 VOID_TYPE_P (TREE_TYPE (arg1))
4473 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4474 VOID_TYPE_P (TREE_TYPE (arg2))
4475 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4478 case COMPOUND_EXPR:
4479 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4480 return build2_loc (loc, COMPOUND_EXPR, type,
4481 TREE_OPERAND (arg, 0),
4482 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4484 case NON_LVALUE_EXPR:
4485 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4486 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4488 CASE_CONVERT:
4489 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4490 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4492 /* fall through */
4494 case FLOAT_EXPR:
4495 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4496 return build1_loc (loc, TREE_CODE (arg), type,
4497 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4499 case BIT_AND_EXPR:
4500 if (!integer_onep (TREE_OPERAND (arg, 1)))
4501 return NULL_TREE;
4502 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4504 case SAVE_EXPR:
4505 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4507 case CLEANUP_POINT_EXPR:
4508 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4509 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4510 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4512 default:
4513 return NULL_TREE;
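/* A few concrete instances of the inversions above: !(A && B) becomes
   !A || !B and !(A || B) becomes !A && !B (De Morgan); !(A ^ B)
   becomes !A ^ B, while !(A ^ !B) folds the inner negation away and
   becomes A ^ B; and !(X & 1) becomes (X & 1) == 0.  */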
4517 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4518 assume that ARG is an operation that returns a truth value (0 or 1
4519 for scalars, 0 or -1 for vectors). Return the folded expression if
4520 folding is successful. Otherwise, return NULL_TREE. */
4522 static tree
4523 fold_invert_truthvalue (location_t loc, tree arg)
4525 tree type = TREE_TYPE (arg);
4526 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4527 ? BIT_NOT_EXPR
4528 : TRUTH_NOT_EXPR,
4529 type, arg);
4532 /* Return a simplified tree node for the truth-negation of ARG. This
4533 never alters ARG itself. We assume that ARG is an operation that
4534 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4536 tree
4537 invert_truthvalue_loc (location_t loc, tree arg)
4539 if (TREE_CODE (arg) == ERROR_MARK)
4540 return arg;
4542 tree type = TREE_TYPE (arg);
4543 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4544 ? BIT_NOT_EXPR
4545 : TRUTH_NOT_EXPR,
4546 type, arg);
4549 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4550 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4551 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4552 is the original memory reference used to preserve the alias set of
4553 the access. */
4555 static tree
4556 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4557 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4558 int unsignedp, int reversep)
4560 tree result, bftype;
4562 /* Attempt not to lose the access path if possible. */
4563 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4565 tree ninner = TREE_OPERAND (orig_inner, 0);
4566 machine_mode nmode;
4567 poly_int64 nbitsize, nbitpos;
4568 tree noffset;
4569 int nunsignedp, nreversep, nvolatilep = 0;
4570 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4571 &noffset, &nmode, &nunsignedp,
4572 &nreversep, &nvolatilep);
4573 if (base == inner
4574 && noffset == NULL_TREE
4575 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4576 && !reversep
4577 && !nreversep
4578 && !nvolatilep)
4580 inner = ninner;
4581 bitpos -= nbitpos;
4585 alias_set_type iset = get_alias_set (orig_inner);
4586 if (iset == 0 && get_alias_set (inner) != iset)
4587 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4588 build_fold_addr_expr (inner),
4589 build_int_cst (ptr_type_node, 0));
4591 if (known_eq (bitpos, 0) && !reversep)
4593 tree size = TYPE_SIZE (TREE_TYPE (inner));
4594 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4595 || POINTER_TYPE_P (TREE_TYPE (inner)))
4596 && tree_fits_shwi_p (size)
4597 && tree_to_shwi (size) == bitsize)
4598 return fold_convert_loc (loc, type, inner);
4601 bftype = type;
4602 if (TYPE_PRECISION (bftype) != bitsize
4603 || TYPE_UNSIGNED (bftype) == !unsignedp)
4604 bftype = build_nonstandard_integer_type (bitsize, 0);
4606 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4607 bitsize_int (bitsize), bitsize_int (bitpos));
4608 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4610 if (bftype != type)
4611 result = fold_convert_loc (loc, type, result);
4613 return result;
4616 /* Optimize a bit-field compare.
4618 There are two cases: First is a compare against a constant and the
4619 second is a comparison of two items where the fields are at the same
4620 bit position relative to the start of a chunk (byte, halfword, word)
4621 large enough to contain it. In these cases we can avoid the shift
4622 implicit in bitfield extractions.
4624 For constants, we emit a compare of the shifted constant with the
4625 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4626 compared. For two fields at the same position, we do the ANDs with the
4627 similar mask and compare the result of the ANDs.
4629 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4630 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4631 are the left and right operands of the comparison, respectively.
4633 If the optimization described above can be done, we return the resulting
4634 tree. Otherwise we return zero. */
4636 static tree
4637 optimize_bit_field_compare (location_t loc, enum tree_code code,
4638 tree compare_type, tree lhs, tree rhs)
4640 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4641 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4642 tree type = TREE_TYPE (lhs);
4643 tree unsigned_type;
4644 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4645 machine_mode lmode, rmode;
4646 scalar_int_mode nmode;
4647 int lunsignedp, runsignedp;
4648 int lreversep, rreversep;
4649 int lvolatilep = 0, rvolatilep = 0;
4650 tree linner, rinner = NULL_TREE;
4651 tree mask;
4652 tree offset;
4654 /* Get all the information about the extractions being done. If the bit size
4655 is the same as the size of the underlying object, we aren't doing an
4656 extraction at all and so can do nothing. We also don't want to
4657 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4658 then will no longer be able to replace it. */
4659 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4660 &lunsignedp, &lreversep, &lvolatilep);
4661 if (linner == lhs
4662 || !known_size_p (plbitsize)
4663 || !plbitsize.is_constant (&lbitsize)
4664 || !plbitpos.is_constant (&lbitpos)
4665 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4666 || offset != 0
4667 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4668 || lvolatilep)
4669 return 0;
4671 if (const_p)
4672 rreversep = lreversep;
4673 else
4675 /* If this is not a constant, we can only do something if bit positions,
4676 sizes, signedness and storage order are the same. */
4677 rinner
4678 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4679 &runsignedp, &rreversep, &rvolatilep);
4681 if (rinner == rhs
4682 || maybe_ne (lbitpos, rbitpos)
4683 || maybe_ne (lbitsize, rbitsize)
4684 || lunsignedp != runsignedp
4685 || lreversep != rreversep
4686 || offset != 0
4687 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4688 || rvolatilep)
4689 return 0;
4692 /* Honor the C++ memory model and mimic what RTL expansion does. */
4693 poly_uint64 bitstart = 0;
4694 poly_uint64 bitend = 0;
4695 if (TREE_CODE (lhs) == COMPONENT_REF)
4697 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4698 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4699 return 0;
4702 /* See if we can find a mode to refer to this field. We should be able to,
4703 but fail if we can't. */
4704 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4705 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4706 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4707 TYPE_ALIGN (TREE_TYPE (rinner))),
4708 BITS_PER_WORD, false, &nmode))
4709 return 0;
4711 /* Set signed and unsigned types of the precision of this mode for the
4712 shifts below. */
4713 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4715 /* Compute the bit position and size for the new reference and our offset
4716 within it. If the new reference is the same size as the original, we
4717 won't optimize anything, so return zero. */
4718 nbitsize = GET_MODE_BITSIZE (nmode);
4719 nbitpos = lbitpos & ~ (nbitsize - 1);
4720 lbitpos -= nbitpos;
4721 if (nbitsize == lbitsize)
4722 return 0;
4724 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4725 lbitpos = nbitsize - lbitsize - lbitpos;
4727 /* Make the mask to be used against the extracted field. */
4728 mask = build_int_cst_type (unsigned_type, -1);
4729 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4730 mask = const_binop (RSHIFT_EXPR, mask,
4731 size_int (nbitsize - lbitsize - lbitpos));
4733 if (! const_p)
4735 if (nbitpos < 0)
4736 return 0;
4738 /* If not comparing with constant, just rework the comparison
4739 and return. */
4740 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4741 nbitsize, nbitpos, 1, lreversep);
4742 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4743 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4744 nbitsize, nbitpos, 1, rreversep);
4745 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4746 return fold_build2_loc (loc, code, compare_type, t1, t2);
4749 /* Otherwise, we are handling the constant case. See if the constant is too
4750 big for the field. Warn and return a tree for 0 (false) if so. We do
4751 this not only for its own sake, but to avoid having to test for this
4752 error case below. If we didn't, we might generate wrong code.
4754 For unsigned fields, the constant shifted right by the field length should
4755 be all zero. For signed fields, the high-order bits should agree with
4756 the sign bit. */
4758 if (lunsignedp)
4760 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4762 warning (0, "comparison is always %d due to width of bit-field",
4763 code == NE_EXPR);
4764 return constant_boolean_node (code == NE_EXPR, compare_type);
4767 else
4769 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4770 if (tem != 0 && tem != -1)
4772 warning (0, "comparison is always %d due to width of bit-field",
4773 code == NE_EXPR);
4774 return constant_boolean_node (code == NE_EXPR, compare_type);
4778 if (nbitpos < 0)
4779 return 0;
4781 /* Single-bit compares should always be against zero. */
4782 if (lbitsize == 1 && ! integer_zerop (rhs))
4784 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4785 rhs = build_int_cst (type, 0);
4788 /* Make a new bitfield reference, shift the constant over the
4789 appropriate number of bits and mask it with the computed mask
4790 (in case this was a signed field). If we changed it, make a new one. */
4791 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4792 nbitsize, nbitpos, 1, lreversep);
4794 rhs = const_binop (BIT_AND_EXPR,
4795 const_binop (LSHIFT_EXPR,
4796 fold_convert_loc (loc, unsigned_type, rhs),
4797 size_int (lbitpos)),
4798 mask);
4800 lhs = build2_loc (loc, code, compare_type,
4801 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4802 return lhs;
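/* A standalone sketch, in plain unsigned arithmetic, of the mask
   construction used above, for a 3-bit field at bit position 2 within
   an 8-bit unit (nbitsize 8, lbitsize 3, lbitpos 2, little-endian bit
   numbering; the helper name is illustrative):  */

static inline unsigned char
sketch_bit_field_mask ()
{
  unsigned char mask = 0xff;	/* build_int_cst_type (type, -1).  */
  mask <<= 8 - 3;		/* LSHIFT_EXPR: 0xe0.  */
  mask >>= 8 - 3 - 2;		/* RSHIFT_EXPR: 0x1c, ones at bits 2..4.  */
  return mask;
}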
4805 /* Subroutine for fold_truth_andor_1: decode a field reference.
4807 If EXP is a comparison reference, we return the innermost reference.
4809 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4810 set to the starting bit number.
4812 If the innermost field can be completely contained in a mode-sized
4813 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4815 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4816 otherwise it is not changed.
4818 *PUNSIGNEDP is set to the signedness of the field.
4820 *PREVERSEP is set to the storage order of the field.
4822 *PMASK is set to the mask used. This is either contained in a
4823 BIT_AND_EXPR or derived from the width of the field.
4825 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4827 Return 0 if this is not a component reference or is one that we can't
4828 do anything with. */
4830 static tree
4831 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4832 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4833 int *punsignedp, int *preversep, int *pvolatilep,
4834 tree *pmask, tree *pand_mask)
4836 tree exp = *exp_;
4837 tree outer_type = 0;
4838 tree and_mask = 0;
4839 tree mask, inner, offset;
4840 tree unsigned_type;
4841 unsigned int precision;
4843 /* All the optimizations using this function assume integer fields.
4844 There are problems with FP fields since the type_for_size call
4845 below can fail for, e.g., XFmode. */
4846 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4847 return NULL_TREE;
4849 /* We are interested in the bare arrangement of bits, so strip everything
4850 that doesn't affect the machine mode. However, record the type of the
4851 outermost expression if it may matter below. */
4852 if (CONVERT_EXPR_P (exp)
4853 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4854 outer_type = TREE_TYPE (exp);
4855 STRIP_NOPS (exp);
4857 if (TREE_CODE (exp) == BIT_AND_EXPR)
4859 and_mask = TREE_OPERAND (exp, 1);
4860 exp = TREE_OPERAND (exp, 0);
4861 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4862 if (TREE_CODE (and_mask) != INTEGER_CST)
4863 return NULL_TREE;
4866 poly_int64 poly_bitsize, poly_bitpos;
4867 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4868 pmode, punsignedp, preversep, pvolatilep);
4869 if ((inner == exp && and_mask == 0)
4870 || !poly_bitsize.is_constant (pbitsize)
4871 || !poly_bitpos.is_constant (pbitpos)
4872 || *pbitsize < 0
4873 || offset != 0
4874 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4875 /* Reject out-of-bound accesses (PR79731). */
4876 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4877 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4878 *pbitpos + *pbitsize) < 0))
4879 return NULL_TREE;
4881 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4882 if (unsigned_type == NULL_TREE)
4883 return NULL_TREE;
4885 *exp_ = exp;
4887 /* If the number of bits in the reference is the same as the bitsize of
4888 the outer type, then the outer type gives the signedness. Otherwise
4889 (in case of a small bitfield) the signedness is unchanged. */
4890 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4891 *punsignedp = TYPE_UNSIGNED (outer_type);
4893 /* Compute the mask to access the bitfield. */
4894 precision = TYPE_PRECISION (unsigned_type);
4896 mask = build_int_cst_type (unsigned_type, -1);
4898 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4899 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4901 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4902 if (and_mask != 0)
4903 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4904 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4906 *pmask = mask;
4907 *pand_mask = and_mask;
4908 return inner;
4911 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4912 bit positions and the type of MASK is signed. */
4914 static bool
4915 all_ones_mask_p (const_tree mask, unsigned int size)
4917 tree type = TREE_TYPE (mask);
4918 unsigned int precision = TYPE_PRECISION (type);
4920 /* If this function returns true when the type of the mask is
4921 UNSIGNED, then there will be errors. In particular see
4922 gcc.c-torture/execute/990326-1.c. There does not appear to be
4923 any documentation paper trail as to why this is so. But the
4924 pre-wide-int code worked with that restriction and it has been
4925 preserved here. */
4926 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4927 return false;
4929 return wi::mask (size, false, precision) == wi::to_wide (mask);
4932 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4933 represents the sign bit of EXP's type. If EXP represents a sign
4934 or zero extension, also test VAL against the unextended type.
4935 The return value is the (sub)expression whose sign bit is VAL,
4936 or NULL_TREE otherwise. */
4938 tree
4939 sign_bit_p (tree exp, const_tree val)
4941 int width;
4942 tree t;
4944 /* Tree EXP must have an integral type. */
4945 t = TREE_TYPE (exp);
4946 if (! INTEGRAL_TYPE_P (t))
4947 return NULL_TREE;
4949 /* Tree VAL must be an integer constant. */
4950 if (TREE_CODE (val) != INTEGER_CST
4951 || TREE_OVERFLOW (val))
4952 return NULL_TREE;
4954 width = TYPE_PRECISION (t);
4955 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4956 return exp;
4958 /* Handle extension from a narrower type. */
4959 if (TREE_CODE (exp) == NOP_EXPR
4960 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4961 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4963 return NULL_TREE;
4966 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4967 operand is simple enough to be evaluated unconditionally. */
4969 static bool
4970 simple_operand_p (const_tree exp)
4972 /* Strip any conversions that don't change the machine mode. */
4973 STRIP_NOPS (exp);
4975 return (CONSTANT_CLASS_P (exp)
4976 || TREE_CODE (exp) == SSA_NAME
4977 || (DECL_P (exp)
4978 && ! TREE_ADDRESSABLE (exp)
4979 && ! TREE_THIS_VOLATILE (exp)
4980 && ! DECL_NONLOCAL (exp)
4981 /* Don't regard global variables as simple. They may be
4982 allocated in ways unknown to the compiler (shared memory,
4983 #pragma weak, etc.). */
4984 && ! TREE_PUBLIC (exp)
4985 && ! DECL_EXTERNAL (exp)
4986 /* Weakrefs are not safe to be read, since they can be NULL.
4987 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4988 have DECL_WEAK flag set. */
4989 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4990 /* Loading a static variable is unduly expensive, but global
4991 registers aren't expensive. */
4992 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4995 /* Determine if an operand is simple enough to be evaluated unconditionally.
4996 In addition to simple_operand_p, we assume that comparisons, conversions,
4997 and logic-not operations are simple, if their operands are simple, too. */
4999 bool
5000 simple_condition_p (tree exp)
5002 enum tree_code code;
5004 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5005 return false;
5007 while (CONVERT_EXPR_P (exp))
5008 exp = TREE_OPERAND (exp, 0);
5010 code = TREE_CODE (exp);
5012 if (TREE_CODE_CLASS (code) == tcc_comparison)
5013 return (simple_operand_p (TREE_OPERAND (exp, 0))
5014 && simple_operand_p (TREE_OPERAND (exp, 1)));
5016 if (code == TRUTH_NOT_EXPR)
5017 return simple_condition_p (TREE_OPERAND (exp, 0));
5019 return simple_operand_p (exp);
5023 /* The following functions are subroutines to fold_range_test and allow it to
5024 try to change a logical combination of comparisons into a range test.
5026 For example, both
5027 X == 2 || X == 3 || X == 4 || X == 5
5028 and
5029 X >= 2 && X <= 5
5030 are converted to
5031 (unsigned) (X - 2) <= 3
5033 We describe each set of comparisons as being either inside or outside
5034 a range, using a variable named like IN_P, and then describe the
5035 range with a lower and upper bound. If one of the bounds is omitted,
5036 it represents either the highest or lowest value of the type.
5038 In the comments below, we represent a range by two numbers in brackets
5039 preceded by a "+" to designate being inside that range, or a "-" to
5040 designate being outside that range, so the condition can be inverted by
5041 flipping the prefix. An omitted bound is represented by a "-". For
5042 example, "- [-, 10]" means being outside the range starting at the lowest
5043 possible value and ending at 10, in other words, being greater than 10.
5044 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5045 always false.
5047 We set up things so that the missing bounds are handled in a consistent
5048 manner so neither a missing bound nor "true" and "false" need to be
5049 handled using a special case. */
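/* A minimal standalone sketch of the conversion described above (the
   helper name is illustrative):  */

static inline bool
sketch_range_test (unsigned int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5, i.e. + [2, 5]: after
     subtracting the low bound, unsigned wrap-around carries every
     value below 2 past the adjusted high bound 3 == 5 - 2.  */
  return x - 2 <= 3;
}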
5051 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5052 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5053 and UPPER1_P are nonzero if the respective argument is an upper bound
5054 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5055 must be specified for a comparison. ARG1 will be converted to ARG0's
5056 type if both are specified. */
5058 static tree
5059 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5060 tree arg1, int upper1_p)
5062 tree tem;
5063 int result;
5064 int sgn0, sgn1;
5066 /* If neither arg represents infinity, do the normal operation.
5067 Else, if not a comparison, return infinity. Else handle the special
5068 comparison rules. Note that most of the cases below won't occur, but
5069 are handled for consistency. */
5071 if (arg0 != 0 && arg1 != 0)
5073 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5074 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5075 STRIP_NOPS (tem);
5076 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5079 if (TREE_CODE_CLASS (code) != tcc_comparison)
5080 return 0;
5082 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5083 for neither. In real maths, we cannot assume open ended ranges are
5084 the same. But, this is computer arithmetic, where numbers are finite.
5085 We can therefore model any missing bound with a value Z, Z being
5086 greater than any representable number, which permits us to treat
5087 unbounded ranges as equal. */
5088 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5089 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5090 switch (code)
5092 case EQ_EXPR:
5093 result = sgn0 == sgn1;
5094 break;
5095 case NE_EXPR:
5096 result = sgn0 != sgn1;
5097 break;
5098 case LT_EXPR:
5099 result = sgn0 < sgn1;
5100 break;
5101 case LE_EXPR:
5102 result = sgn0 <= sgn1;
5103 break;
5104 case GT_EXPR:
5105 result = sgn0 > sgn1;
5106 break;
5107 case GE_EXPR:
5108 result = sgn0 >= sgn1;
5109 break;
5110 default:
5111 gcc_unreachable ();
5114 return constant_boolean_node (result, type);
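/* For example, with ARG0 a missing lower bound (SGN0 -1, i.e. -Z) and
   ARG1 a missing upper bound (SGN1 1, i.e. +Z), LT_EXPR yields true
   and EQ_EXPR yields false, exactly as if both were ordinary finite
   values.  */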
5117 /* Helper routine for make_range. Perform one step for it, return
5118 new expression if the loop should continue or NULL_TREE if it should
5119 stop. */
5121 tree
5122 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5123 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5124 bool *strict_overflow_p)
5126 tree arg0_type = TREE_TYPE (arg0);
5127 tree n_low, n_high, low = *p_low, high = *p_high;
5128 int in_p = *p_in_p, n_in_p;
5130 switch (code)
5132 case TRUTH_NOT_EXPR:
5133 /* We can only do something if the range is testing for zero. */
5134 if (low == NULL_TREE || high == NULL_TREE
5135 || ! integer_zerop (low) || ! integer_zerop (high))
5136 return NULL_TREE;
5137 *p_in_p = ! in_p;
5138 return arg0;
5140 case EQ_EXPR: case NE_EXPR:
5141 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5142 /* We can only do something if the range is testing for zero
5143 and if the second operand is an integer constant. Note that
5144 saying something is "in" the range we make is done by
5145 complementing IN_P since it will set in the initial case of
5146 being not equal to zero; "out" is leaving it alone. */
5147 if (low == NULL_TREE || high == NULL_TREE
5148 || ! integer_zerop (low) || ! integer_zerop (high)
5149 || TREE_CODE (arg1) != INTEGER_CST)
5150 return NULL_TREE;
5152 switch (code)
5154 case NE_EXPR: /* - [c, c] */
5155 low = high = arg1;
5156 break;
5157 case EQ_EXPR: /* + [c, c] */
5158 in_p = ! in_p, low = high = arg1;
5159 break;
5160 case GT_EXPR: /* - [-, c] */
5161 low = 0, high = arg1;
5162 break;
5163 case GE_EXPR: /* + [c, -] */
5164 in_p = ! in_p, low = arg1, high = 0;
5165 break;
5166 case LT_EXPR: /* - [c, -] */
5167 low = arg1, high = 0;
5168 break;
5169 case LE_EXPR: /* + [-, c] */
5170 in_p = ! in_p, low = 0, high = arg1;
5171 break;
5172 default:
5173 gcc_unreachable ();
5176 /* If this is an unsigned comparison, we also know that EXP is
5177 greater than or equal to zero. We base the range tests we make
5178 on that fact, so we record it here so we can parse existing
5179 range tests. We test arg0_type since often the return type
5180 of, e.g. EQ_EXPR, is boolean. */
5181 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5183 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5184 in_p, low, high, 1,
5185 build_int_cst (arg0_type, 0),
5186 NULL_TREE))
5187 return NULL_TREE;
5189 in_p = n_in_p, low = n_low, high = n_high;
5191 /* If the high bound is missing, but we have a nonzero low
5192 bound, reverse the range so it goes from zero to the low bound
5193 minus 1. */
5194 if (high == 0 && low && ! integer_zerop (low))
5196 in_p = ! in_p;
5197 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5198 build_int_cst (TREE_TYPE (low), 1), 0);
5199 low = build_int_cst (arg0_type, 0);
5203 *p_low = low;
5204 *p_high = high;
5205 *p_in_p = in_p;
5206 return arg0;
5208 case NEGATE_EXPR:
5209 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5210 low and high are non-NULL, then normalize will DTRT. */
5211 if (!TYPE_UNSIGNED (arg0_type)
5212 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5214 if (low == NULL_TREE)
5215 low = TYPE_MIN_VALUE (arg0_type);
5216 if (high == NULL_TREE)
5217 high = TYPE_MAX_VALUE (arg0_type);
5220 /* (-x) IN [a,b] -> x in [-b, -a] */
5221 n_low = range_binop (MINUS_EXPR, exp_type,
5222 build_int_cst (exp_type, 0),
5223 0, high, 1);
5224 n_high = range_binop (MINUS_EXPR, exp_type,
5225 build_int_cst (exp_type, 0),
5226 0, low, 0);
5227 if (n_high != 0 && TREE_OVERFLOW (n_high))
5228 return NULL_TREE;
5229 goto normalize;
5231 case BIT_NOT_EXPR:
5232 /* ~ X -> -X - 1 */
5233 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5234 build_int_cst (exp_type, 1));
5236 case PLUS_EXPR:
5237 case MINUS_EXPR:
5238 if (TREE_CODE (arg1) != INTEGER_CST)
5239 return NULL_TREE;
5241 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5242 move a constant to the other side. */
5243 if (!TYPE_UNSIGNED (arg0_type)
5244 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5245 return NULL_TREE;
5247 /* If EXP is signed, any overflow in the computation is undefined,
5248 so we don't worry about it so long as our computations on
5249 the bounds don't overflow. For unsigned, overflow is defined
5250 and this is exactly the right thing. */
5251 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5252 arg0_type, low, 0, arg1, 0);
5253 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5254 arg0_type, high, 1, arg1, 0);
5255 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5256 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5257 return NULL_TREE;
5259 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5260 *strict_overflow_p = true;
5262 normalize:
5263 /* Check for an unsigned range which has wrapped around the maximum
5264 value thus making n_high < n_low, and normalize it. */
5265 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5267 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5268 build_int_cst (TREE_TYPE (n_high), 1), 0);
5269 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5270 build_int_cst (TREE_TYPE (n_low), 1), 0);
5272 /* If the range is of the form +/- [ x+1, x ], we won't
5273 be able to normalize it. But then, it represents the
5274 whole range or the empty set, so make it
5275 +/- [ -, - ]. */
5276 if (tree_int_cst_equal (n_low, low)
5277 && tree_int_cst_equal (n_high, high))
5278 low = high = 0;
5279 else
5280 in_p = ! in_p;
5282 else
5283 low = n_low, high = n_high;
5285 *p_low = low;
5286 *p_high = high;
5287 *p_in_p = in_p;
5288 return arg0;
5290 CASE_CONVERT:
5291 case NON_LVALUE_EXPR:
5292 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5293 return NULL_TREE;
5295 if (! INTEGRAL_TYPE_P (arg0_type)
5296 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5297 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5298 return NULL_TREE;
5300 n_low = low, n_high = high;
5302 if (n_low != 0)
5303 n_low = fold_convert_loc (loc, arg0_type, n_low);
5305 if (n_high != 0)
5306 n_high = fold_convert_loc (loc, arg0_type, n_high);
5308 /* If we're converting arg0 from an unsigned type to exp,
5309 a signed type, we will be doing the comparison as unsigned.
5310 The tests above have already verified that LOW and HIGH
5311 are both positive.
5313 So we have to ensure that we will handle large unsigned
5314 values the same way that the current signed bounds treat
5315 negative values. */
5317 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5319 tree high_positive;
5320 tree equiv_type;
5321 /* For fixed-point modes, we need to pass the saturating flag
5322 as the 2nd parameter. */
5323 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5324 equiv_type
5325 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5326 TYPE_SATURATING (arg0_type));
5327 else
5328 equiv_type
5329 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5331 /* A range without an upper bound is, naturally, unbounded.
5332 Since convert would have cropped a very large value, use
5333 the max value for the destination type. */
5334 high_positive
5335 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5336 : TYPE_MAX_VALUE (arg0_type);
5338 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5339 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5340 fold_convert_loc (loc, arg0_type,
5341 high_positive),
5342 build_int_cst (arg0_type, 1));
5344 /* If the low bound is specified, "and" the range with the
5345 range for which the original unsigned value will be
5346 positive. */
5347 if (low != 0)
5349 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5350 1, fold_convert_loc (loc, arg0_type,
5351 integer_zero_node),
5352 high_positive))
5353 return NULL_TREE;
5355 in_p = (n_in_p == in_p);
5357 else
5359 /* Otherwise, "or" the range with the range of the input
5360 that will be interpreted as negative. */
5361 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5362 1, fold_convert_loc (loc, arg0_type,
5363 integer_zero_node),
5364 high_positive))
5365 return NULL_TREE;
5367 in_p = (in_p != n_in_p);
5371 /* Otherwise, if we are converting arg0 from a signed type to exp,
5372 an unsigned type, we will do the comparison as signed. If
5373 high is non-NULL, we punt above if it doesn't fit in the signed
5374 type, so if we get through here, +[-, high] or +[low, high] are
5375 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5376 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5377 high is not, the +[low, -] range is equivalent to union of
5378 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5379 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5380 low being 0, which should be treated as [-, -]. */
5381 else if (TYPE_UNSIGNED (exp_type)
5382 && !TYPE_UNSIGNED (arg0_type)
5383 && low
5384 && !high)
5386 if (integer_zerop (low))
5387 n_low = NULL_TREE;
5388 else
5390 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5391 n_low, build_int_cst (arg0_type, -1));
5392 n_low = build_zero_cst (arg0_type);
5393 in_p = !in_p;
5397 *p_low = n_low;
5398 *p_high = n_high;
5399 *p_in_p = in_p;
5400 return arg0;
5402 default:
5403 return NULL_TREE;
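/* A worked instance of the PLUS_EXPR case with normalization, for
   unsigned char X: X + 20 in + [10, 30] first becomes X in
   [10 - 20, 30 - 20] = [246, 10], whose bounds are inverted, so it is
   normalized to the complement - [11, 245].  Spot check: X == 0 gives
   20, inside the original range, and 0 is indeed outside [11, 245].  */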
5407 /* Given EXP, a logical expression, set the range it is testing into
5408 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5409 actually being tested. *PLOW and *PHIGH will be made of the same
5410 type as the returned expression. If EXP is not a comparison, we
5411 will most likely not be returning a useful value and range. Set
5412 *STRICT_OVERFLOW_P to true if the return value is only valid
5413 because signed overflow is undefined; otherwise, do not change
5414 *STRICT_OVERFLOW_P. */
5416 tree
5417 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5418 bool *strict_overflow_p)
5420 enum tree_code code;
5421 tree arg0, arg1 = NULL_TREE;
5422 tree exp_type, nexp;
5423 int in_p;
5424 tree low, high;
5425 location_t loc = EXPR_LOCATION (exp);
5427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5428 and see if we can refine the range. Some of the cases below may not
5429 happen, but it doesn't seem worth worrying about this. We "continue"
5430 the outer loop when we've changed something; otherwise we "break"
5431 the switch, which will "break" the while. */
5433 in_p = 0;
5434 low = high = build_int_cst (TREE_TYPE (exp), 0);
5436 while (1)
5438 code = TREE_CODE (exp);
5439 exp_type = TREE_TYPE (exp);
5440 arg0 = NULL_TREE;
5442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5444 if (TREE_OPERAND_LENGTH (exp) > 0)
5445 arg0 = TREE_OPERAND (exp, 0);
5446 if (TREE_CODE_CLASS (code) == tcc_binary
5447 || TREE_CODE_CLASS (code) == tcc_comparison
5448 || (TREE_CODE_CLASS (code) == tcc_expression
5449 && TREE_OPERAND_LENGTH (exp) > 1))
5450 arg1 = TREE_OPERAND (exp, 1);
5452 if (arg0 == NULL_TREE)
5453 break;
5455 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5456 &high, &in_p, strict_overflow_p);
5457 if (nexp == NULL_TREE)
5458 break;
5459 exp = nexp;
5462 /* If EXP is a constant, we can evaluate whether this is true or false. */
5463 if (TREE_CODE (exp) == INTEGER_CST)
5465 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5466 exp, 0, low, 0))
5467 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5468 exp, 1, high, 1)));
5469 low = high = 0;
5470 exp = 0;
5473 *pin_p = in_p, *plow = low, *phigh = high;
5474 return exp;
5477 /* Returns TRUE if a [LOW, HIGH] range check can be optimized to
5478 a bitwise check, i.e. when
5479 LOW == 0xXX...X00...0
5480 HIGH == 0xXX...X11...1
5481 Return the corresponding mask in MASK and stem in VALUE. */
5483 static bool
5484 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5485 tree *value)
5487 if (TREE_CODE (low) != INTEGER_CST
5488 || TREE_CODE (high) != INTEGER_CST)
5489 return false;
5491 unsigned prec = TYPE_PRECISION (type);
5492 wide_int lo = wi::to_wide (low, prec);
5493 wide_int hi = wi::to_wide (high, prec);
5495 wide_int end_mask = lo ^ hi;
5496 if ((end_mask & (end_mask + 1)) != 0
5497 || (lo & end_mask) != 0)
5498 return false;
5500 wide_int stem_mask = ~end_mask;
5501 wide_int stem = lo & stem_mask;
5502 if (stem != (hi & stem_mask))
5503 return false;
5505 *mask = wide_int_to_tree (type, stem_mask);
5506 *value = wide_int_to_tree (type, stem);
5508 return true;
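/* A standalone sketch of the test above with 8-bit values, for LOW
   0x30 and HIGH 0x37: LOW ^ HIGH == 0x07 is a contiguous run of
   low-order ones none of which are set in LOW, so the range check
   collapses to a mask compare (the helper name is illustrative):  */

static inline bool
sketch_maskable_range (unsigned char x)
{
  /* stem_mask = ~0x07 = 0xf8, stem = 0x30 & 0xf8 = 0x30.  */
  return (x & 0xf8) == 0x30;	/* Same as 0x30 <= x && x <= 0x37.  */
}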
5511 /* Helper routine for build_range_check and match.pd. Return the type to
5512 perform the check or NULL if it shouldn't be optimized. */
5514 tree
5515 range_check_type (tree etype)
5517 /* First make sure that arithmetic in this type is valid, then make sure
5518 that it wraps around. */
5519 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5520 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5522 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5524 tree utype, minv, maxv;
5526 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5527 for the type in question, as we rely on this here. */
5528 utype = unsigned_type_for (etype);
5529 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5530 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5531 build_int_cst (TREE_TYPE (maxv), 1), 1);
5532 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5534 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5535 minv, 1, maxv, 1)))
5536 etype = utype;
5537 else
5538 return NULL_TREE;
5540 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5541 etype = unsigned_type_for (etype);
5542 return etype;
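/* For 32-bit int, for instance, (unsigned) INT_MAX + 1 is 0x80000000,
   which equals (unsigned) INT_MIN, so the check above succeeds and the
   unsigned variant of the type is returned.  */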
5545 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5546 type, TYPE, return an expression to test if EXP is in (or out of, depending
5547 on IN_P) the range. Return 0 if the test couldn't be created. */
5549 tree
5550 build_range_check (location_t loc, tree type, tree exp, int in_p,
5551 tree low, tree high)
5553 tree etype = TREE_TYPE (exp), mask, value;
5555 /* Disable this optimization for function pointer expressions
5556 on targets that require function pointer canonicalization. */
5557 if (targetm.have_canonicalize_funcptr_for_compare ()
5558 && POINTER_TYPE_P (etype)
5559 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5560 return NULL_TREE;
5562 if (! in_p)
5564 value = build_range_check (loc, type, exp, 1, low, high);
5565 if (value != 0)
5566 return invert_truthvalue_loc (loc, value);
5568 return 0;
5571 if (low == 0 && high == 0)
5572 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5574 if (low == 0)
5575 return fold_build2_loc (loc, LE_EXPR, type, exp,
5576 fold_convert_loc (loc, etype, high));
5578 if (high == 0)
5579 return fold_build2_loc (loc, GE_EXPR, type, exp,
5580 fold_convert_loc (loc, etype, low));
5582 if (operand_equal_p (low, high, 0))
5583 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5584 fold_convert_loc (loc, etype, low));
5586 if (TREE_CODE (exp) == BIT_AND_EXPR
5587 && maskable_range_p (low, high, etype, &mask, &value))
5588 return fold_build2_loc (loc, EQ_EXPR, type,
5589 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5590 exp, mask),
5591 value);
5593 if (integer_zerop (low))
5595 if (! TYPE_UNSIGNED (etype))
5597 etype = unsigned_type_for (etype);
5598 high = fold_convert_loc (loc, etype, high);
5599 exp = fold_convert_loc (loc, etype, exp);
5601 return build_range_check (loc, type, exp, 1, 0, high);
5604 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5605 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5607 int prec = TYPE_PRECISION (etype);
5609 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5611 if (TYPE_UNSIGNED (etype))
5613 tree signed_etype = signed_type_for (etype);
5614 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5615 etype
5616 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5617 else
5618 etype = signed_etype;
5619 exp = fold_convert_loc (loc, etype, exp);
5621 return fold_build2_loc (loc, GT_EXPR, type, exp,
5622 build_int_cst (etype, 0));
5626 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5627 This requires wrap-around arithmetic for the type of the expression. */
5628 etype = range_check_type (etype);
5629 if (etype == NULL_TREE)
5630 return NULL_TREE;
5632 high = fold_convert_loc (loc, etype, high);
5633 low = fold_convert_loc (loc, etype, low);
5634 exp = fold_convert_loc (loc, etype, exp);
5636 value = const_binop (MINUS_EXPR, high, low);
5638 if (value != 0 && !TREE_OVERFLOW (value))
5639 return build_range_check (loc, type,
5640 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5641 1, build_int_cst (etype, 0), value);
5643 return 0;
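/* For illustration (a sketch, exact trees depend on the target's char
   signedness): asking build_range_check for IN_P = 1, low = 'a',
   high = 'z' goes through the final MINUS_EXPR rewrite above and
   produces the single comparison
     (unsigned char) (c - 'a') <= 25
   folding a two-branch range test into one unsigned compare.  */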
5646 /* Return the predecessor of VAL in its type, handling the infinite case. */
5648 static tree
5649 range_predecessor (tree val)
5651 tree type = TREE_TYPE (val);
5653 if (INTEGRAL_TYPE_P (type)
5654 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5655 return 0;
5656 else
5657 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5658 build_int_cst (TREE_TYPE (val), 1), 0);
5661 /* Return the successor of VAL in its type, handling the infinite case. */
5663 static tree
5664 range_successor (tree val)
5666 tree type = TREE_TYPE (val);
5668 if (INTEGRAL_TYPE_P (type)
5669 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5670 return 0;
5671 else
5672 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5673 build_int_cst (TREE_TYPE (val), 1), 0);
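/* For illustration: in an 8-bit unsigned type, range_successor on 255
   and range_predecessor on 0 both return 0 -- no neighbouring value
   exists -- and the callers below treat that as "punt" rather than
   silently wrapping around.  */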
5676 /* Given two ranges, see if we can merge them into one. Return 1 if we
5677 can, 0 if we can't. Set the output range into the specified parameters. */
5679 bool
5680 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5681 tree high0, int in1_p, tree low1, tree high1)
5683 bool no_overlap;
5684 int subset;
5685 int temp;
5686 tree tem;
5687 int in_p;
5688 tree low, high;
5689 int lowequal = ((low0 == 0 && low1 == 0)
5690 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5691 low0, 0, low1, 0)));
5692 int highequal = ((high0 == 0 && high1 == 0)
5693 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5694 high0, 1, high1, 1)));
5696 /* Make range 0 be the range that starts first, or ends last if they
5697 start at the same value. Swap them if it isn't. */
5698 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5699 low0, 0, low1, 0))
5700 || (lowequal
5701 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5702 high1, 1, high0, 1))))
5704 temp = in0_p, in0_p = in1_p, in1_p = temp;
5705 tem = low0, low0 = low1, low1 = tem;
5706 tem = high0, high0 = high1, high1 = tem;
5709 /* If the second range is != high1 where high1 is the type maximum of
5710 the type, try first merging with < high1 range. */
5711 if (low1
5712 && high1
5713 && TREE_CODE (low1) == INTEGER_CST
5714 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5715 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5716 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5717 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5718 && operand_equal_p (low1, high1, 0))
5720 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5721 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5722 !in1_p, NULL_TREE, range_predecessor (low1)))
5723 return true;
5724 /* Similarly for the second range != low1 where low1 is the type minimum
5725 of the type, try first merging with > low1 range. */
5726 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5727 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5728 !in1_p, range_successor (low1), NULL_TREE))
5729 return true;
5732 /* Now flag two cases, whether the ranges are disjoint or whether the
5733 second range is totally subsumed in the first. Note that the tests
5734 below are simplified by the ones above. */
5735 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5736 high0, 1, low1, 0));
5737 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5738 high1, 1, high0, 1));
5740 /* We now have four cases, depending on whether we are including or
5741 excluding the two ranges. */
5742 if (in0_p && in1_p)
5744 /* If they don't overlap, the result is false. If the second range
5745 is a subset it is the result. Otherwise, the range is from the start
5746 of the second to the end of the first. */
5747 if (no_overlap)
5748 in_p = 0, low = high = 0;
5749 else if (subset)
5750 in_p = 1, low = low1, high = high1;
5751 else
5752 in_p = 1, low = low1, high = high0;
5755 else if (in0_p && ! in1_p)
5757 /* If they don't overlap, the result is the first range. If they are
5758 equal, the result is false. If the second range is a subset of the
5759 first, and the ranges begin at the same place, we go from just after
5760 the end of the second range to the end of the first. If the second
5761 range is not a subset of the first, or if it is a subset and both
5762 ranges end at the same place, the range starts at the start of the
5763 first range and ends just before the second range.
5764 Otherwise, we can't describe this as a single range. */
5765 if (no_overlap)
5766 in_p = 1, low = low0, high = high0;
5767 else if (lowequal && highequal)
5768 in_p = 0, low = high = 0;
5769 else if (subset && lowequal)
5771 low = range_successor (high1);
5772 high = high0;
5773 in_p = 1;
5774 if (low == 0)
5776 /* We are in the weird situation where high0 > high1 but
5777 high1 has no successor. Punt. */
5778 return 0;
5781 else if (! subset || highequal)
5783 low = low0;
5784 high = range_predecessor (low1);
5785 in_p = 1;
5786 if (high == 0)
5788 /* low0 < low1 but low1 has no predecessor. Punt. */
5789 return 0;
5792 else
5793 return 0;
5796 else if (! in0_p && in1_p)
5798 /* If they don't overlap, the result is the second range. If the second
5799 is a subset of the first, the result is false. Otherwise,
5800 the range starts just after the first range and ends at the
5801 end of the second. */
5802 if (no_overlap)
5803 in_p = 1, low = low1, high = high1;
5804 else if (subset || highequal)
5805 in_p = 0, low = high = 0;
5806 else
5808 low = range_successor (high0);
5809 high = high1;
5810 in_p = 1;
5811 if (low == 0)
5813 /* high1 > high0 but high0 has no successor. Punt. */
5814 return 0;
5819 else
5821 /* The case where we are excluding both ranges. Here the complex case
5822 is if they don't overlap. In that case, the only time we have a
5823 range is if they are adjacent. If the second is a subset of the
5824 first, the result is the first. Otherwise, the range to exclude
5825 starts at the beginning of the first range and ends at the end of the
5826 second. */
5827 if (no_overlap)
5829 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5830 range_successor (high0),
5831 1, low1, 0)))
5832 in_p = 0, low = low0, high = high1;
5833 else
5835 /* Canonicalize - [min, x] into - [-, x]. */
5836 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5837 switch (TREE_CODE (TREE_TYPE (low0)))
5839 case ENUMERAL_TYPE:
5840 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5841 GET_MODE_BITSIZE
5842 (TYPE_MODE (TREE_TYPE (low0)))))
5843 break;
5844 /* FALLTHROUGH */
5845 case INTEGER_TYPE:
5846 if (tree_int_cst_equal (low0,
5847 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5848 low0 = 0;
5849 break;
5850 case POINTER_TYPE:
5851 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5852 && integer_zerop (low0))
5853 low0 = 0;
5854 break;
5855 default:
5856 break;
5859 /* Canonicalize - [x, max] into - [x, -]. */
5860 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5861 switch (TREE_CODE (TREE_TYPE (high1)))
5863 case ENUMERAL_TYPE:
5864 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5865 GET_MODE_BITSIZE
5866 (TYPE_MODE (TREE_TYPE (high1)))))
5867 break;
5868 /* FALLTHROUGH */
5869 case INTEGER_TYPE:
5870 if (tree_int_cst_equal (high1,
5871 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5872 high1 = 0;
5873 break;
5874 case POINTER_TYPE:
5875 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5876 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5877 high1, 1,
5878 build_int_cst (TREE_TYPE (high1), 1),
5879 1)))
5880 high1 = 0;
5881 break;
5882 default:
5883 break;
5886 /* The ranges might also be adjacent across the maximum and
5887 minimum values of the given type. For
5888 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5889 return + [x + 1, y - 1]. */
5890 if (low0 == 0 && high1 == 0)
5892 low = range_successor (high0);
5893 high = range_predecessor (low1);
5894 if (low == 0 || high == 0)
5895 return 0;
5897 in_p = 1;
5899 else
5900 return 0;
5903 else if (subset)
5904 in_p = 0, low = low0, high = high0;
5905 else
5906 in_p = 0, low = low0, high = high1;
5909 *pin_p = in_p, *plow = low, *phigh = high;
5910 return 1;
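/* For illustration (worked example, not from the sources): merging
   + [0, 10] with + [5, 20], both IN_P set, takes the in0_p && in1_p
   branch with no_overlap and subset both false and yields the
   intersection + [5, 10].  Merging + [0, 4] with + [10, 20] instead
   sets no_overlap and yields the always-false range
   in_p = 0, low = high = 0.  */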
5914 /* Subroutine of fold, looking inside expressions of the form
5915 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5916 are the three operands of the COND_EXPR. This function is
5917 being used also to optimize A op B ? C : A, by reversing the
5918 comparison first.
5920 Return a folded expression whose code is not a COND_EXPR
5921 anymore, or NULL_TREE if no folding opportunity is found. */
5923 static tree
5924 fold_cond_expr_with_comparison (location_t loc, tree type,
5925 enum tree_code comp_code,
5926 tree arg00, tree arg01, tree arg1, tree arg2)
5928 tree arg1_type = TREE_TYPE (arg1);
5929 tree tem;
5931 STRIP_NOPS (arg1);
5932 STRIP_NOPS (arg2);
5934 /* If we have A op 0 ? A : -A, consider applying the following
5935 transformations:
5937 A == 0? A : -A same as -A
5938 A != 0? A : -A same as A
5939 A >= 0? A : -A same as abs (A)
5940 A > 0? A : -A same as abs (A)
5941 A <= 0? A : -A same as -abs (A)
5942 A < 0? A : -A same as -abs (A)
5944 None of these transformations work for modes with signed
5945 zeros. If A is +/-0, the first two transformations will
5946 change the sign of the result (from +0 to -0, or vice
5947 versa). The last four will fix the sign of the result,
5948 even though the original expressions could be positive or
5949 negative, depending on the sign of A.
5951 Note that all these transformations are correct if A is
5952 NaN, since the two alternatives (A and -A) are also NaNs. */
5953 if (!HONOR_SIGNED_ZEROS (type)
5954 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5955 ? real_zerop (arg01)
5956 : integer_zerop (arg01))
5957 && ((TREE_CODE (arg2) == NEGATE_EXPR
5958 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5959 /* In the case that A is of the form X-Y, '-A' (arg2) may
5960 have already been folded to Y-X, check for that. */
5961 || (TREE_CODE (arg1) == MINUS_EXPR
5962 && TREE_CODE (arg2) == MINUS_EXPR
5963 && operand_equal_p (TREE_OPERAND (arg1, 0),
5964 TREE_OPERAND (arg2, 1), 0)
5965 && operand_equal_p (TREE_OPERAND (arg1, 1),
5966 TREE_OPERAND (arg2, 0), 0))))
5967 switch (comp_code)
5969 case EQ_EXPR:
5970 case UNEQ_EXPR:
5971 tem = fold_convert_loc (loc, arg1_type, arg1);
5972 return fold_convert_loc (loc, type, negate_expr (tem));
5973 case NE_EXPR:
5974 case LTGT_EXPR:
5975 return fold_convert_loc (loc, type, arg1);
5976 case UNGE_EXPR:
5977 case UNGT_EXPR:
5978 if (flag_trapping_math)
5979 break;
5980 /* Fall through. */
5981 case GE_EXPR:
5982 case GT_EXPR:
5983 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5984 break;
5985 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5986 return fold_convert_loc (loc, type, tem);
5987 case UNLE_EXPR:
5988 case UNLT_EXPR:
5989 if (flag_trapping_math)
5990 break;
5991 /* FALLTHRU */
5992 case LE_EXPR:
5993 case LT_EXPR:
5994 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5995 break;
5996 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5997 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5999 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6000 is not: it invokes UB both in abs and in the negation of it.
6001 So, use ABSU_EXPR instead. */
6002 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6003 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6004 tem = negate_expr (tem);
6005 return fold_convert_loc (loc, type, tem);
6007 else
6009 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6010 return negate_expr (fold_convert_loc (loc, type, tem));
6012 default:
6013 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6014 break;
6017 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6018 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6019 both transformations are correct when A is NaN: A != 0
6020 is then true, and A == 0 is false. */
6022 if (!HONOR_SIGNED_ZEROS (type)
6023 && integer_zerop (arg01) && integer_zerop (arg2))
6025 if (comp_code == NE_EXPR)
6026 return fold_convert_loc (loc, type, arg1);
6027 else if (comp_code == EQ_EXPR)
6028 return build_zero_cst (type);
6031 /* Try some transformations of A op B ? A : B.
6033 A == B? A : B same as B
6034 A != B? A : B same as A
6035 A >= B? A : B same as max (A, B)
6036 A > B? A : B same as max (B, A)
6037 A <= B? A : B same as min (A, B)
6038 A < B? A : B same as min (B, A)
6040 As above, these transformations don't work in the presence
6041 of signed zeros. For example, if A and B are zeros of
6042 opposite sign, the first two transformations will change
6043 the sign of the result. In the last four, the original
6044 expressions give different results for (A=+0, B=-0) and
6045 (A=-0, B=+0), but the transformed expressions do not.
6047 The first two transformations are correct if either A or B
6048 is a NaN. In the first transformation, the condition will
6049 be false, and B will indeed be chosen. In the case of the
6050 second transformation, the condition A != B will be true,
6051 and A will be chosen.
6053 The conversions to max() and min() are not correct if B is
6054 a number and A is not. The conditions in the original
6055 expressions will be false, so all four give B. The min()
6056 and max() versions would give a NaN instead. */
6057 if (!HONOR_SIGNED_ZEROS (type)
6058 && operand_equal_for_comparison_p (arg01, arg2)
6059 /* Avoid these transformations if the COND_EXPR may be used
6060 as an lvalue in the C++ front-end. PR c++/19199. */
6061 && (in_gimple_form
6062 || VECTOR_TYPE_P (type)
6063 || (! lang_GNU_CXX ()
6064 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6065 || ! maybe_lvalue_p (arg1)
6066 || ! maybe_lvalue_p (arg2)))
6068 tree comp_op0 = arg00;
6069 tree comp_op1 = arg01;
6070 tree comp_type = TREE_TYPE (comp_op0);
6072 switch (comp_code)
6074 case EQ_EXPR:
6075 return fold_convert_loc (loc, type, arg2);
6076 case NE_EXPR:
6077 return fold_convert_loc (loc, type, arg1);
6078 case LE_EXPR:
6079 case LT_EXPR:
6080 case UNLE_EXPR:
6081 case UNLT_EXPR:
6082 /* In C++ a ?: expression can be an lvalue, so put the
6083 operand which will be used if they are equal first
6084 so that we can convert this back to the
6085 corresponding COND_EXPR. */
6086 if (!HONOR_NANS (arg1))
6088 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6089 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6090 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6091 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6092 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6093 comp_op1, comp_op0);
6094 return fold_convert_loc (loc, type, tem);
6096 break;
6097 case GE_EXPR:
6098 case GT_EXPR:
6099 case UNGE_EXPR:
6100 case UNGT_EXPR:
6101 if (!HONOR_NANS (arg1))
6103 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6104 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6105 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6106 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6107 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6108 comp_op1, comp_op0);
6109 return fold_convert_loc (loc, type, tem);
6111 break;
6112 case UNEQ_EXPR:
6113 if (!HONOR_NANS (arg1))
6114 return fold_convert_loc (loc, type, arg2);
6115 break;
6116 case LTGT_EXPR:
6117 if (!HONOR_NANS (arg1))
6118 return fold_convert_loc (loc, type, arg1);
6119 break;
6120 default:
6121 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6122 break;
6126 return NULL_TREE;
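/* For illustration: for signed integral A the first switch above
   rewrites A >= 0 ? A : -A into abs (A), and A <= 0 ? A : -A into the
   negation of ABSU (A) computed in the unsigned type, which stays well
   defined even for A == INT_MIN, where -abs (INT_MIN) would overflow.
   The second switch turns A > B ? A : B into MAX_EXPR <B, A> when NaNs
   need not be honored.  */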
6131 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6132 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6133 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6134 false) >= 2)
6135 #endif
6137 /* EXP is some logical combination of boolean tests. See if we can
6138 merge it into some range test. Return the new tree if so. */
6140 static tree
6141 fold_range_test (location_t loc, enum tree_code code, tree type,
6142 tree op0, tree op1)
6144 int or_op = (code == TRUTH_ORIF_EXPR
6145 || code == TRUTH_OR_EXPR);
6146 int in0_p, in1_p, in_p;
6147 tree low0, low1, low, high0, high1, high;
6148 bool strict_overflow_p = false;
6149 tree tem, lhs, rhs;
6150 const char * const warnmsg = G_("assuming signed overflow does not occur "
6151 "when simplifying range test");
6153 if (!INTEGRAL_TYPE_P (type))
6154 return 0;
6156 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6157 /* If op0 is known true or false and this is a short-circuiting
6158 operation we must not merge with op1 since that makes side-effects
6159 unconditional. So special-case this. */
6160 if (!lhs
6161 && ((code == TRUTH_ORIF_EXPR && in0_p)
6162 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6163 return op0;
6164 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6166 /* If this is an OR operation, invert both sides; we will invert
6167 again at the end. */
6168 if (or_op)
6169 in0_p = ! in0_p, in1_p = ! in1_p;
6171 /* If both expressions are the same, if we can merge the ranges, and we
6172 can build the range test, return it or it inverted. If one of the
6173 ranges is always true or always false, consider it to be the same
6174 expression as the other. */
6175 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6176 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6177 in1_p, low1, high1)
6178 && (tem = (build_range_check (loc, type,
6179 lhs != 0 ? lhs
6180 : rhs != 0 ? rhs : integer_zero_node,
6181 in_p, low, high))) != 0)
6183 if (strict_overflow_p)
6184 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6185 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6188 /* On machines where the branch cost is expensive, if this is a
6189 short-circuited branch and the underlying object on both sides
6190 is the same, make a non-short-circuit operation. */
6191 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6192 if (param_logical_op_non_short_circuit != -1)
6193 logical_op_non_short_circuit
6194 = param_logical_op_non_short_circuit;
6195 if (logical_op_non_short_circuit
6196 && !sanitize_coverage_p ()
6197 && lhs != 0 && rhs != 0
6198 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6199 && operand_equal_p (lhs, rhs, 0))
6201 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6202 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6203 which cases we can't do this. */
6204 if (simple_operand_p (lhs))
6205 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6206 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6207 type, op0, op1);
6209 else if (!lang_hooks.decls.global_bindings_p ()
6210 && !CONTAINS_PLACEHOLDER_P (lhs))
6212 tree common = save_expr (lhs);
6214 if ((lhs = build_range_check (loc, type, common,
6215 or_op ? ! in0_p : in0_p,
6216 low0, high0)) != 0
6217 && (rhs = build_range_check (loc, type, common,
6218 or_op ? ! in1_p : in1_p,
6219 low1, high1)) != 0)
6221 if (strict_overflow_p)
6222 fold_overflow_warning (warnmsg,
6223 WARN_STRICT_OVERFLOW_COMPARISON);
6224 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6225 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6226 type, lhs, rhs);
6231 return 0;
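/* For illustration (roughly; the exact tree depends on the types):
   for ch >= '0' && ch <= '9', make_range reduces both operands to
   ranges over the same expression ch, merge_ranges combines them into
   the single range + ['0', '9'], and build_range_check then emits
     (unsigned) (ch - '0') <= 9
   replacing two conditional branches with one comparison.  */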
6234 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6235 bit value. Arrange things so the extra bits will be set to zero if and
6236 only if C is sign-extended to its full width. If MASK is nonzero,
6237 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6239 static tree
6240 unextend (tree c, int p, int unsignedp, tree mask)
6242 tree type = TREE_TYPE (c);
6243 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6244 tree temp;
6246 if (p == modesize || unsignedp)
6247 return c;
6249 /* We work by getting just the sign bit into the low-order bit, then
6250 into the high-order bit, then sign-extend. We then XOR that value
6251 with C. */
6252 temp = build_int_cst (TREE_TYPE (c),
6253 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6255 /* We must use a signed type in order to get an arithmetic right shift.
6256 However, we must also avoid introducing accidental overflows, so that
6257 a subsequent call to integer_zerop will work. Hence we must
6258 do the type conversion here. At this point, the constant is either
6259 zero or one, and the conversion to a signed type can never overflow.
6260 We could get an overflow if this conversion is done anywhere else. */
6261 if (TYPE_UNSIGNED (type))
6262 temp = fold_convert (signed_type_for (type), temp);
6264 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6265 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6266 if (mask != 0)
6267 temp = const_binop (BIT_AND_EXPR, temp,
6268 fold_convert (TREE_TYPE (c), mask));
6269 /* If necessary, convert the type back to match the type of C. */
6270 if (TYPE_UNSIGNED (type))
6271 temp = fold_convert (type, temp);
6273 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
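/* For illustration (worked example, not from the sources): with P = 4,
   UNSIGNEDP = 0 and C = 0x0c in a 32-bit mode, the extracted sign bit
   of the 4-bit field (bit 3, which is set) becomes 0xfffffff0 after
   the two shifts, and C ^ 0xfffffff0 is 0xfffffffc -- exactly C
   sign-extended from 4 bits to the full mode, so a later equality
   test sees the field's real signed value.  */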
6276 /* For an expression that has the form
6277 (A && B) || ~B
6279 (A || B) && ~B,
6280 we can drop one of the inner expressions and simplify to
6281 A || ~B
6283 A && ~B
6284 LOC is the location of the resulting expression. OP is the inner
6285 logical operation; the left-hand side in the examples above, while CMPOP
6286 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6287 removing a condition that guards another, as in
6288 (A != NULL && A->...) || A == NULL
6289 which we must not transform. If RHS_ONLY is true, only eliminate the
6290 right-most operand of the inner logical operation. */
6292 static tree
6293 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6294 bool rhs_only)
6296 tree type = TREE_TYPE (cmpop);
6297 enum tree_code code = TREE_CODE (cmpop);
6298 enum tree_code truthop_code = TREE_CODE (op);
6299 tree lhs = TREE_OPERAND (op, 0);
6300 tree rhs = TREE_OPERAND (op, 1);
6301 tree orig_lhs = lhs, orig_rhs = rhs;
6302 enum tree_code rhs_code = TREE_CODE (rhs);
6303 enum tree_code lhs_code = TREE_CODE (lhs);
6304 enum tree_code inv_code;
6306 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6307 return NULL_TREE;
6309 if (TREE_CODE_CLASS (code) != tcc_comparison)
6310 return NULL_TREE;
6312 if (rhs_code == truthop_code)
6314 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6315 if (newrhs != NULL_TREE)
6317 rhs = newrhs;
6318 rhs_code = TREE_CODE (rhs);
6321 if (lhs_code == truthop_code && !rhs_only)
6323 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6324 if (newlhs != NULL_TREE)
6326 lhs = newlhs;
6327 lhs_code = TREE_CODE (lhs);
6331 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6332 if (inv_code == rhs_code
6333 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6334 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6335 return lhs;
6336 if (!rhs_only && inv_code == lhs_code
6337 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6338 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6339 return rhs;
6340 if (rhs != orig_rhs || lhs != orig_lhs)
6341 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6342 lhs, rhs);
6343 return NULL_TREE;
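/* For illustration: given (a < b && c != 0) || a >= b with RHS_ONLY
   false, the inversion of CMPOP a >= b is a < b (assuming no NaNs),
   which matches the left comparison inside OP, so the function returns
   c != 0 and the caller can fold the whole test to c != 0 || a >= b.
   The RHS_ONLY flag exists precisely so that guards like the A != NULL
   in the comment above are never dropped.  */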
6346 /* Find ways of folding logical expressions of LHS and RHS:
6347 Try to merge two comparisons to the same innermost item.
6348 Look for range tests like "ch >= '0' && ch <= '9'".
6349 Look for combinations of simple terms on machines with expensive branches
6350 and evaluate the RHS unconditionally.
6352 For example, if we have p->a == 2 && p->b == 4 and we can make an
6353 object large enough to span both A and B, we can do this with a comparison
6354 against the object ANDed with the a mask.
6356 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6357 operations to do this with one comparison.
6359 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6360 function and the one above.
6362 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6363 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6365 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6366 two operands.
6368 We return the simplified tree or 0 if no optimization is possible. */
6370 static tree
6371 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6372 tree lhs, tree rhs)
6374 /* If this is the "or" of two comparisons, we can do something if
6375 the comparisons are NE_EXPR. If this is the "and", we can do something
6376 if the comparisons are EQ_EXPR. I.e.,
6377 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6379 WANTED_CODE is this operation code. For single bit fields, we can
6380 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6381 comparison for one-bit fields. */
6383 enum tree_code wanted_code;
6384 enum tree_code lcode, rcode;
6385 tree ll_arg, lr_arg, rl_arg, rr_arg;
6386 tree ll_inner, lr_inner, rl_inner, rr_inner;
6387 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6388 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6389 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6390 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6391 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6392 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6393 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6394 scalar_int_mode lnmode, rnmode;
6395 tree ll_mask, lr_mask, rl_mask, rr_mask;
6396 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6397 tree l_const, r_const;
6398 tree lntype, rntype, result;
6399 HOST_WIDE_INT first_bit, end_bit;
6400 int volatilep;
6402 /* Start by getting the comparison codes. Fail if anything is volatile.
6403 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6404 it were surrounded with a NE_EXPR. */
6406 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6407 return 0;
6409 lcode = TREE_CODE (lhs);
6410 rcode = TREE_CODE (rhs);
6412 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6414 lhs = build2 (NE_EXPR, truth_type, lhs,
6415 build_int_cst (TREE_TYPE (lhs), 0));
6416 lcode = NE_EXPR;
6419 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6421 rhs = build2 (NE_EXPR, truth_type, rhs,
6422 build_int_cst (TREE_TYPE (rhs), 0));
6423 rcode = NE_EXPR;
6426 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6427 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6428 return 0;
6430 ll_arg = TREE_OPERAND (lhs, 0);
6431 lr_arg = TREE_OPERAND (lhs, 1);
6432 rl_arg = TREE_OPERAND (rhs, 0);
6433 rr_arg = TREE_OPERAND (rhs, 1);
6435 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6436 if (simple_operand_p (ll_arg)
6437 && simple_operand_p (lr_arg))
6439 if (operand_equal_p (ll_arg, rl_arg, 0)
6440 && operand_equal_p (lr_arg, rr_arg, 0))
6442 result = combine_comparisons (loc, code, lcode, rcode,
6443 truth_type, ll_arg, lr_arg);
6444 if (result)
6445 return result;
6447 else if (operand_equal_p (ll_arg, rr_arg, 0)
6448 && operand_equal_p (lr_arg, rl_arg, 0))
6450 result = combine_comparisons (loc, code, lcode,
6451 swap_tree_comparison (rcode),
6452 truth_type, ll_arg, lr_arg);
6453 if (result)
6454 return result;
6458 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6461 /* If the RHS can be evaluated unconditionally and its operands are
6462 simple, it wins to evaluate the RHS unconditionally on machines
6463 with expensive branches. In this case, this isn't a comparison
6464 that can be merged. */
6466 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6467 false) >= 2
6468 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6469 && simple_operand_p (rl_arg)
6470 && simple_operand_p (rr_arg))
6472 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6473 if (code == TRUTH_OR_EXPR
6474 && lcode == NE_EXPR && integer_zerop (lr_arg)
6475 && rcode == NE_EXPR && integer_zerop (rr_arg)
6476 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6477 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6478 return build2_loc (loc, NE_EXPR, truth_type,
6479 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6480 ll_arg, rl_arg),
6481 build_int_cst (TREE_TYPE (ll_arg), 0));
6483 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6484 if (code == TRUTH_AND_EXPR
6485 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6486 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6487 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6488 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6489 return build2_loc (loc, EQ_EXPR, truth_type,
6490 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6491 ll_arg, rl_arg),
6492 build_int_cst (TREE_TYPE (ll_arg), 0));
6495 /* See if the comparisons can be merged. Then get all the parameters for
6496 each side. */
6498 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6499 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6500 return 0;
6502 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6503 volatilep = 0;
6504 ll_inner = decode_field_reference (loc, &ll_arg,
6505 &ll_bitsize, &ll_bitpos, &ll_mode,
6506 &ll_unsignedp, &ll_reversep, &volatilep,
6507 &ll_mask, &ll_and_mask);
6508 lr_inner = decode_field_reference (loc, &lr_arg,
6509 &lr_bitsize, &lr_bitpos, &lr_mode,
6510 &lr_unsignedp, &lr_reversep, &volatilep,
6511 &lr_mask, &lr_and_mask);
6512 rl_inner = decode_field_reference (loc, &rl_arg,
6513 &rl_bitsize, &rl_bitpos, &rl_mode,
6514 &rl_unsignedp, &rl_reversep, &volatilep,
6515 &rl_mask, &rl_and_mask);
6516 rr_inner = decode_field_reference (loc, &rr_arg,
6517 &rr_bitsize, &rr_bitpos, &rr_mode,
6518 &rr_unsignedp, &rr_reversep, &volatilep,
6519 &rr_mask, &rr_and_mask);
6521 /* The inner operation on the lhs of each comparison must be the
6522 same if we are to be able to do anything.
6523 Then see if we have constants. If not, the same must be true for
6524 the rhs's. */
6525 if (volatilep
6526 || ll_reversep != rl_reversep
6527 || ll_inner == 0 || rl_inner == 0
6528 || ! operand_equal_p (ll_inner, rl_inner, 0))
6529 return 0;
6531 if (TREE_CODE (lr_arg) == INTEGER_CST
6532 && TREE_CODE (rr_arg) == INTEGER_CST)
6534 l_const = lr_arg, r_const = rr_arg;
6535 lr_reversep = ll_reversep;
6537 else if (lr_reversep != rr_reversep
6538 || lr_inner == 0 || rr_inner == 0
6539 || ! operand_equal_p (lr_inner, rr_inner, 0))
6540 return 0;
6541 else
6542 l_const = r_const = 0;
6544 /* If either comparison code is not correct for our logical operation,
6545 fail. However, we can convert a one-bit comparison against zero into
6546 the opposite comparison against that bit being set in the field. */
6548 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6549 if (lcode != wanted_code)
6551 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6553 /* Make the left operand unsigned, since we are only interested
6554 in the value of one bit. Otherwise we are doing the wrong
6555 thing below. */
6556 ll_unsignedp = 1;
6557 l_const = ll_mask;
6559 else
6560 return 0;
6563 /* This is analogous to the code for l_const above. */
6564 if (rcode != wanted_code)
6566 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6568 rl_unsignedp = 1;
6569 r_const = rl_mask;
6571 else
6572 return 0;
6575 /* See if we can find a mode that contains both fields being compared on
6576 the left. If we can't, fail. Otherwise, update all constants and masks
6577 to be relative to a field of that size. */
6578 first_bit = MIN (ll_bitpos, rl_bitpos);
6579 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6580 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6581 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6582 volatilep, &lnmode))
6583 return 0;
6585 lnbitsize = GET_MODE_BITSIZE (lnmode);
6586 lnbitpos = first_bit & ~ (lnbitsize - 1);
6587 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6588 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6590 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6592 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6593 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6596 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6597 size_int (xll_bitpos));
6598 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6599 size_int (xrl_bitpos));
6600 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6601 return 0;
6603 if (l_const)
6605 l_const = fold_convert_loc (loc, lntype, l_const);
6606 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6607 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6608 if (l_const == NULL_TREE)
6609 return 0;
6610 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6611 fold_build1_loc (loc, BIT_NOT_EXPR,
6612 lntype, ll_mask))))
6614 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6616 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6619 if (r_const)
6621 r_const = fold_convert_loc (loc, lntype, r_const);
6622 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6623 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6624 if (r_const == NULL_TREE)
6625 return 0;
6626 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6627 fold_build1_loc (loc, BIT_NOT_EXPR,
6628 lntype, rl_mask))))
6630 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6632 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6636 /* If the right sides are not constant, do the same for them. Also,
6637 disallow this optimization if a size, signedness or storage order
6638 mismatch occurs between the left and right sides. */
6639 if (l_const == 0)
6641 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6642 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6643 || ll_reversep != lr_reversep
6644 /* Make sure the two fields on the right
6645 correspond to the left without being swapped. */
6646 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6647 return 0;
6649 first_bit = MIN (lr_bitpos, rr_bitpos);
6650 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6651 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6652 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6653 volatilep, &rnmode))
6654 return 0;
6656 rnbitsize = GET_MODE_BITSIZE (rnmode);
6657 rnbitpos = first_bit & ~ (rnbitsize - 1);
6658 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6659 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6661 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6663 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6664 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6667 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6668 rntype, lr_mask),
6669 size_int (xlr_bitpos));
6670 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6671 rntype, rr_mask),
6672 size_int (xrr_bitpos));
6673 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6674 return 0;
6676 /* Make a mask that corresponds to both fields being compared.
6677 Do this for both items being compared. If the operands are the
6678 same size and the bits being compared are in the same position
6679 then we can do this by masking both and comparing the masked
6680 results. */
6681 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6682 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6683 if (lnbitsize == rnbitsize
6684 && xll_bitpos == xlr_bitpos
6685 && lnbitpos >= 0
6686 && rnbitpos >= 0)
6688 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6689 lntype, lnbitsize, lnbitpos,
6690 ll_unsignedp || rl_unsignedp, ll_reversep);
6691 if (! all_ones_mask_p (ll_mask, lnbitsize))
6692 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6694 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6695 rntype, rnbitsize, rnbitpos,
6696 lr_unsignedp || rr_unsignedp, lr_reversep);
6697 if (! all_ones_mask_p (lr_mask, rnbitsize))
6698 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6700 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6703 /* There is still another way we can do something: If both pairs of
6704 fields being compared are adjacent, we may be able to make a wider
6705 field containing them both.
6707 Note that we still must mask the lhs/rhs expressions. Furthermore,
6708 the mask must be shifted to account for the shift done by
6709 make_bit_field_ref. */
6710 if (((ll_bitsize + ll_bitpos == rl_bitpos
6711 && lr_bitsize + lr_bitpos == rr_bitpos)
6712 || (ll_bitpos == rl_bitpos + rl_bitsize
6713 && lr_bitpos == rr_bitpos + rr_bitsize))
6714 && ll_bitpos >= 0
6715 && rl_bitpos >= 0
6716 && lr_bitpos >= 0
6717 && rr_bitpos >= 0)
6719 tree type;
6721 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6722 ll_bitsize + rl_bitsize,
6723 MIN (ll_bitpos, rl_bitpos),
6724 ll_unsignedp, ll_reversep);
6725 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6726 lr_bitsize + rr_bitsize,
6727 MIN (lr_bitpos, rr_bitpos),
6728 lr_unsignedp, lr_reversep);
6730 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6731 size_int (MIN (xll_bitpos, xrl_bitpos)));
6732 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6733 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6734 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6735 return 0;
6737 /* Convert to the smaller type before masking out unwanted bits. */
6738 type = lntype;
6739 if (lntype != rntype)
6741 if (lnbitsize > rnbitsize)
6743 lhs = fold_convert_loc (loc, rntype, lhs);
6744 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6745 type = rntype;
6747 else if (lnbitsize < rnbitsize)
6749 rhs = fold_convert_loc (loc, lntype, rhs);
6750 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6751 type = lntype;
6755 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6756 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6758 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6759 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6761 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6764 return 0;
6767 /* Handle the case of comparisons with constants. If there is something in
6768 common between the masks, those bits of the constants must be the same.
6769 If not, the condition is always false. Test for this to avoid generating
6770 incorrect code below. */
6771 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6772 if (! integer_zerop (result)
6773 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6774 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6776 if (wanted_code == NE_EXPR)
6778 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6779 return constant_boolean_node (true, truth_type);
6781 else
6783 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6784 return constant_boolean_node (false, truth_type);
6788 if (lnbitpos < 0)
6789 return 0;
6791 /* Construct the expression we will return. First get the component
6792 reference we will make. Unless the mask is all ones the width of
6793 that field, perform the mask operation. Then compare with the
6794 merged constant. */
6795 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6796 lntype, lnbitsize, lnbitpos,
6797 ll_unsignedp || rl_unsignedp, ll_reversep);
6799 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6800 if (! all_ones_mask_p (ll_mask, lnbitsize))
6801 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6803 return build2_loc (loc, wanted_code, truth_type, result,
6804 const_binop (BIT_IOR_EXPR, l_const, r_const));
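/* For illustration (a sketch; the exact mode and constant depend on
   alignment, bit order and endianness): given
     struct s { unsigned a : 4; unsigned b : 4; } *p;
   the test p->a == 2 && p->b == 4 can be merged by the code above into
   one 8-bit load, mask and compare -- on a typical little-endian
   target conceptually (*(unsigned char *) p & 0xff) == 0x42, with the
   merged constant built by IORing the shifted per-field constants.  */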
6807 /* T is an integer expression that is being multiplied, divided, or taken a
6808 modulus (CODE says which and what kind of divide or modulus) by a
6809 constant C. See if we can eliminate that operation by folding it with
6810 other operations already in T. WIDE_TYPE, if non-null, is a type that
6811 should be used for the computation if wider than our type.
6813 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6814 (X * 2) + (Y * 4). We must, however, be assured that either the original
6815 expression would not overflow or that overflow is undefined for the type
6816 in the language in question.
6818 If we return a non-null expression, it is an equivalent form of the
6819 original computation, but need not be in the original type.
6821 We set *STRICT_OVERFLOW_P to true if the return value depends on
6822 signed overflow being undefined. Otherwise we do not change
6823 *STRICT_OVERFLOW_P. */
6825 static tree
6826 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6827 bool *strict_overflow_p)
6829 /* To avoid exponential search depth, refuse to allow recursion past
6830 three levels. Beyond that (1) it's highly unlikely that we'll find
6831 something interesting and (2) we've probably processed it before
6832 when we built the inner expression. */
6834 static int depth;
6835 tree ret;
6837 if (depth > 3)
6838 return NULL;
6840 depth++;
6841 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6842 depth--;
6844 return ret;
6847 static tree
6848 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6849 bool *strict_overflow_p)
6851 tree type = TREE_TYPE (t);
6852 enum tree_code tcode = TREE_CODE (t);
6853 tree ctype = (wide_type != 0
6854 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6855 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6856 ? wide_type : type);
6857 tree t1, t2;
6858 bool same_p = tcode == code;
6859 tree op0 = NULL_TREE, op1 = NULL_TREE;
6860 bool sub_strict_overflow_p;
6862 /* Don't deal with constants of zero here; they confuse the code below. */
6863 if (integer_zerop (c))
6864 return NULL_TREE;
6866 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6867 op0 = TREE_OPERAND (t, 0);
6869 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6870 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6872 /* Note that we need not handle conditional operations here since fold
6873 already handles those cases. So just do arithmetic here. */
6874 switch (tcode)
6876 case INTEGER_CST:
6877 /* For a constant, we can always simplify if we are a multiply
6878 or (for divide and modulus) if it is a multiple of our constant. */
6879 if (code == MULT_EXPR
6880 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6881 TYPE_SIGN (type)))
6883 tree tem = const_binop (code, fold_convert (ctype, t),
6884 fold_convert (ctype, c));
6885 /* If the multiplication overflowed, we lost information on it.
6886 See PR68142 and PR69845. */
6887 if (TREE_OVERFLOW (tem))
6888 return NULL_TREE;
6889 return tem;
6891 break;
6893 CASE_CONVERT: case NON_LVALUE_EXPR:
6894 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6895 break;
6896 /* If op0 is an expression ... */
6897 if ((COMPARISON_CLASS_P (op0)
6898 || UNARY_CLASS_P (op0)
6899 || BINARY_CLASS_P (op0)
6900 || VL_EXP_CLASS_P (op0)
6901 || EXPRESSION_CLASS_P (op0))
6902 /* ... and has wrapping overflow, and its type is smaller
6903 than ctype, then we cannot pass through as widening. */
6904 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6905 && (TYPE_PRECISION (ctype)
6906 > TYPE_PRECISION (TREE_TYPE (op0))))
6907 /* ... or this is a truncation (t is narrower than op0),
6908 then we cannot pass through this narrowing. */
6909 || (TYPE_PRECISION (type)
6910 < TYPE_PRECISION (TREE_TYPE (op0)))
6911 /* ... or signedness changes for division or modulus,
6912 then we cannot pass through this conversion. */
6913 || (code != MULT_EXPR
6914 && (TYPE_UNSIGNED (ctype)
6915 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6916 /* ... or has undefined overflow while the converted to
6917 type has not, we cannot do the operation in the inner type
6918 as that would introduce undefined overflow. */
6919 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6920 && !TYPE_OVERFLOW_UNDEFINED (type))))
6921 break;
6923 /* Pass the constant down and see if we can make a simplification. If
6924 we can, replace this expression with the inner simplification for
6925 possible later conversion to our or some other type. */
6926 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6927 && TREE_CODE (t2) == INTEGER_CST
6928 && !TREE_OVERFLOW (t2)
6929 && (t1 = extract_muldiv (op0, t2, code,
6930 code == MULT_EXPR ? ctype : NULL_TREE,
6931 strict_overflow_p)) != 0)
6932 return t1;
6933 break;
6935 case ABS_EXPR:
6936 /* If widening the type changes it from signed to unsigned, then we
6937 must avoid building ABS_EXPR itself as unsigned. */
6938 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6940 tree cstype = (*signed_type_for) (ctype);
6941 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6942 != 0)
6944 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6945 return fold_convert (ctype, t1);
6947 break;
6949 /* If the constant is negative, we cannot simplify this. */
6950 if (tree_int_cst_sgn (c) == -1)
6951 break;
6952 /* FALLTHROUGH */
6953 case NEGATE_EXPR:
6954 /* For division and modulus, type can't be unsigned, as e.g.
6955 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6956 For signed types, even with wrapping overflow, this is fine. */
6957 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6958 break;
6959 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6960 != 0)
6961 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6962 break;
6964 case MIN_EXPR: case MAX_EXPR:
6965 /* If widening the type changes the signedness, then we can't perform
6966 this optimization as that changes the result. */
6967 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6968 break;
6970 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6971 sub_strict_overflow_p = false;
6972 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6973 &sub_strict_overflow_p)) != 0
6974 && (t2 = extract_muldiv (op1, c, code, wide_type,
6975 &sub_strict_overflow_p)) != 0)
6977 if (tree_int_cst_sgn (c) < 0)
6978 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6979 if (sub_strict_overflow_p)
6980 *strict_overflow_p = true;
6981 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6982 fold_convert (ctype, t2));
6984 break;
6986 case LSHIFT_EXPR: case RSHIFT_EXPR:
6987 /* If the second operand is constant, this is a multiplication
6988 or floor division, by a power of two, so we can treat it that
6989 way unless the multiplier or divisor overflows. Signed
6990 left-shift overflow is implementation-defined rather than
6991 undefined in C90, so do not convert signed left shift into
6992 multiplication. */
6993 if (TREE_CODE (op1) == INTEGER_CST
6994 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6995 /* const_binop may not detect overflow correctly,
6996 so check for it explicitly here. */
6997 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6998 wi::to_wide (op1))
6999 && (t1 = fold_convert (ctype,
7000 const_binop (LSHIFT_EXPR, size_one_node,
7001 op1))) != 0
7002 && !TREE_OVERFLOW (t1))
7003 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7004 ? MULT_EXPR : FLOOR_DIV_EXPR,
7005 ctype,
7006 fold_convert (ctype, op0),
7007 t1),
7008 c, code, wide_type, strict_overflow_p);
7009 break;
7011 case PLUS_EXPR: case MINUS_EXPR:
7012 /* See if we can eliminate the operation on both sides. If we can, we
7013 can return a new PLUS or MINUS. If we can't, the only remaining
7014 cases where we can do anything are if the second operand is a
7015 constant. */
7016 sub_strict_overflow_p = false;
7017 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7018 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7019 if (t1 != 0 && t2 != 0
7020 && TYPE_OVERFLOW_WRAPS (ctype)
7021 && (code == MULT_EXPR
7022 /* If not multiplication, we can only do this if both operands
7023 are divisible by c. */
7024 || (multiple_of_p (ctype, op0, c)
7025 && multiple_of_p (ctype, op1, c))))
7027 if (sub_strict_overflow_p)
7028 *strict_overflow_p = true;
7029 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7030 fold_convert (ctype, t2));
7033 /* If this was a subtraction, negate OP1 and set it to be an addition.
7034 This simplifies the logic below. */
7035 if (tcode == MINUS_EXPR)
7037 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7038 /* If OP1 was not easily negatable, the constant may be OP0. */
7039 if (TREE_CODE (op0) == INTEGER_CST)
7041 std::swap (op0, op1);
7042 std::swap (t1, t2);
7046 if (TREE_CODE (op1) != INTEGER_CST)
7047 break;
7049 /* If either OP1 or C are negative, this optimization is not safe for
7050 some of the division and remainder types while for others we need
7051 to change the code. */
7052 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7054 if (code == CEIL_DIV_EXPR)
7055 code = FLOOR_DIV_EXPR;
7056 else if (code == FLOOR_DIV_EXPR)
7057 code = CEIL_DIV_EXPR;
7058 else if (code != MULT_EXPR
7059 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7060 break;
7063 /* If it's a multiply or a division/modulus operation of a multiple
7064 of our constant, do the operation and verify it doesn't overflow. */
7065 if (code == MULT_EXPR
7066 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7067 TYPE_SIGN (type)))
7069 op1 = const_binop (code, fold_convert (ctype, op1),
7070 fold_convert (ctype, c));
7071 /* We allow the constant to overflow with wrapping semantics. */
7072 if (op1 == 0
7073 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7074 break;
7076 else
7077 break;
7079 /* If we have an unsigned type, we cannot widen the operation since it
7080 will change the result if the original computation overflowed. */
7081 if (TYPE_UNSIGNED (ctype) && ctype != type)
7082 break;
7084 /* The last case is if we are a multiply. In that case, we can
7085 apply the distributive law to commute the multiply and addition
7086 if the multiplication of the constants doesn't overflow
7087 and overflow is defined. With undefined overflow
7088 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7089 But fold_plusminus_mult_expr would factor back any power-of-two
7090 value so do not distribute in the first place in this case. */
7091 if (code == MULT_EXPR
7092 && TYPE_OVERFLOW_WRAPS (ctype)
7093 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7094 return fold_build2 (tcode, ctype,
7095 fold_build2 (code, ctype,
7096 fold_convert (ctype, op0),
7097 fold_convert (ctype, c)),
7098 op1);
7100 break;
7102 case MULT_EXPR:
7103 /* We have a special case here if we are doing something like
7104 (C * 8) % 4 since we know that's zero. */
7105 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7106 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7107 /* If the multiplication can overflow we cannot optimize this. */
7108 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7109 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7110 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7111 TYPE_SIGN (type)))
7113 *strict_overflow_p = true;
7114 return omit_one_operand (type, integer_zero_node, op0);
7117 /* ... fall through ... */
7119 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7120 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7121 /* If we can extract our operation from the LHS, do so and return a
7122 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7123 do something only if the second operand is a constant. */
7124 if (same_p
7125 && TYPE_OVERFLOW_WRAPS (ctype)
7126 && (t1 = extract_muldiv (op0, c, code, wide_type,
7127 strict_overflow_p)) != 0)
7128 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7129 fold_convert (ctype, op1));
7130 else if (tcode == MULT_EXPR && code == MULT_EXPR
7131 && TYPE_OVERFLOW_WRAPS (ctype)
7132 && (t1 = extract_muldiv (op1, c, code, wide_type,
7133 strict_overflow_p)) != 0)
7134 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7135 fold_convert (ctype, t1));
7136 else if (TREE_CODE (op1) != INTEGER_CST)
7137 return 0;
7139 /* If these are the same operation types, we can associate them
7140 assuming no overflow. */
7141 if (tcode == code)
7143 bool overflow_p = false;
7144 wi::overflow_type overflow_mul;
7145 signop sign = TYPE_SIGN (ctype);
7146 unsigned prec = TYPE_PRECISION (ctype);
7147 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7148 wi::to_wide (c, prec),
7149 sign, &overflow_mul);
7150 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7151 if (overflow_mul
7152 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7153 overflow_p = true;
7154 if (!overflow_p)
7155 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7156 wide_int_to_tree (ctype, mul));
7159 /* If these operations "cancel" each other, we have the main
7160 optimizations of this pass, which occur when either constant is a
7161 multiple of the other, in which case we replace this with either an
7162 operation of CODE or TCODE.
7164 If we have an unsigned type, we cannot do this since it will change
7165 the result if the original computation overflowed. */
7166 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7167 && !TYPE_OVERFLOW_SANITIZED (ctype)
7168 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7169 || (tcode == MULT_EXPR
7170 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7171 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7172 && code != MULT_EXPR)))
7174 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7175 TYPE_SIGN (type)))
7177 *strict_overflow_p = true;
7178 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7179 fold_convert (ctype,
7180 const_binop (TRUNC_DIV_EXPR,
7181 op1, c)));
7183 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7184 TYPE_SIGN (type)))
7186 *strict_overflow_p = true;
7187 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7188 fold_convert (ctype,
7189 const_binop (TRUNC_DIV_EXPR,
7190 c, op1)));
7193 break;
7195 default:
7196 break;
7199 return 0;
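/* For illustration (worked example, not from the sources): for signed
   x, whose overflow is undefined and not sanitized, extract_muldiv_1
   on t = x * 12, c = 4, code = TRUNC_DIV_EXPR reaches the "cancel"
   case above: 12 is a multiple of 4, so it returns x * 3 and sets
   *STRICT_OVERFLOW_P, because the equivalence relies on x * 12 not
   wrapping.  */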
7202 /* Return a node which has the indicated constant VALUE (either 0 or
7203 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7204 and is of the indicated TYPE. */
7206 tree
7207 constant_boolean_node (bool value, tree type)
7209 if (type == integer_type_node)
7210 return value ? integer_one_node : integer_zero_node;
7211 else if (type == boolean_type_node)
7212 return value ? boolean_true_node : boolean_false_node;
7213 else if (VECTOR_TYPE_P (type))
7214 return build_vector_from_val (type,
7215 build_int_cst (TREE_TYPE (type),
7216 value ? -1 : 0));
7217 else
7218 return fold_convert (type, value ? integer_one_node : integer_zero_node);
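/* For illustration: constant_boolean_node (true, boolean_type_node)
   yields the shared boolean_true_node, while for a vector type it
   builds a vector with every element -1, matching the all-ones "true"
   that vector comparisons produce.  */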
7222 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7223 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7224 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7225 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7226 COND is the first argument to CODE; otherwise (as in the example
7227 given here), it is the second argument. TYPE is the type of the
7228 original expression. Return NULL_TREE if no simplification is
7229 possible. */
7231 static tree
7232 fold_binary_op_with_conditional_arg (location_t loc,
7233 enum tree_code code,
7234 tree type, tree op0, tree op1,
7235 tree cond, tree arg, int cond_first_p)
7237 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7238 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7239 tree test, true_value, false_value;
7240 tree lhs = NULL_TREE;
7241 tree rhs = NULL_TREE;
7242 enum tree_code cond_code = COND_EXPR;
7244 /* Do not move possibly trapping operations into the conditional as this
7245 pessimizes code and causes gimplification issues when applied late. */
7246 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7247 ANY_INTEGRAL_TYPE_P (type)
7248 && TYPE_OVERFLOW_TRAPS (type), op1))
7249 return NULL_TREE;
7251 if (TREE_CODE (cond) == COND_EXPR
7252 || TREE_CODE (cond) == VEC_COND_EXPR)
7254 test = TREE_OPERAND (cond, 0);
7255 true_value = TREE_OPERAND (cond, 1);
7256 false_value = TREE_OPERAND (cond, 2);
7257 /* If this arm is a void expression (such as a throw), it does not
7258 make sense to try to perform a logical or arithmetic operation
7259 involving it. */
7260 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7261 lhs = true_value;
7262 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7263 rhs = false_value;
7265 else if (!(TREE_CODE (type) != VECTOR_TYPE
7266 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7268 tree testtype = TREE_TYPE (cond);
7269 test = cond;
7270 true_value = constant_boolean_node (true, testtype);
7271 false_value = constant_boolean_node (false, testtype);
7273 else
7274 /* Detect the case of mixing vector and scalar types - bail out. */
7275 return NULL_TREE;
7277 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7278 cond_code = VEC_COND_EXPR;
7280 /* This transformation is only worthwhile if we don't have to wrap ARG
7281 in a SAVE_EXPR and the operation can be simplified without recursing
7282 on at least one of the branches once it's pushed inside the COND_EXPR. */
7283 if (!TREE_CONSTANT (arg)
7284 && (TREE_SIDE_EFFECTS (arg)
7285 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7286 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7287 return NULL_TREE;
7289 arg = fold_convert_loc (loc, arg_type, arg);
7290 if (lhs == 0)
7292 true_value = fold_convert_loc (loc, cond_type, true_value);
7293 if (cond_first_p)
7294 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7295 else
7296 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7298 if (rhs == 0)
7300 false_value = fold_convert_loc (loc, cond_type, false_value);
7301 if (cond_first_p)
7302 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7303 else
7304 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7307 /* Check that we have simplified at least one of the branches. */
7308 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7309 return NULL_TREE;
7311 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
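In scalar terms the transformation reads as follows, a minimal sketch on host integers; the fold only pays off when one arm simplifies further (the TREE_CONSTANT checks above), e.g. when x or y is a constant:

/* Rewrite a + (b ? x : y) as b ? (a + x) : (a + y).  Illustrative:
   with x == 1 and y == 0 the arms fold to a + 1 and a.  */
int
add_through_cond (bool b, int a, int x, int y)
{
  return b ? (a + x) : (a + y);
}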
7315 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7317 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7318 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7319 if ARG - ZERO_ARG is the same as ARG.
7321 If ARG is NULL, check for any value of type TYPE.
7323 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7324 and finite. The problematic cases are when X is zero, and its mode
7325 has signed zeros. In the case of rounding towards -infinity,
7326 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7327 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7329 bool
7330 fold_real_zero_addition_p (const_tree type, const_tree arg,
7331 const_tree zero_arg, int negate)
7333 if (!real_zerop (zero_arg))
7334 return false;
7336 /* Don't allow the fold with -fsignaling-nans. */
7337 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7338 return false;
7340 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7341 if (!HONOR_SIGNED_ZEROS (type))
7342 return true;
7344 /* There is no case that is safe for all rounding modes. */
7345 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7346 return false;
7348 /* In a vector or complex, we would need to check the sign of all zeros. */
7349 if (TREE_CODE (zero_arg) == VECTOR_CST)
7350 zero_arg = uniform_vector_p (zero_arg);
7351 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7352 return false;
7354 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7355 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7356 negate = !negate;
7358 /* The mode has signed zeros, and we have to honor their sign.
7359 In this situation, there are only two cases we can return true for.
7360 (i) X - 0 is the same as X with default rounding.
7361 (ii) X + 0 is X when X can't possibly be -0.0. */
7362 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
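A runnable illustration of the signed-zero hazard described above, using host doubles under the default round-to-nearest IEEE mode:

#include <cmath>
#include <cstdio>

int
main ()
{
  double x = -0.0;
  /* -0.0 + 0.0 is +0.0, so folding x + 0.0 to x would lose the sign.  */
  std::printf ("signbit (x + 0.0) = %d\n", (int) std::signbit (x + 0.0)); /* 0 */
  /* -0.0 - 0.0 is -0.0, so folding x - 0.0 to x is safe here.  */
  std::printf ("signbit (x - 0.0) = %d\n", (int) std::signbit (x - 0.0)); /* 1 */
}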
7365 /* Subroutine of match.pd that optimizes comparisons of a division by
7366 a nonzero integer constant against an integer constant, i.e.
7367 X/C1 op C2.
7369 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7370 GE_EXPR or LE_EXPR. C1 and C2 must each be an INTEGER_CST.
7372 enum tree_code
7373 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7374 tree *hi, bool *neg_overflow)
7376 tree prod, tmp, type = TREE_TYPE (c1);
7377 signop sign = TYPE_SIGN (type);
7378 wi::overflow_type overflow;
7380 /* We have to do this the hard way to detect unsigned overflow.
7381 prod = int_const_binop (MULT_EXPR, c1, c2); */
7382 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7383 prod = force_fit_type (type, val, -1, overflow);
7384 *neg_overflow = false;
7386 if (sign == UNSIGNED)
7388 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7389 *lo = prod;
7391 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7392 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7393 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7395 else if (tree_int_cst_sgn (c1) >= 0)
7397 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7398 switch (tree_int_cst_sgn (c2))
7400 case -1:
7401 *neg_overflow = true;
7402 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7403 *hi = prod;
7404 break;
7406 case 0:
7407 *lo = fold_negate_const (tmp, type);
7408 *hi = tmp;
7409 break;
7411 case 1:
7412 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7413 *lo = prod;
7414 break;
7416 default:
7417 gcc_unreachable ();
7420 else
7422 /* A negative divisor reverses the relational operators. */
7423 code = swap_tree_comparison (code);
7425 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7426 switch (tree_int_cst_sgn (c2))
7428 case -1:
7429 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7430 *lo = prod;
7431 break;
7433 case 0:
7434 *hi = fold_negate_const (tmp, type);
7435 *lo = tmp;
7436 break;
7438 case 1:
7439 *neg_overflow = true;
7440 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7441 *hi = prod;
7442 break;
7444 default:
7445 gcc_unreachable ();
7449 if (code != EQ_EXPR && code != NE_EXPR)
7450 return code;
7452 if (TREE_OVERFLOW (*lo)
7453 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7454 *lo = NULL_TREE;
7455 if (TREE_OVERFLOW (*hi)
7456 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7457 *hi = NULL_TREE;
7459 return code;
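For the unsigned case the bounds computed above say that x / c1 == c2 holds exactly when x lies in [c1*c2, c1*c2 + (c1 - 1)]. A self-checking host sketch of that range (names are illustrative):

#include <cassert>
#include <cstdint>

static bool
div_eq_via_range (uint32_t x, uint32_t c1, uint32_t c2)
{
  uint64_t lo = (uint64_t) c1 * c2;  /* prod above */
  uint64_t hi = lo + (c1 - 1);       /* prod + (c1 - 1) */
  return lo <= x && x <= hi;
}

int
main ()
{
  for (uint32_t x = 0; x < 1000; x++)
    assert ((x / 7 == 5) == div_eq_via_range (x, 7, 5));
}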
7462 /* Test whether it is preferable to swap two operands, ARG0 and
7463 ARG1, for example because ARG0 is an integer constant and ARG1
7464 isn't. */
7466 bool
7467 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7469 if (CONSTANT_CLASS_P (arg1))
7470 return false;
7471 if (CONSTANT_CLASS_P (arg0))
7472 return true;
7474 STRIP_NOPS (arg0);
7475 STRIP_NOPS (arg1);
7477 if (TREE_CONSTANT (arg1))
7478 return false;
7479 if (TREE_CONSTANT (arg0))
7480 return true;
7482 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7483 for commutative and comparison operators. Ensuring a canonical
7484 form allows the optimizers to find additional redundancies without
7485 having to explicitly check for both orderings. */
7486 if (TREE_CODE (arg0) == SSA_NAME
7487 && TREE_CODE (arg1) == SSA_NAME
7488 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7489 return true;
7491 /* Put SSA_NAMEs last. */
7492 if (TREE_CODE (arg1) == SSA_NAME)
7493 return false;
7494 if (TREE_CODE (arg0) == SSA_NAME)
7495 return true;
7497 /* Put variables last. */
7498 if (DECL_P (arg1))
7499 return false;
7500 if (DECL_P (arg0))
7501 return true;
7503 return false;
7507 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7508 means A >= Y && A != MAX, but in this case we know that
7509 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7511 static tree
7512 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7514 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7516 if (TREE_CODE (bound) == LT_EXPR)
7517 a = TREE_OPERAND (bound, 0);
7518 else if (TREE_CODE (bound) == GT_EXPR)
7519 a = TREE_OPERAND (bound, 1);
7520 else
7521 return NULL_TREE;
7523 typea = TREE_TYPE (a);
7524 if (!INTEGRAL_TYPE_P (typea)
7525 && !POINTER_TYPE_P (typea))
7526 return NULL_TREE;
7528 if (TREE_CODE (ineq) == LT_EXPR)
7530 a1 = TREE_OPERAND (ineq, 1);
7531 y = TREE_OPERAND (ineq, 0);
7533 else if (TREE_CODE (ineq) == GT_EXPR)
7535 a1 = TREE_OPERAND (ineq, 0);
7536 y = TREE_OPERAND (ineq, 1);
7538 else
7539 return NULL_TREE;
7541 if (TREE_TYPE (a1) != typea)
7542 return NULL_TREE;
7544 if (POINTER_TYPE_P (typea))
7546 /* Convert the pointers to integers before taking the difference. */
7547 tree ta = fold_convert_loc (loc, ssizetype, a);
7548 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7549 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7551 else
7552 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7554 if (!diff || !integer_onep (diff))
7555 return NULL_TREE;
7557 return fold_build2_loc (loc, GE_EXPR, type, a, y);
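On plain ints the rewrite looks like this; it is valid because a < x already keeps a away from the maximum value, so a + 1 cannot wrap. A minimal sketch with an exhaustive check over a small range:

#include <cassert>

static bool orig_form (int a, int x, int y)   { return a < x && a + 1 > y; }
static bool folded_form (int a, int x, int y) { return a < x && a >= y; }

int
main ()
{
  for (int a = -8; a < 8; a++)
    for (int x = -8; x < 8; x++)
      for (int y = -8; y < 8; y++)
        assert (orig_form (a, x, y) == folded_form (a, x, y));
}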
7560 /* Fold a sum or difference of at least one multiplication.
7561 Returns the folded tree or NULL if no simplification could be made. */
7563 static tree
7564 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7565 tree arg0, tree arg1)
7567 tree arg00, arg01, arg10, arg11;
7568 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7570 /* (A * C) +- (B * C) -> (A+-B) * C.
7571 (A * C) +- A -> A * (C+-1).
7572 We are most concerned about the case where C is a constant,
7573 but other combinations show up during loop reduction. Since
7574 it is not difficult, try all four possibilities. */
7576 if (TREE_CODE (arg0) == MULT_EXPR)
7578 arg00 = TREE_OPERAND (arg0, 0);
7579 arg01 = TREE_OPERAND (arg0, 1);
7581 else if (TREE_CODE (arg0) == INTEGER_CST)
7583 arg00 = build_one_cst (type);
7584 arg01 = arg0;
7586 else
7588 /* We cannot generate constant 1 for fract. */
7589 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7590 return NULL_TREE;
7591 arg00 = arg0;
7592 arg01 = build_one_cst (type);
7594 if (TREE_CODE (arg1) == MULT_EXPR)
7596 arg10 = TREE_OPERAND (arg1, 0);
7597 arg11 = TREE_OPERAND (arg1, 1);
7599 else if (TREE_CODE (arg1) == INTEGER_CST)
7601 arg10 = build_one_cst (type);
7602 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7603 the purpose of this canonicalization. */
7604 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7605 && negate_expr_p (arg1)
7606 && code == PLUS_EXPR)
7608 arg11 = negate_expr (arg1);
7609 code = MINUS_EXPR;
7611 else
7612 arg11 = arg1;
7614 else
7616 /* We cannot generate constant 1 for fract. */
7617 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7618 return NULL_TREE;
7619 arg10 = arg1;
7620 arg11 = build_one_cst (type);
7622 same = NULL_TREE;
7624 /* Prefer factoring a common non-constant. */
7625 if (operand_equal_p (arg00, arg10, 0))
7626 same = arg00, alt0 = arg01, alt1 = arg11;
7627 else if (operand_equal_p (arg01, arg11, 0))
7628 same = arg01, alt0 = arg00, alt1 = arg10;
7629 else if (operand_equal_p (arg00, arg11, 0))
7630 same = arg00, alt0 = arg01, alt1 = arg10;
7631 else if (operand_equal_p (arg01, arg10, 0))
7632 same = arg01, alt0 = arg00, alt1 = arg11;
7634 /* No identical multiplicands; see if we can find a common
7635 power-of-two factor in non-power-of-two multiplies. This
7636 can help in multi-dimensional array access. */
7637 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7639 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7640 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7641 HOST_WIDE_INT tmp;
7642 bool swap = false;
7643 tree maybe_same;
7645 /* Move min of absolute values to int11. */
7646 if (absu_hwi (int01) < absu_hwi (int11))
7648 tmp = int01, int01 = int11, int11 = tmp;
7649 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7650 maybe_same = arg01;
7651 swap = true;
7653 else
7654 maybe_same = arg11;
7656 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7657 if (factor > 1
7658 && pow2p_hwi (factor)
7659 && (int01 & (factor - 1)) == 0
7660 /* The remainder should not be a constant, otherwise we
7661 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7662 increase the number of multiplications necessary. */
7663 && TREE_CODE (arg10) != INTEGER_CST)
7665 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7666 build_int_cst (TREE_TYPE (arg00),
7667 int01 / int11));
7668 alt1 = arg10;
7669 same = maybe_same;
7670 if (swap)
7671 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7675 if (!same)
7676 return NULL_TREE;
7678 if (! ANY_INTEGRAL_TYPE_P (type)
7679 || TYPE_OVERFLOW_WRAPS (type)
7680 /* We are neither factoring zero nor minus one. */
7681 || TREE_CODE (same) == INTEGER_CST)
7682 return fold_build2_loc (loc, MULT_EXPR, type,
7683 fold_build2_loc (loc, code, type,
7684 fold_convert_loc (loc, type, alt0),
7685 fold_convert_loc (loc, type, alt1)),
7686 fold_convert_loc (loc, type, same));
7688 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7689 same may be minus one and thus the multiplication may overflow. Perform
7690 the sum operation in an unsigned type. */
7691 tree utype = unsigned_type_for (type);
7692 tree tem = fold_build2_loc (loc, code, utype,
7693 fold_convert_loc (loc, utype, alt0),
7694 fold_convert_loc (loc, utype, alt1));
7695 /* If the sum evaluated to a constant that is not -INF, the
7696 multiplication cannot overflow. */
7697 if (TREE_CODE (tem) == INTEGER_CST
7698 && (wi::to_wide (tem)
7699 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7700 return fold_build2_loc (loc, MULT_EXPR, type,
7701 fold_convert (type, tem), same);
7703 /* Do not resort to unsigned multiplication because
7704 we lose the no-overflow property of the expression. */
7705 return NULL_TREE;
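The two factorings above, written out on host integers: the common-operand case, and the power-of-two case that helps multi-dimensional array indexing. A sketch only, assuming arithmetic that cannot overflow:

#include <cstdint>

/* (A * C) + (B * C) -> (A + B) * C: one multiplication instead of two.  */
static int64_t
factor_common (int64_t a, int64_t b, int64_t c)
{
  return (a + b) * c;
}

/* i*8 + j*4 -> (i*2 + j) * 4: factor out the smaller power of two.  */
static int64_t
factor_pow2 (int64_t i, int64_t j)
{
  return (i * 2 + j) * 4;
}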
7708 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7709 specified by EXPR into the buffer PTR of length LEN bytes.
7710 Return the number of bytes placed in the buffer, or zero
7711 upon failure. */
7713 static int
7714 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7716 tree type = TREE_TYPE (expr);
7717 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7718 int byte, offset, word, words;
7719 unsigned char value;
7721 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7722 return 0;
7723 if (off == -1)
7724 off = 0;
7726 if (ptr == NULL)
7727 /* Dry run. */
7728 return MIN (len, total_bytes - off);
7730 words = total_bytes / UNITS_PER_WORD;
7732 for (byte = 0; byte < total_bytes; byte++)
7734 int bitpos = byte * BITS_PER_UNIT;
7735 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7736 number of bytes. */
7737 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7739 if (total_bytes > UNITS_PER_WORD)
7741 word = byte / UNITS_PER_WORD;
7742 if (WORDS_BIG_ENDIAN)
7743 word = (words - 1) - word;
7744 offset = word * UNITS_PER_WORD;
7745 if (BYTES_BIG_ENDIAN)
7746 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7747 else
7748 offset += byte % UNITS_PER_WORD;
7750 else
7751 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7752 if (offset >= off && offset - off < len)
7753 ptr[offset - off] = value;
7755 return MIN (len, total_bytes - off);
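Stripped of the word/byte remapping needed for cross compilation, the loop above emits one byte of the value per iteration. A host-only little-endian sketch (names are illustrative):

#include <cstdint>
#include <cstdio>

static void
encode_le (uint64_t v, unsigned char *ptr, int total_bytes)
{
  for (int byte = 0; byte < total_bytes; byte++)
    /* The wi::extract_uhwi analogue: bits [8*byte, 8*byte + 8).  */
    ptr[byte] = (unsigned char) (v >> (byte * 8));
}

int
main ()
{
  unsigned char buf[4];
  encode_le (0x01020304u, buf, 4);
  std::printf ("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);
  /* Prints: 04 03 02 01.  */
}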
7759 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7760 specified by EXPR into the buffer PTR of length LEN bytes.
7761 Return the number of bytes placed in the buffer, or zero
7762 upon failure. */
7764 static int
7765 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7767 tree type = TREE_TYPE (expr);
7768 scalar_mode mode = SCALAR_TYPE_MODE (type);
7769 int total_bytes = GET_MODE_SIZE (mode);
7770 FIXED_VALUE_TYPE value;
7771 tree i_value, i_type;
7773 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7774 return 0;
7776 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7778 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7779 return 0;
7781 value = TREE_FIXED_CST (expr);
7782 i_value = double_int_to_tree (i_type, value.data);
7784 return native_encode_int (i_value, ptr, len, off);
7788 /* Subroutine of native_encode_expr. Encode the REAL_CST
7789 specified by EXPR into the buffer PTR of length LEN bytes.
7790 Return the number of bytes placed in the buffer, or zero
7791 upon failure. */
7793 static int
7794 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7796 tree type = TREE_TYPE (expr);
7797 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7798 int byte, offset, word, words, bitpos;
7799 unsigned char value;
7801 /* There are always 32 bits in each long, no matter the size of
7802 the host's long. We handle floating point representations with
7803 up to 192 bits. */
7804 long tmp[6];
7806 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7807 return 0;
7808 if (off == -1)
7809 off = 0;
7811 if (ptr == NULL)
7812 /* Dry run. */
7813 return MIN (len, total_bytes - off);
7815 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7817 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7819 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7820 bitpos += BITS_PER_UNIT)
7822 byte = (bitpos / BITS_PER_UNIT) & 3;
7823 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7825 if (UNITS_PER_WORD < 4)
7827 word = byte / UNITS_PER_WORD;
7828 if (WORDS_BIG_ENDIAN)
7829 word = (words - 1) - word;
7830 offset = word * UNITS_PER_WORD;
7831 if (BYTES_BIG_ENDIAN)
7832 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7833 else
7834 offset += byte % UNITS_PER_WORD;
7836 else
7838 offset = byte;
7839 if (BYTES_BIG_ENDIAN)
7841 /* Reverse bytes within each long, or within the entire float
7842 if it's smaller than a long (for HFmode). */
7843 offset = MIN (3, total_bytes - 1) - offset;
7844 gcc_assert (offset >= 0);
7847 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7848 if (offset >= off
7849 && offset - off < len)
7850 ptr[offset - off] = value;
7852 return MIN (len, total_bytes - off);
7855 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7856 specified by EXPR into the buffer PTR of length LEN bytes.
7857 Return the number of bytes placed in the buffer, or zero
7858 upon failure. */
7860 static int
7861 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7863 int rsize, isize;
7864 tree part;
7866 part = TREE_REALPART (expr);
7867 rsize = native_encode_expr (part, ptr, len, off);
7868 if (off == -1 && rsize == 0)
7869 return 0;
7870 part = TREE_IMAGPART (expr);
7871 if (off != -1)
7872 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7873 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7874 len - rsize, off);
7875 if (off == -1 && isize != rsize)
7876 return 0;
7877 return rsize + isize;
7880 /* Like native_encode_vector, but only encode the first COUNT elements.
7881 The other arguments are as for native_encode_vector. */
7883 static int
7884 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7885 int off, unsigned HOST_WIDE_INT count)
7887 tree itype = TREE_TYPE (TREE_TYPE (expr));
7888 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7889 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7891 /* This is the only case in which elements can be smaller than a byte.
7892 Element 0 is always in the lsb of the containing byte. */
7893 unsigned int elt_bits = TYPE_PRECISION (itype);
7894 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7895 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7896 return 0;
7898 if (off == -1)
7899 off = 0;
7901 /* Zero the buffer and then set bits later where necessary. */
7902 int extract_bytes = MIN (len, total_bytes - off);
7903 if (ptr)
7904 memset (ptr, 0, extract_bytes);
7906 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7907 unsigned int first_elt = off * elts_per_byte;
7908 unsigned int extract_elts = extract_bytes * elts_per_byte;
7909 for (unsigned int i = 0; i < extract_elts; ++i)
7911 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7912 if (TREE_CODE (elt) != INTEGER_CST)
7913 return 0;
7915 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7917 unsigned int bit = i * elt_bits;
7918 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7921 return extract_bytes;
7924 int offset = 0;
7925 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7926 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7928 if (off >= size)
7930 off -= size;
7931 continue;
7933 tree elem = VECTOR_CST_ELT (expr, i);
7934 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7935 len - offset, off);
7936 if ((off == -1 && res != size) || res == 0)
7937 return 0;
7938 offset += res;
7939 if (offset >= len)
7940 return (off == -1 && i < count - 1) ? 0 : offset;
7941 if (off != -1)
7942 off = 0;
7944 return offset;
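The sub-byte branch above packs 1-bit boolean elements with element 0 in the least significant bit of byte 0. The same packing in isolation, a sketch rather than GCC code:

static void
pack_bool_vector (const bool *elts, unsigned count,
                  unsigned char *ptr, unsigned total_bytes)
{
  /* Zero the buffer first, then set bits, as the code above does.  */
  for (unsigned i = 0; i < total_bytes; i++)
    ptr[i] = 0;
  for (unsigned i = 0; i < count; i++)
    if (elts[i])
      ptr[i / 8] |= 1 << (i % 8);  /* element i lands in bit i */
}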
7947 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7948 specified by EXPR into the buffer PTR of length LEN bytes.
7949 Return the number of bytes placed in the buffer, or zero
7950 upon failure. */
7952 static int
7953 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7955 unsigned HOST_WIDE_INT count;
7956 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7957 return 0;
7958 return native_encode_vector_part (expr, ptr, len, off, count);
7962 /* Subroutine of native_encode_expr. Encode the STRING_CST
7963 specified by EXPR into the buffer PTR of length LEN bytes.
7964 Return the number of bytes placed in the buffer, or zero
7965 upon failure. */
7967 static int
7968 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7970 tree type = TREE_TYPE (expr);
7972 /* Wide-char strings are encoded in target byte order, so encoding
7973 them natively is trivial. */
7974 if (BITS_PER_UNIT != CHAR_BIT
7975 || TREE_CODE (type) != ARRAY_TYPE
7976 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7977 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7978 return 0;
7980 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7981 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7982 return 0;
7983 if (off == -1)
7984 off = 0;
7985 len = MIN (total_bytes - off, len);
7986 if (ptr == NULL)
7987 /* Dry run. */;
7988 else
7990 int written = 0;
7991 if (off < TREE_STRING_LENGTH (expr))
7993 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7994 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7996 memset (ptr + written, 0, len - written);
7998 return len;
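The key subtlety above is that a STRING_CST may be shorter than its array type, with the trailing bytes implicitly zero. The same tail handling in isolation, assuming the offset has already been applied (a sketch, not the GCC routine):

#include <algorithm>
#include <cstring>

static int
encode_padded_string (const char *str, int str_len, int total_bytes,
                      unsigned char *ptr, int len)
{
  len = std::min (total_bytes, len);
  int written = std::min (len, str_len);
  std::memcpy (ptr, str, written);
  /* Bytes past the string constant are zeros.  */
  std::memset (ptr + written, 0, len - written);
  return len;
}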
8002 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8003 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8004 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8005 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8006 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8007 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8008 Return the number of bytes placed in the buffer, or zero upon failure. */
8010 int
8011 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8013 /* We don't support starting at negative offset and -1 is special. */
8014 if (off < -1)
8015 return 0;
8017 switch (TREE_CODE (expr))
8019 case INTEGER_CST:
8020 return native_encode_int (expr, ptr, len, off);
8022 case REAL_CST:
8023 return native_encode_real (expr, ptr, len, off);
8025 case FIXED_CST:
8026 return native_encode_fixed (expr, ptr, len, off);
8028 case COMPLEX_CST:
8029 return native_encode_complex (expr, ptr, len, off);
8031 case VECTOR_CST:
8032 return native_encode_vector (expr, ptr, len, off);
8034 case STRING_CST:
8035 return native_encode_string (expr, ptr, len, off);
8037 default:
8038 return 0;
8042 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8043 and larger than or equal to FIELDSIZE bytes, with underlying mode
8044 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8045 works in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8047 tree
8048 find_bitfield_repr_type (int fieldsize, int len)
8050 machine_mode mode;
8051 for (int pass = 0; pass < 2; pass++)
8053 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8054 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8055 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8056 && known_eq (GET_MODE_PRECISION (mode),
8057 GET_MODE_BITSIZE (mode))
8058 && known_le (GET_MODE_SIZE (mode), len))
8060 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8061 if (ret && TYPE_MODE (ret) == mode)
8062 return ret;
8066 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8067 if (int_n_enabled_p[i]
8068 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8069 && int_n_trees[i].unsigned_type)
8071 tree ret = int_n_trees[i].unsigned_type;
8072 mode = TYPE_MODE (ret);
8073 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8074 && known_eq (GET_MODE_PRECISION (mode),
8075 GET_MODE_BITSIZE (mode))
8076 && known_le (GET_MODE_SIZE (mode), len))
8077 return ret;
8080 return NULL_TREE;
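A host-side caricature of the mode walk above: choose the smallest standard integer width that covers FIELDSIZE bytes without exceeding LEN. The real code additionally insists that the mode's precision equals its bit size, i.e. that it has no padding bits:

#include <initializer_list>

static int
repr_bytes (int fieldsize, int len)
{
  for (int size : { 1, 2, 4, 8, 16 })
    if (size >= fieldsize && size <= len)
      return size;
  return 0;  /* no suitable representative type */
}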
8083 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8084 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8085 to be non-NULL and OFF zero), then in addition to filling the
8086 bytes pointed to by PTR with the value, also clear any bits pointed
8087 to by MASK that are known to be initialized, keeping them as is for
8088 e.g. uninitialized padding bits or uninitialized fields. */
8090 int
8091 native_encode_initializer (tree init, unsigned char *ptr, int len,
8092 int off, unsigned char *mask)
8094 int r;
8096 /* We don't support starting at negative offset and -1 is special. */
8097 if (off < -1 || init == NULL_TREE)
8098 return 0;
8100 gcc_assert (mask == NULL || (off == 0 && ptr));
8102 STRIP_NOPS (init);
8103 switch (TREE_CODE (init))
8105 case VIEW_CONVERT_EXPR:
8106 case NON_LVALUE_EXPR:
8107 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8108 mask);
8109 default:
8110 r = native_encode_expr (init, ptr, len, off);
8111 if (mask)
8112 memset (mask, 0, r);
8113 return r;
8114 case CONSTRUCTOR:
8115 tree type = TREE_TYPE (init);
8116 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8117 if (total_bytes < 0)
8118 return 0;
8119 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8120 return 0;
8121 int o = off == -1 ? 0 : off;
8122 if (TREE_CODE (type) == ARRAY_TYPE)
8124 tree min_index;
8125 unsigned HOST_WIDE_INT cnt;
8126 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8127 constructor_elt *ce;
8129 if (!TYPE_DOMAIN (type)
8130 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8131 return 0;
8133 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8134 if (fieldsize <= 0)
8135 return 0;
8137 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8138 if (ptr)
8139 memset (ptr, '\0', MIN (total_bytes - off, len));
8141 for (cnt = 0; ; cnt++)
8143 tree val = NULL_TREE, index = NULL_TREE;
8144 HOST_WIDE_INT pos = curpos, count = 0;
8145 bool full = false;
8146 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8148 val = ce->value;
8149 index = ce->index;
8151 else if (mask == NULL
8152 || CONSTRUCTOR_NO_CLEARING (init)
8153 || curpos >= total_bytes)
8154 break;
8155 else
8156 pos = total_bytes;
8158 if (index && TREE_CODE (index) == RANGE_EXPR)
8160 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8161 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8162 return 0;
8163 offset_int wpos
8164 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8165 - wi::to_offset (min_index),
8166 TYPE_PRECISION (sizetype));
8167 wpos *= fieldsize;
8168 if (!wi::fits_shwi_p (wpos))
8169 return 0;
8170 pos = wpos.to_shwi ();
8171 offset_int wcount
8172 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8173 - wi::to_offset (TREE_OPERAND (index, 0)),
8174 TYPE_PRECISION (sizetype));
8175 if (!wi::fits_shwi_p (wcount))
8176 return 0;
8177 count = wcount.to_shwi ();
8179 else if (index)
8181 if (TREE_CODE (index) != INTEGER_CST)
8182 return 0;
8183 offset_int wpos
8184 = wi::sext (wi::to_offset (index)
8185 - wi::to_offset (min_index),
8186 TYPE_PRECISION (sizetype));
8187 wpos *= fieldsize;
8188 if (!wi::fits_shwi_p (wpos))
8189 return 0;
8190 pos = wpos.to_shwi ();
8193 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8195 if (valueinit == -1)
8197 tree zero = build_zero_cst (TREE_TYPE (type));
8198 r = native_encode_initializer (zero, ptr + curpos,
8199 fieldsize, 0,
8200 mask + curpos);
8201 if (TREE_CODE (zero) == CONSTRUCTOR)
8202 ggc_free (zero);
8203 if (!r)
8204 return 0;
8205 valueinit = curpos;
8206 curpos += fieldsize;
8208 while (curpos != pos)
8210 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8211 memcpy (mask + curpos, mask + valueinit, fieldsize);
8212 curpos += fieldsize;
8216 curpos = pos;
8217 if (val)
8220 if (off == -1
8221 || (curpos >= off
8222 && (curpos + fieldsize
8223 <= (HOST_WIDE_INT) off + len)))
8225 if (full)
8227 if (ptr)
8228 memcpy (ptr + (curpos - o), ptr + (pos - o),
8229 fieldsize);
8230 if (mask)
8231 memcpy (mask + curpos, mask + pos, fieldsize);
8233 else if (!native_encode_initializer (val,
8235 ? ptr + curpos - o
8236 : NULL,
8237 fieldsize,
8238 off == -1 ? -1
8239 : 0,
8240 mask
8241 ? mask + curpos
8242 : NULL))
8243 return 0;
8244 else
8246 full = true;
8247 pos = curpos;
8250 else if (curpos + fieldsize > off
8251 && curpos < (HOST_WIDE_INT) off + len)
8253 /* Partial overlap. */
8254 unsigned char *p = NULL;
8255 int no = 0;
8256 int l;
8257 gcc_assert (mask == NULL);
8258 if (curpos >= off)
8260 if (ptr)
8261 p = ptr + curpos - off;
8262 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8263 fieldsize);
8265 else
8267 p = ptr;
8268 no = off - curpos;
8269 l = len;
8271 if (!native_encode_initializer (val, p, l, no, NULL))
8272 return 0;
8274 curpos += fieldsize;
8276 while (count-- != 0);
8278 return MIN (total_bytes - off, len);
8280 else if (TREE_CODE (type) == RECORD_TYPE
8281 || TREE_CODE (type) == UNION_TYPE)
8283 unsigned HOST_WIDE_INT cnt;
8284 constructor_elt *ce;
8285 tree fld_base = TYPE_FIELDS (type);
8286 tree to_free = NULL_TREE;
8288 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8289 if (ptr != NULL)
8290 memset (ptr, '\0', MIN (total_bytes - o, len));
8291 for (cnt = 0; ; cnt++)
8293 tree val = NULL_TREE, field = NULL_TREE;
8294 HOST_WIDE_INT pos = 0, fieldsize;
8295 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8297 if (to_free)
8299 ggc_free (to_free);
8300 to_free = NULL_TREE;
8303 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8305 val = ce->value;
8306 field = ce->index;
8307 if (field == NULL_TREE)
8308 return 0;
8310 pos = int_byte_position (field);
8311 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8312 continue;
8314 else if (mask == NULL
8315 || CONSTRUCTOR_NO_CLEARING (init))
8316 break;
8317 else
8318 pos = total_bytes;
8320 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8322 tree fld;
8323 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8325 if (TREE_CODE (fld) != FIELD_DECL)
8326 continue;
8327 if (fld == field)
8328 break;
8329 if (DECL_PADDING_P (fld))
8330 continue;
8331 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8332 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8333 return 0;
8334 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8335 continue;
8336 break;
8338 if (fld == NULL_TREE)
8340 if (ce == NULL)
8341 break;
8342 return 0;
8344 fld_base = DECL_CHAIN (fld);
8345 if (fld != field)
8347 cnt--;
8348 field = fld;
8349 pos = int_byte_position (field);
8350 val = build_zero_cst (TREE_TYPE (fld));
8351 if (TREE_CODE (val) == CONSTRUCTOR)
8352 to_free = val;
8356 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8357 && TYPE_DOMAIN (TREE_TYPE (field))
8358 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8360 if (mask || off != -1)
8361 return 0;
8362 if (val == NULL_TREE)
8363 continue;
8364 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8365 return 0;
8366 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8367 if (fieldsize < 0
8368 || (int) fieldsize != fieldsize
8369 || (pos + fieldsize) > INT_MAX)
8370 return 0;
8371 if (pos + fieldsize > total_bytes)
8373 if (ptr != NULL && total_bytes < len)
8374 memset (ptr + total_bytes, '\0',
8375 MIN (pos + fieldsize, len) - total_bytes);
8376 total_bytes = pos + fieldsize;
8379 else
8381 if (DECL_SIZE_UNIT (field) == NULL_TREE
8382 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8383 return 0;
8384 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8386 if (fieldsize == 0)
8387 continue;
8389 /* Prepare to deal with integral bit-fields and filter out other
8390 bit-fields that do not start and end on a byte boundary. */
8391 if (DECL_BIT_FIELD (field))
8393 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8394 return 0;
8395 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8396 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8398 bpos %= BITS_PER_UNIT;
8399 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8400 epos = fieldsize % BITS_PER_UNIT;
8401 fieldsize += BITS_PER_UNIT - 1;
8402 fieldsize /= BITS_PER_UNIT;
8404 else if (bpos % BITS_PER_UNIT
8405 || DECL_SIZE (field) == NULL_TREE
8406 || !tree_fits_shwi_p (DECL_SIZE (field))
8407 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8408 return 0;
8411 if (off != -1 && pos + fieldsize <= off)
8412 continue;
8414 if (val == NULL_TREE)
8415 continue;
8417 if (DECL_BIT_FIELD (field)
8418 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8420 /* FIXME: Handle PDP endian. */
8421 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8422 return 0;
8424 if (TREE_CODE (val) != INTEGER_CST)
8425 return 0;
8427 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8428 tree repr_type = NULL_TREE;
8429 HOST_WIDE_INT rpos = 0;
8430 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8432 rpos = int_byte_position (repr);
8433 repr_type = TREE_TYPE (repr);
8435 else
8437 repr_type = find_bitfield_repr_type (fieldsize, len);
8438 if (repr_type == NULL_TREE)
8439 return 0;
8440 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8441 gcc_assert (repr_size > 0 && repr_size <= len);
8442 if (pos + repr_size <= o + len)
8443 rpos = pos;
8444 else
8446 rpos = o + len - repr_size;
8447 gcc_assert (rpos <= pos);
8451 if (rpos > pos)
8452 return 0;
8453 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8454 int diff = (TYPE_PRECISION (repr_type)
8455 - TYPE_PRECISION (TREE_TYPE (field)));
8456 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8457 if (!BYTES_BIG_ENDIAN)
8458 w = wi::lshift (w, bitoff);
8459 else
8460 w = wi::lshift (w, diff - bitoff);
8461 val = wide_int_to_tree (repr_type, w);
8463 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8464 / BITS_PER_UNIT + 1];
8465 int l = native_encode_int (val, buf, sizeof buf, 0);
8466 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8467 return 0;
8469 if (ptr == NULL)
8470 continue;
8472 /* If the bitfield does not start at a byte boundary, handle
8473 the partial byte at the start. */
8474 if (bpos
8475 && (off == -1 || (pos >= off && len >= 1)))
8477 if (!BYTES_BIG_ENDIAN)
8479 int msk = (1 << bpos) - 1;
8480 buf[pos - rpos] &= ~msk;
8481 buf[pos - rpos] |= ptr[pos - o] & msk;
8482 if (mask)
8484 if (fieldsize > 1 || epos == 0)
8485 mask[pos] &= msk;
8486 else
8487 mask[pos] &= (msk | ~((1 << epos) - 1));
8490 else
8492 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8493 buf[pos - rpos] &= msk;
8494 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8495 if (mask)
8497 if (fieldsize > 1 || epos == 0)
8498 mask[pos] &= ~msk;
8499 else
8500 mask[pos] &= (~msk
8501 | ((1 << (BITS_PER_UNIT - epos))
8502 - 1));
8506 /* If the bitfield does not end at a byte boundary, handle
8507 the partial byte at the end. */
8508 if (epos
8509 && (off == -1
8510 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8512 if (!BYTES_BIG_ENDIAN)
8514 int msk = (1 << epos) - 1;
8515 buf[pos - rpos + fieldsize - 1] &= msk;
8516 buf[pos - rpos + fieldsize - 1]
8517 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8518 if (mask && (fieldsize > 1 || bpos == 0))
8519 mask[pos + fieldsize - 1] &= ~msk;
8521 else
8523 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8524 buf[pos - rpos + fieldsize - 1] &= ~msk;
8525 buf[pos - rpos + fieldsize - 1]
8526 |= ptr[pos + fieldsize - 1 - o] & msk;
8527 if (mask && (fieldsize > 1 || bpos == 0))
8528 mask[pos + fieldsize - 1] &= msk;
8531 if (off == -1
8532 || (pos >= off
8533 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8535 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8536 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8537 memset (mask + pos + (bpos != 0), 0,
8538 fieldsize - (bpos != 0) - (epos != 0));
8540 else
8542 /* Partial overlap. */
8543 HOST_WIDE_INT fsz = fieldsize;
8544 gcc_assert (mask == NULL);
8545 if (pos < off)
8547 fsz -= (off - pos);
8548 pos = off;
8550 if (pos + fsz > (HOST_WIDE_INT) off + len)
8551 fsz = (HOST_WIDE_INT) off + len - pos;
8552 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8554 continue;
8557 if (off == -1
8558 || (pos >= off
8559 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8561 int fldsize = fieldsize;
8562 if (off == -1)
8564 tree fld = DECL_CHAIN (field);
8565 while (fld)
8567 if (TREE_CODE (fld) == FIELD_DECL)
8568 break;
8569 fld = DECL_CHAIN (fld);
8571 if (fld == NULL_TREE)
8572 fldsize = len - pos;
8574 r = native_encode_initializer (val, ptr ? ptr + pos - o
8575 : NULL,
8576 fldsize,
8577 off == -1 ? -1 : 0,
8578 mask ? mask + pos : NULL);
8579 if (!r)
8580 return 0;
8581 if (off == -1
8582 && fldsize != fieldsize
8583 && r > fieldsize
8584 && pos + r > total_bytes)
8585 total_bytes = pos + r;
8587 else
8589 /* Partial overlap. */
8590 unsigned char *p = NULL;
8591 int no = 0;
8592 int l;
8593 gcc_assert (mask == NULL);
8594 if (pos >= off)
8596 if (ptr)
8597 p = ptr + pos - off;
8598 l = MIN ((HOST_WIDE_INT) off + len - pos,
8599 fieldsize);
8601 else
8603 p = ptr;
8604 no = off - pos;
8605 l = len;
8607 if (!native_encode_initializer (val, p, l, no, NULL))
8608 return 0;
8611 return MIN (total_bytes - off, len);
8613 return 0;
8618 /* Subroutine of native_interpret_expr. Interpret the contents of
8619 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8620 If the buffer cannot be interpreted, return NULL_TREE. */
8622 static tree
8623 native_interpret_int (tree type, const unsigned char *ptr, int len)
8625 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8627 if (total_bytes > len
8628 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8629 return NULL_TREE;
8631 wide_int result = wi::from_buffer (ptr, total_bytes);
8633 return wide_int_to_tree (type, result);
8637 /* Subroutine of native_interpret_expr. Interpret the contents of
8638 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8639 If the buffer cannot be interpreted, return NULL_TREE. */
8641 static tree
8642 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8644 scalar_mode mode = SCALAR_TYPE_MODE (type);
8645 int total_bytes = GET_MODE_SIZE (mode);
8646 double_int result;
8647 FIXED_VALUE_TYPE fixed_value;
8649 if (total_bytes > len
8650 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8651 return NULL_TREE;
8653 result = double_int::from_buffer (ptr, total_bytes);
8654 fixed_value = fixed_from_double_int (result, mode);
8656 return build_fixed (type, fixed_value);
8660 /* Subroutine of native_interpret_expr. Interpret the contents of
8661 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8662 If the buffer cannot be interpreted, return NULL_TREE. */
8664 tree
8665 native_interpret_real (tree type, const unsigned char *ptr, int len)
8667 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8668 int total_bytes = GET_MODE_SIZE (mode);
8669 unsigned char value;
8670 /* There are always 32 bits in each long, no matter the size of
8671 the host's long. We handle floating point representations with
8672 up to 192 bits. */
8673 REAL_VALUE_TYPE r;
8674 long tmp[6];
8676 if (total_bytes > len || total_bytes > 24)
8677 return NULL_TREE;
8678 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8680 memset (tmp, 0, sizeof (tmp));
8681 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8682 bitpos += BITS_PER_UNIT)
8684 /* Both OFFSET and BYTE index within a long;
8685 bitpos indexes the whole float. */
8686 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8687 if (UNITS_PER_WORD < 4)
8689 int word = byte / UNITS_PER_WORD;
8690 if (WORDS_BIG_ENDIAN)
8691 word = (words - 1) - word;
8692 offset = word * UNITS_PER_WORD;
8693 if (BYTES_BIG_ENDIAN)
8694 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8695 else
8696 offset += byte % UNITS_PER_WORD;
8698 else
8700 offset = byte;
8701 if (BYTES_BIG_ENDIAN)
8703 /* Reverse bytes within each long, or within the entire float
8704 if it's smaller than a long (for HFmode). */
8705 offset = MIN (3, total_bytes - 1) - offset;
8706 gcc_assert (offset >= 0);
8709 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8711 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8714 real_from_target (&r, tmp, mode);
8715 return build_real (type, r);
8719 /* Subroutine of native_interpret_expr. Interpret the contents of
8720 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8721 If the buffer cannot be interpreted, return NULL_TREE. */
8723 static tree
8724 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8726 tree etype, rpart, ipart;
8727 int size;
8729 etype = TREE_TYPE (type);
8730 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8731 if (size * 2 > len)
8732 return NULL_TREE;
8733 rpart = native_interpret_expr (etype, ptr, size);
8734 if (!rpart)
8735 return NULL_TREE;
8736 ipart = native_interpret_expr (etype, ptr+size, size);
8737 if (!ipart)
8738 return NULL_TREE;
8739 return build_complex (type, rpart, ipart);
8742 /* Read a vector of type TYPE from the target memory image given by BYTES,
8743 which contains LEN bytes. The vector is known to be encodable using
8744 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8746 Return the vector on success, otherwise return null. */
8748 static tree
8749 native_interpret_vector_part (tree type, const unsigned char *bytes,
8750 unsigned int len, unsigned int npatterns,
8751 unsigned int nelts_per_pattern)
8753 tree elt_type = TREE_TYPE (type);
8754 if (VECTOR_BOOLEAN_TYPE_P (type)
8755 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8757 /* This is the only case in which elements can be smaller than a byte.
8758 Element 0 is always in the lsb of the containing byte. */
8759 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8760 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8761 return NULL_TREE;
8763 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8764 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8766 unsigned int bit_index = i * elt_bits;
8767 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8768 unsigned int lsb = bit_index % BITS_PER_UNIT;
8769 builder.quick_push (bytes[byte_index] & (1 << lsb)
8770 ? build_all_ones_cst (elt_type)
8771 : build_zero_cst (elt_type));
8773 return builder.build ();
8776 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8777 if (elt_bytes * npatterns * nelts_per_pattern > len)
8778 return NULL_TREE;
8780 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8781 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8783 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8784 if (!elt)
8785 return NULL_TREE;
8786 builder.quick_push (elt);
8787 bytes += elt_bytes;
8789 return builder.build ();
8792 /* Subroutine of native_interpret_expr. Interpret the contents of
8793 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8794 If the buffer cannot be interpreted, return NULL_TREE. */
8796 static tree
8797 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8799 unsigned HOST_WIDE_INT size;
8801 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8802 || size > len)
8803 return NULL_TREE;
8805 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8806 return native_interpret_vector_part (type, ptr, len, count, 1);
8810 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8811 the buffer PTR of length LEN as a constant of type TYPE. For
8812 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8813 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8814 return NULL_TREE. */
8816 tree
8817 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8819 switch (TREE_CODE (type))
8821 case INTEGER_TYPE:
8822 case ENUMERAL_TYPE:
8823 case BOOLEAN_TYPE:
8824 case POINTER_TYPE:
8825 case REFERENCE_TYPE:
8826 case OFFSET_TYPE:
8827 return native_interpret_int (type, ptr, len);
8829 case REAL_TYPE:
8830 if (tree ret = native_interpret_real (type, ptr, len))
8832 /* For floating point values in composite modes, punt if this
8833 folding doesn't preserve bit representation. As the mode doesn't
8834 have fixed precision while GCC pretends it does, there could be
8835 valid values that GCC can't really represent accurately.
8836 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8837 bit combinations which GCC doesn't preserve. */
8838 unsigned char buf[24 * 2];
8839 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8840 int total_bytes = GET_MODE_SIZE (mode);
8841 memcpy (buf + 24, ptr, total_bytes);
8842 clear_type_padding_in_mask (type, buf + 24);
8843 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8844 || memcmp (buf + 24, buf, total_bytes) != 0)
8845 return NULL_TREE;
8846 return ret;
8848 return NULL_TREE;
8850 case FIXED_POINT_TYPE:
8851 return native_interpret_fixed (type, ptr, len);
8853 case COMPLEX_TYPE:
8854 return native_interpret_complex (type, ptr, len);
8856 case VECTOR_TYPE:
8857 return native_interpret_vector (type, ptr, len);
8859 default:
8860 return NULL_TREE;
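The REAL_TYPE round-trip guard above can be mimicked with host floats: interpret the bytes, re-encode the value, and refuse the fold unless the bytes come back unchanged. A sketch of the check only, without GCC's padding handling:

#include <cstring>

static bool
bits_survive_roundtrip (const unsigned char *bytes)
{
  float f;
  unsigned char back[sizeof (float)];
  std::memcpy (&f, bytes, sizeof (float));  /* native_interpret_real */
  std::memcpy (back, &f, sizeof (float));   /* native_encode_expr */
  return std::memcmp (bytes, back, sizeof (float)) == 0;
}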
8864 /* Returns true if we can interpret the contents of a native encoding
8865 as TYPE. */
8867 bool
8868 can_native_interpret_type_p (tree type)
8870 switch (TREE_CODE (type))
8872 case INTEGER_TYPE:
8873 case ENUMERAL_TYPE:
8874 case BOOLEAN_TYPE:
8875 case POINTER_TYPE:
8876 case REFERENCE_TYPE:
8877 case FIXED_POINT_TYPE:
8878 case REAL_TYPE:
8879 case COMPLEX_TYPE:
8880 case VECTOR_TYPE:
8881 case OFFSET_TYPE:
8882 return true;
8883 default:
8884 return false;
8888 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8889 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8891 tree
8892 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8893 int len)
8895 vec<constructor_elt, va_gc> *elts = NULL;
8896 if (TREE_CODE (type) == ARRAY_TYPE)
8898 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8899 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8900 return NULL_TREE;
8902 HOST_WIDE_INT cnt = 0;
8903 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8905 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8906 return NULL_TREE;
8907 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8909 if (eltsz == 0)
8910 cnt = 0;
8911 HOST_WIDE_INT pos = 0;
8912 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8914 tree v = NULL_TREE;
8915 if (pos >= len || pos + eltsz > len)
8916 return NULL_TREE;
8917 if (can_native_interpret_type_p (TREE_TYPE (type)))
8919 v = native_interpret_expr (TREE_TYPE (type),
8920 ptr + off + pos, eltsz);
8921 if (v == NULL_TREE)
8922 return NULL_TREE;
8924 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8925 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8926 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8927 eltsz);
8928 if (v == NULL_TREE)
8929 return NULL_TREE;
8930 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8932 return build_constructor (type, elts);
8934 if (TREE_CODE (type) != RECORD_TYPE)
8935 return NULL_TREE;
8936 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8938 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8939 continue;
8940 tree fld = field;
8941 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8942 int diff = 0;
8943 tree v = NULL_TREE;
8944 if (DECL_BIT_FIELD (field))
8946 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8947 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8949 poly_int64 bitoffset;
8950 poly_uint64 field_offset, fld_offset;
8951 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8952 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8953 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8954 else
8955 bitoffset = 0;
8956 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8957 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8958 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8959 - TYPE_PRECISION (TREE_TYPE (field)));
8960 if (!bitoffset.is_constant (&bitoff)
8961 || bitoff < 0
8962 || bitoff > diff)
8963 return NULL_TREE;
8965 else
8967 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8968 return NULL_TREE;
8969 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8970 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8971 bpos %= BITS_PER_UNIT;
8972 fieldsize += bpos;
8973 fieldsize += BITS_PER_UNIT - 1;
8974 fieldsize /= BITS_PER_UNIT;
8975 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8976 if (repr_type == NULL_TREE)
8977 return NULL_TREE;
8978 sz = int_size_in_bytes (repr_type);
8979 if (sz < 0 || sz > len)
8980 return NULL_TREE;
8981 pos = int_byte_position (field);
8982 if (pos < 0 || pos > len || pos + fieldsize > len)
8983 return NULL_TREE;
8984 HOST_WIDE_INT rpos;
8985 if (pos + sz <= len)
8986 rpos = pos;
8987 else
8989 rpos = len - sz;
8990 gcc_assert (rpos <= pos);
8992 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8993 pos = rpos;
8994 diff = (TYPE_PRECISION (repr_type)
8995 - TYPE_PRECISION (TREE_TYPE (field)));
8996 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8997 if (v == NULL_TREE)
8998 return NULL_TREE;
8999 fld = NULL_TREE;
9003 if (fld)
9005 sz = int_size_in_bytes (TREE_TYPE (fld));
9006 if (sz < 0 || sz > len)
9007 return NULL_TREE;
9008 tree byte_pos = byte_position (fld);
9009 if (!tree_fits_shwi_p (byte_pos))
9010 return NULL_TREE;
9011 pos = tree_to_shwi (byte_pos);
9012 if (pos < 0 || pos > len || pos + sz > len)
9013 return NULL_TREE;
9015 if (fld == NULL_TREE)
9016 /* Already handled above. */;
9017 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9019 v = native_interpret_expr (TREE_TYPE (fld),
9020 ptr + off + pos, sz);
9021 if (v == NULL_TREE)
9022 return NULL_TREE;
9024 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9025 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9026 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9027 if (v == NULL_TREE)
9028 return NULL_TREE;
9029 if (fld != field)
9031 if (TREE_CODE (v) != INTEGER_CST)
9032 return NULL_TREE;
9034 /* FIXME: Figure out how to handle PDP endian bitfields. */
9035 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9036 return NULL_TREE;
9037 if (!BYTES_BIG_ENDIAN)
9038 v = wide_int_to_tree (TREE_TYPE (field),
9039 wi::lrshift (wi::to_wide (v), bitoff));
9040 else
9041 v = wide_int_to_tree (TREE_TYPE (field),
9042 wi::lrshift (wi::to_wide (v),
9043 diff - bitoff));
9045 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9047 return build_constructor (type, elts);
9050 /* Routines for manipulating native_encode_expr encoded data if the encoded
9051 or extracted constant positions and/or sizes aren't byte aligned. */
9053 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9054 bits between adjacent elements. AMNT should be within
9055 [0, BITS_PER_UNIT).
9056 Example, AMNT = 2:
9057 00011111|11100000 << 2 = 01111111|10000000
9058 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9060 void
9061 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9062 unsigned int amnt)
9064 if (amnt == 0)
9065 return;
9067 unsigned char carry_over = 0U;
9068 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9069 unsigned char clear_mask = (~0U) << amnt;
9071 for (unsigned int i = 0; i < sz; i++)
9073 unsigned prev_carry_over = carry_over;
9074 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9076 ptr[i] <<= amnt;
9077 if (i != 0)
9079 ptr[i] &= clear_mask;
9080 ptr[i] |= prev_carry_over;
9085 /* Like shift_bytes_in_array_left but for big-endian.
9086 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9087 bits between adjacent elements. AMNT should be within
9088 [0, BITS_PER_UNIT).
9089 Example, AMNT = 2:
9090 00011111|11100000 >> 2 = 00000111|11111000
9091 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9093 void
9094 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9095 unsigned int amnt)
9097 if (amnt == 0)
9098 return;
9100 unsigned char carry_over = 0U;
9101 unsigned char carry_mask = ~(~0U << amnt);
9103 for (unsigned int i = 0; i < sz; i++)
9105 unsigned prev_carry_over = carry_over;
9106 carry_over = ptr[i] & carry_mask;
9108 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9109 ptr[i] >>= amnt;
9110 ptr[i] |= prev_carry_over;
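The worked example from the comment above shift_bytes_in_array_left, runnable on its own (host-only, assuming 8-bit bytes):

#include <cstdio>

int
main ()
{
  /* 00011111|11100000 << 2 should give 01111111|10000000,
     i.e. PTR[1]|PTR[0] = 0x1f|0xe0 becomes 0x7f|0x80.  */
  unsigned char ptr[2] = { 0xe0, 0x1f };
  unsigned amnt = 2, carry_over = 0;
  for (unsigned i = 0; i < 2; i++)
    {
      unsigned prev = carry_over;
      carry_over = ptr[i] >> (8 - amnt);  /* bits that move up a byte */
      ptr[i] = (unsigned char) ((ptr[i] << amnt) | prev);
    }
  std::printf ("%02x %02x\n", ptr[0], ptr[1]);  /* prints: 80 7f */
}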
9114 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9115 directly on the VECTOR_CST encoding, in a way that works for variable-
9116 length vectors. Return the resulting VECTOR_CST on success or null
9117 on failure. */
9119 static tree
9120 fold_view_convert_vector_encoding (tree type, tree expr)
9122 tree expr_type = TREE_TYPE (expr);
9123 poly_uint64 type_bits, expr_bits;
9124 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9125 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9126 return NULL_TREE;
9128 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9129 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9130 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9131 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9133 /* We can only preserve the semantics of a stepped pattern if the new
9134 vector element is an integer of the same size. */
9135 if (VECTOR_CST_STEPPED_P (expr)
9136 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9137 return NULL_TREE;
9139 /* The number of bits needed to encode one element from every pattern
9140 of the original vector. */
9141 unsigned int expr_sequence_bits
9142 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9144 /* The number of bits needed to encode one element from every pattern
9145 of the result. */
9146 unsigned int type_sequence_bits
9147 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9149 /* Don't try to read more bytes than are available, which can happen
9150 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9151 The general VIEW_CONVERT handling can cope with that case, so there's
9152 no point complicating things here. */
9153 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9154 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9155 BITS_PER_UNIT);
9156 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9157 if (known_gt (buffer_bits, expr_bits))
9158 return NULL_TREE;
9160 /* Get enough bytes of EXPR to form the new encoding. */
9161 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9162 buffer.quick_grow (buffer_bytes);
9163 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9164 buffer_bits / expr_elt_bits)
9165 != (int) buffer_bytes)
9166 return NULL_TREE;
9168 /* Reencode the bytes as TYPE. */
9169 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9170 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9171 type_npatterns, nelts_per_pattern);
9174 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9175 TYPE at compile-time. If we're unable to perform the conversion
9176 return NULL_TREE. */
9178 static tree
9179 fold_view_convert_expr (tree type, tree expr)
9181 /* We support up to 512-bit values (for V8DFmode). */
9182 unsigned char buffer[64];
9183 int len;
9185 /* Check that the host and target are sane. */
9186 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9187 return NULL_TREE;
9189 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9190 if (tree res = fold_view_convert_vector_encoding (type, expr))
9191 return res;
9193 len = native_encode_expr (expr, buffer, sizeof (buffer));
9194 if (len == 0)
9195 return NULL_TREE;
9197 return native_interpret_expr (type, buffer, len);
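At the level of host types, the encode-then-interpret pair above is a byte-for-byte reinterpretation, the compile-time analogue of:

#include <cstdint>
#include <cstring>

static uint32_t
view_convert_float_to_u32 (float f)
{
  unsigned char buffer[sizeof (float)];
  std::memcpy (buffer, &f, sizeof buffer);  /* native_encode_expr */
  uint32_t u;
  std::memcpy (&u, buffer, sizeof u);       /* native_interpret_expr */
  return u;
}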
9200 /* Build an expression for the address of T. Folds away INDIRECT_REF
9201 to avoid confusing the gimplify process. */
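/* For example, &*p simplifies to p (converted to PTRTYPE if needed),
   and &MEM[p, 0] likewise simplifies to a conversion of p.  */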
9203 tree
9204 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9206 /* The size of the object is not relevant when talking about its address. */
9207 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9208 t = TREE_OPERAND (t, 0);
9210 if (INDIRECT_REF_P (t))
9212 t = TREE_OPERAND (t, 0);
9214 if (TREE_TYPE (t) != ptrtype)
9215 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9217 else if (TREE_CODE (t) == MEM_REF
9218 && integer_zerop (TREE_OPERAND (t, 1)))
9220 t = TREE_OPERAND (t, 0);
9222 if (TREE_TYPE (t) != ptrtype)
9223 t = fold_convert_loc (loc, ptrtype, t);
9225 else if (TREE_CODE (t) == MEM_REF
9226 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9227 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9228 TREE_OPERAND (t, 0),
9229 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9230 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9232 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9234 if (TREE_TYPE (t) != ptrtype)
9235 t = fold_convert_loc (loc, ptrtype, t);
9237 else
9238 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9240 return t;
9243 /* Build an expression for the address of T. */
9245 tree
9246 build_fold_addr_expr_loc (location_t loc, tree t)
9248 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9250 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9253 /* Fold a unary expression of code CODE and type TYPE with operand
9254 OP0. Return the folded expression if folding is successful.
9255 Otherwise, return NULL_TREE. */
9257 tree
9258 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9260 tree tem;
9261 tree arg0;
9262 enum tree_code_class kind = TREE_CODE_CLASS (code);
9264 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9265 && TREE_CODE_LENGTH (code) == 1);
9267 arg0 = op0;
9268 if (arg0)
9270 if (CONVERT_EXPR_CODE_P (code)
9271 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9273 /* Don't use STRIP_NOPS, because signedness of argument type
9274 matters. */
9275 STRIP_SIGN_NOPS (arg0);
9277 else
9279 /* Strip any conversions that don't change the mode. This
9280 is safe for every expression, except for a comparison
9281 expression because its signedness is derived from its
9282 operands.
9284 Note that this is done as an internal manipulation within
9285 the constant folder, in order to find the simplest
9286 representation of the arguments so that their form can be
9287 studied. In any case, the appropriate type conversions
9288 should be put back in the tree that will get out of the
9289 constant folder. */
9290 STRIP_NOPS (arg0);
9293 if (CONSTANT_CLASS_P (arg0))
9295 tree tem = const_unop (code, type, arg0);
9296 if (tem)
9298 if (TREE_TYPE (tem) != type)
9299 tem = fold_convert_loc (loc, type, tem);
9300 return tem;
9305 tem = generic_simplify (loc, code, type, op0);
9306 if (tem)
9307 return tem;
9309 if (TREE_CODE_CLASS (code) == tcc_unary)
9311 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9312 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9313 fold_build1_loc (loc, code, type,
9314 fold_convert_loc (loc, TREE_TYPE (op0),
9315 TREE_OPERAND (arg0, 1))));
9316 else if (TREE_CODE (arg0) == COND_EXPR)
9318 tree arg01 = TREE_OPERAND (arg0, 1);
9319 tree arg02 = TREE_OPERAND (arg0, 2);
9320 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9321 arg01 = fold_build1_loc (loc, code, type,
9322 fold_convert_loc (loc,
9323 TREE_TYPE (op0), arg01));
9324 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9325 arg02 = fold_build1_loc (loc, code, type,
9326 fold_convert_loc (loc,
9327 TREE_TYPE (op0), arg02));
9328 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9329 arg01, arg02);
9331 /* If this was a conversion, and all we did was to move it
9332 inside the COND_EXPR, bring it back out. But leave it if
9333 it is a conversion from integer to integer and the
9334 result precision is no wider than a word since such a
9335 conversion is cheap and may be optimized away by combine,
9336 while it couldn't if it were outside the COND_EXPR. Then return
9337 so we don't get into an infinite recursion loop taking the
9338 conversion out and then back in. */
9340 if ((CONVERT_EXPR_CODE_P (code)
9341 || code == NON_LVALUE_EXPR)
9342 && TREE_CODE (tem) == COND_EXPR
9343 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9344 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9345 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9346 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9347 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9348 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9349 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9350 && (INTEGRAL_TYPE_P
9351 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9352 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9353 || flag_syntax_only))
9354 tem = build1_loc (loc, code, type,
9355 build3 (COND_EXPR,
9356 TREE_TYPE (TREE_OPERAND
9357 (TREE_OPERAND (tem, 1), 0)),
9358 TREE_OPERAND (tem, 0),
9359 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9360 TREE_OPERAND (TREE_OPERAND (tem, 2),
9361 0)));
9362 return tem;
9366 switch (code)
9368 case NON_LVALUE_EXPR:
9369 if (!maybe_lvalue_p (op0))
9370 return fold_convert_loc (loc, type, op0);
9371 return NULL_TREE;
9373 CASE_CONVERT:
9374 case FLOAT_EXPR:
9375 case FIX_TRUNC_EXPR:
9376 if (COMPARISON_CLASS_P (op0))
9378 /* If we have (type) (a CMP b) and type is an integral type, return a
9379 new expression involving the new type. Canonicalize
9380 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9381 non-integral type.
9382 Do not fold the result as that would not simplify further, also
9383 folding again results in recursions. */
9384 if (TREE_CODE (type) == BOOLEAN_TYPE)
9385 return build2_loc (loc, TREE_CODE (op0), type,
9386 TREE_OPERAND (op0, 0),
9387 TREE_OPERAND (op0, 1));
9388 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9389 && TREE_CODE (type) != VECTOR_TYPE)
9390 return build3_loc (loc, COND_EXPR, type, op0,
9391 constant_boolean_node (true, type),
9392 constant_boolean_node (false, type));
9395 /* Handle (T *)&A.B.C for A being of type T and B and C
9396 living at offset zero. This occurs frequently in
9397 C++ upcasting and then accessing the base. */
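/* For instance, given struct A { struct B { int c; } b; } a;,
   (struct A *) &a.b.c folds to &a, because both b and c live at
   offset zero within A.  */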
9398 if (TREE_CODE (op0) == ADDR_EXPR
9399 && POINTER_TYPE_P (type)
9400 && handled_component_p (TREE_OPERAND (op0, 0)))
9402 poly_int64 bitsize, bitpos;
9403 tree offset;
9404 machine_mode mode;
9405 int unsignedp, reversep, volatilep;
9406 tree base
9407 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9408 &offset, &mode, &unsignedp, &reversep,
9409 &volatilep);
9410 /* If the reference was to a (constant) zero offset, we can use
9411 the address of the base if it has the same base type
9412 as the result type and the pointer type is unqualified. */
9413 if (!offset
9414 && known_eq (bitpos, 0)
9415 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9416 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9417 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9418 return fold_convert_loc (loc, type,
9419 build_fold_addr_expr_loc (loc, base));
9422 if (TREE_CODE (op0) == MODIFY_EXPR
9423 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9424 /* Detect assigning a bitfield. */
9425 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9426 && DECL_BIT_FIELD
9427 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9429 /* Don't leave an assignment inside a conversion
9430 unless assigning a bitfield. */
9431 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9432 /* First do the assignment, then return converted constant. */
9433 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9434 suppress_warning (tem /* What warning? */);
9435 TREE_USED (tem) = 1;
9436 return tem;
9439 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9440 constant (if x has signed type, the sign bit cannot be set
9441 in c). This folds extension into the BIT_AND_EXPR.
9442 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9443 very likely don't have maximal range for their precision and this
9444 transformation effectively doesn't preserve non-maximal ranges. */
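/* For example, with unsigned char X, (int) (X & 0x7f) becomes
   (int) X & 0x7f, folding the widening conversion into the
   BIT_AND_EXPR.  */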
9445 if (TREE_CODE (type) == INTEGER_TYPE
9446 && TREE_CODE (op0) == BIT_AND_EXPR
9447 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9449 tree and_expr = op0;
9450 tree and0 = TREE_OPERAND (and_expr, 0);
9451 tree and1 = TREE_OPERAND (and_expr, 1);
9452 int change = 0;
9454 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9455 || (TYPE_PRECISION (type)
9456 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9457 change = 1;
9458 else if (TYPE_PRECISION (TREE_TYPE (and1))
9459 <= HOST_BITS_PER_WIDE_INT
9460 && tree_fits_uhwi_p (and1))
9462 unsigned HOST_WIDE_INT cst;
9464 cst = tree_to_uhwi (and1);
9465 cst &= HOST_WIDE_INT_M1U
9466 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9467 change = (cst == 0);
9468 if (change
9469 && !flag_syntax_only
9470 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9471 == ZERO_EXTEND))
9473 tree uns = unsigned_type_for (TREE_TYPE (and0));
9474 and0 = fold_convert_loc (loc, uns, and0);
9475 and1 = fold_convert_loc (loc, uns, and1);
9478 if (change)
9480 tem = force_fit_type (type, wi::to_widest (and1), 0,
9481 TREE_OVERFLOW (and1));
9482 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9483 fold_convert_loc (loc, type, and0), tem);
9487 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9488 cast (T1)X will fold away. We assume that this happens when X itself
9489 is a cast. */
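/* For instance, with char *p, (char *) ((int *) p p+ 4) becomes
   p p+ 4, because the cast of (int *) p back to char * folds away.  */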
9490 if (POINTER_TYPE_P (type)
9491 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9492 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9494 tree arg00 = TREE_OPERAND (arg0, 0);
9495 tree arg01 = TREE_OPERAND (arg0, 1);
9497 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9498 when the pointed type needs higher alignment than
9499 the p+ first operand's pointed type. */
9500 if (!in_gimple_form
9501 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9502 && (min_align_of_type (TREE_TYPE (type))
9503 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9504 return NULL_TREE;
9506 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9507 when type is a reference type and arg00's type is not,
9508 because arg00 could be validly nullptr and if arg01 doesn't return,
9509 we don't want false positive binding of reference to nullptr. */
9510 if (TREE_CODE (type) == REFERENCE_TYPE
9511 && !in_gimple_form
9512 && sanitize_flags_p (SANITIZE_NULL)
9513 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9514 return NULL_TREE;
9516 arg00 = fold_convert_loc (loc, type, arg00);
9517 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9520 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9521 of the same precision, and X is an integer type not narrower than
9522 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
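/* E.g. for int X, (int) ~(unsigned int) X becomes ~X, since int and
   unsigned int have the same precision.  */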
9523 if (INTEGRAL_TYPE_P (type)
9524 && TREE_CODE (op0) == BIT_NOT_EXPR
9525 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9526 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9527 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9529 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9530 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9531 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9532 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9533 fold_convert_loc (loc, type, tem));
9536 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9537 type of X and Y (integer types only). */
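/* For example, (short) ((int) a * (int) b) can become
   (short) ((unsigned short) a * (unsigned short) b): the unsigned
   type is used so that no new overflow is introduced.  */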
9538 if (INTEGRAL_TYPE_P (type)
9539 && TREE_CODE (op0) == MULT_EXPR
9540 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9541 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9542 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9543 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9545 /* Be careful not to introduce new overflows. */
9546 tree mult_type;
9547 if (TYPE_OVERFLOW_WRAPS (type))
9548 mult_type = type;
9549 else
9550 mult_type = unsigned_type_for (type);
9552 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9554 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9555 fold_convert_loc (loc, mult_type,
9556 TREE_OPERAND (op0, 0)),
9557 fold_convert_loc (loc, mult_type,
9558 TREE_OPERAND (op0, 1)));
9559 return fold_convert_loc (loc, type, tem);
9563 return NULL_TREE;
9565 case VIEW_CONVERT_EXPR:
9566 if (TREE_CODE (op0) == MEM_REF)
9568 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9569 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9570 tem = fold_build2_loc (loc, MEM_REF, type,
9571 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9572 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9573 return tem;
9576 return NULL_TREE;
9578 case NEGATE_EXPR:
9579 tem = fold_negate_expr (loc, arg0);
9580 if (tem)
9581 return fold_convert_loc (loc, type, tem);
9582 return NULL_TREE;
9584 case ABS_EXPR:
9585 /* Convert fabs((double)float) into (double)fabsf(float). */
9586 if (TREE_CODE (arg0) == NOP_EXPR
9587 && TREE_CODE (type) == REAL_TYPE)
9589 tree targ0 = strip_float_extensions (arg0);
9590 if (targ0 != arg0)
9591 return fold_convert_loc (loc, type,
9592 fold_build1_loc (loc, ABS_EXPR,
9593 TREE_TYPE (targ0),
9594 targ0));
9596 return NULL_TREE;
9598 case BIT_NOT_EXPR:
9599 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
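/* For instance, with constant C, ~(X ^ C) becomes X ^ ~C,
   because ~C simplifies to another constant.  */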
9600 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9601 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9602 fold_convert_loc (loc, type,
9603 TREE_OPERAND (arg0, 0)))))
9604 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9605 fold_convert_loc (loc, type,
9606 TREE_OPERAND (arg0, 1)));
9607 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9608 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9609 fold_convert_loc (loc, type,
9610 TREE_OPERAND (arg0, 1)))))
9611 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9612 fold_convert_loc (loc, type,
9613 TREE_OPERAND (arg0, 0)), tem);
9615 return NULL_TREE;
9617 case TRUTH_NOT_EXPR:
9618 /* Note that the operand of this must be an int
9619 and its values must be 0 or 1.
9620 ("true" is a fixed value perhaps depending on the language,
9621 but we don't handle values other than 1 correctly yet.) */
9622 tem = fold_truth_not_expr (loc, arg0);
9623 if (!tem)
9624 return NULL_TREE;
9625 return fold_convert_loc (loc, type, tem);
9627 case INDIRECT_REF:
9628 /* Fold *&X to X if X is an lvalue. */
9629 if (TREE_CODE (op0) == ADDR_EXPR)
9631 tree op00 = TREE_OPERAND (op0, 0);
9632 if ((VAR_P (op00)
9633 || TREE_CODE (op00) == PARM_DECL
9634 || TREE_CODE (op00) == RESULT_DECL)
9635 && !TREE_READONLY (op00))
9636 return op00;
9638 return NULL_TREE;
9640 default:
9641 return NULL_TREE;
9642 } /* switch (code) */
9646 /* If the operation was a conversion do _not_ mark a resulting constant
9647 with TREE_OVERFLOW if the original constant was not. These conversions
9648 have implementation defined behavior and retaining the TREE_OVERFLOW
9649 flag here would confuse later passes such as VRP. */
9650 tree
9651 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9652 tree type, tree op0)
9654 tree res = fold_unary_loc (loc, code, type, op0);
9655 if (res
9656 && TREE_CODE (res) == INTEGER_CST
9657 && TREE_CODE (op0) == INTEGER_CST
9658 && CONVERT_EXPR_CODE_P (code))
9659 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9661 return res;
9664 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9665 operands OP0 and OP1. LOC is the location of the resulting expression.
9666 ARG0 and ARG1 are the NOP-stripped (STRIP_NOPS) results of OP0 and OP1.
9667 Return the folded expression if folding is successful. Otherwise,
9668 return NULL_TREE. */
9669 static tree
9670 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9671 tree arg0, tree arg1, tree op0, tree op1)
9673 tree tem;
9675 /* We only do these simplifications if we are optimizing. */
9676 if (!optimize)
9677 return NULL_TREE;
9679 /* Check for things like (A || B) && (A || C). We can convert this
9680 to A || (B && C). Note that either operator can be any of the four
9681 truth and/or operations and the transformation will still be
9682 valid. Also note that we only care about order for the
9683 ANDIF and ORIF operators. If B contains side effects, this
9684 might change the truth-value of A. */
9685 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9686 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9687 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9688 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9689 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9690 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9692 tree a00 = TREE_OPERAND (arg0, 0);
9693 tree a01 = TREE_OPERAND (arg0, 1);
9694 tree a10 = TREE_OPERAND (arg1, 0);
9695 tree a11 = TREE_OPERAND (arg1, 1);
9696 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9697 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9698 && (code == TRUTH_AND_EXPR
9699 || code == TRUTH_OR_EXPR));
9701 if (operand_equal_p (a00, a10, 0))
9702 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9703 fold_build2_loc (loc, code, type, a01, a11));
9704 else if (commutative && operand_equal_p (a00, a11, 0))
9705 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9706 fold_build2_loc (loc, code, type, a01, a10));
9707 else if (commutative && operand_equal_p (a01, a10, 0))
9708 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9709 fold_build2_loc (loc, code, type, a00, a11));
9711 /* This case is tricky because we must either have commutative
9712 operators or else A10 must not have side-effects. */
9714 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9715 && operand_equal_p (a01, a11, 0))
9716 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9717 fold_build2_loc (loc, code, type, a00, a10),
9718 a01);
9721 /* See if we can build a range comparison. */
9722 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9723 return tem;
9725 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9726 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9728 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9729 if (tem)
9730 return fold_build2_loc (loc, code, type, tem, arg1);
9733 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9734 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9736 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9737 if (tem)
9738 return fold_build2_loc (loc, code, type, arg0, tem);
9741 /* Check for the possibility of merging component references. If our
9742 lhs is another similar operation, try to merge its rhs with our
9743 rhs. Then try to merge our lhs and rhs. */
9744 if (TREE_CODE (arg0) == code
9745 && (tem = fold_truth_andor_1 (loc, code, type,
9746 TREE_OPERAND (arg0, 1), arg1)) != 0)
9747 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9749 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9750 return tem;
9752 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9753 if (param_logical_op_non_short_circuit != -1)
9754 logical_op_non_short_circuit
9755 = param_logical_op_non_short_circuit;
9756 if (logical_op_non_short_circuit
9757 && !sanitize_coverage_p ()
9758 && (code == TRUTH_AND_EXPR
9759 || code == TRUTH_ANDIF_EXPR
9760 || code == TRUTH_OR_EXPR
9761 || code == TRUTH_ORIF_EXPR))
9763 enum tree_code ncode, icode;
9765 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9766 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9767 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9769 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9770 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9771 We don't want to pack more than two leaves into a non-IF AND/OR
9772 expression.
9773 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9774 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9775 If the inner right-hand side of the left-hand operand has
9776 side effects, or isn't simple, then we can't add to it,
9777 as otherwise we might destroy the if-sequence. */
9778 if (TREE_CODE (arg0) == icode
9779 && simple_condition_p (arg1)
9780 /* Needed for sequence points to handle trapping operations
9781 and side effects. */
9782 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9784 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9785 arg1);
9786 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9787 tem);
9789 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9790 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9791 else if (TREE_CODE (arg1) == icode
9792 && simple_condition_p (arg0)
9793 /* Needed for sequence points to handle trapping operations
9794 and side effects. */
9795 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9797 tem = fold_build2_loc (loc, ncode, type,
9798 arg0, TREE_OPERAND (arg1, 0));
9799 return fold_build2_loc (loc, icode, type, tem,
9800 TREE_OPERAND (arg1, 1));
9802 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9803 into (A OR B).
9804 For sequence point consistency, we need to check for trapping,
9805 and side-effects. */
9806 else if (code == icode && simple_condition_p (arg0)
9807 && simple_condition_p (arg1))
9808 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9811 return NULL_TREE;
9814 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9815 by changing CODE to reduce the magnitude of constants involved in
9816 ARG0 of the comparison.
9817 Returns a canonicalized comparison tree if a simplification was
9818 possible, otherwise returns NULL_TREE.
9819 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9820 valid if signed overflow is undefined. */
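/* For example, assuming signed overflow is undefined,
   X - 1 < Y canonicalizes to X <= Y: the constant shrinks from 1 to 0
   and then folds away entirely.  */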
9822 static tree
9823 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9824 tree arg0, tree arg1,
9825 bool *strict_overflow_p)
9827 enum tree_code code0 = TREE_CODE (arg0);
9828 tree t, cst0 = NULL_TREE;
9829 int sgn0;
9831 /* Match A +- CST code arg1. We can change this only if overflow
9832 is undefined. */
9833 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9834 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9835 /* In principle pointers also have undefined overflow behavior,
9836 but that causes problems elsewhere. */
9837 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9838 && (code0 == MINUS_EXPR
9839 || code0 == PLUS_EXPR)
9840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9841 return NULL_TREE;
9843 /* Identify the constant in arg0 and its sign. */
9844 cst0 = TREE_OPERAND (arg0, 1);
9845 sgn0 = tree_int_cst_sgn (cst0);
9847 /* Overflowed constants and zero will cause problems. */
9848 if (integer_zerop (cst0)
9849 || TREE_OVERFLOW (cst0))
9850 return NULL_TREE;
9852 /* See if we can reduce the magnitude of the constant in
9853 arg0 by changing the comparison code. */
9854 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9855 if (code == LT_EXPR
9856 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9857 code = LE_EXPR;
9858 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9859 else if (code == GT_EXPR
9860 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9861 code = GE_EXPR;
9862 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9863 else if (code == LE_EXPR
9864 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9865 code = LT_EXPR;
9866 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9867 else if (code == GE_EXPR
9868 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9869 code = GT_EXPR;
9870 else
9871 return NULL_TREE;
9872 *strict_overflow_p = true;
9874 /* Now build the constant reduced in magnitude. But not if that
9875 would produce one outside of its type's range. */
9876 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9877 && ((sgn0 == 1
9878 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9879 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9880 || (sgn0 == -1
9881 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9882 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9883 return NULL_TREE;
9885 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9886 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9887 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9888 t = fold_convert (TREE_TYPE (arg1), t);
9890 return fold_build2_loc (loc, code, type, t, arg1);
9893 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9894 overflow further. Try to decrease the magnitude of constants involved
9895 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9896 and putting sole constants at the second argument position.
9897 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9899 static tree
9900 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9901 tree arg0, tree arg1)
9903 tree t;
9904 bool strict_overflow_p;
9905 const char * const warnmsg = G_("assuming signed overflow does not occur "
9906 "when reducing constant in comparison");
9908 /* Try canonicalization by simplifying arg0. */
9909 strict_overflow_p = false;
9910 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9911 &strict_overflow_p);
9912 if (t)
9914 if (strict_overflow_p)
9915 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9916 return t;
9919 /* Try canonicalization by simplifying arg1 using the swapped
9920 comparison. */
9921 code = swap_tree_comparison (code);
9922 strict_overflow_p = false;
9923 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9924 &strict_overflow_p);
9925 if (t && strict_overflow_p)
9926 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9927 return t;
9930 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9931 space. This is used to avoid issuing overflow warnings for
9932 expressions like &p->x which cannot wrap. */
9934 static bool
9935 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9937 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9938 return true;
9940 if (maybe_lt (bitpos, 0))
9941 return true;
9943 poly_wide_int wi_offset;
9944 int precision = TYPE_PRECISION (TREE_TYPE (base));
9945 if (offset == NULL_TREE)
9946 wi_offset = wi::zero (precision);
9947 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9948 return true;
9949 else
9950 wi_offset = wi::to_poly_wide (offset);
9952 wi::overflow_type overflow;
9953 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9954 precision);
9955 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9956 if (overflow)
9957 return true;
9959 poly_uint64 total_hwi, size;
9960 if (!total.to_uhwi (&total_hwi)
9961 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9962 &size)
9963 || known_eq (size, 0U))
9964 return true;
9966 if (known_le (total_hwi, size))
9967 return false;
9969 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9970 array. */
9971 if (TREE_CODE (base) == ADDR_EXPR
9972 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9973 &size)
9974 && maybe_ne (size, 0U)
9975 && known_le (total_hwi, size))
9976 return false;
9978 return true;
9981 /* Return a positive integer when the symbol DECL is known to have
9982 a nonzero address, zero when it's known not to (e.g., it's a weak
9983 symbol), and a negative integer when the symbol is not yet in the
9984 symbol table and so whether or not its address is zero is unknown.
9985 For function local objects, always return a positive integer. */
9986 static int
9987 maybe_nonzero_address (tree decl)
9989 /* Normally, don't do anything for variables and functions before symtab is
9990 built; it is quite possible that DECL will be declared weak later.
9991 But if folding_initializer, we need a constant answer now, so create
9992 the symtab entry and prevent later weak declaration. */
9993 if (DECL_P (decl) && decl_in_symtab_p (decl))
9994 if (struct symtab_node *symbol
9995 = (folding_initializer
9996 ? symtab_node::get_create (decl)
9997 : symtab_node::get (decl)))
9998 return symbol->nonzero_address ();
10000 /* Function local objects are never NULL. */
10001 if (DECL_P (decl)
10002 && (DECL_CONTEXT (decl)
10003 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10004 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10005 return 1;
10007 return -1;
10010 /* Subroutine of fold_binary. This routine performs all of the
10011 transformations that are common to the equality/inequality
10012 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10013 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10014 fold_binary itself should go through fold_binary. Fold a comparison with
10015 tree code CODE and type TYPE with operands OP0 and OP1. Return
10016 the folded comparison or NULL_TREE. */
10018 static tree
10019 fold_comparison (location_t loc, enum tree_code code, tree type,
10020 tree op0, tree op1)
10022 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10023 tree arg0, arg1, tem;
10025 arg0 = op0;
10026 arg1 = op1;
10028 STRIP_SIGN_NOPS (arg0);
10029 STRIP_SIGN_NOPS (arg1);
10031 /* For comparisons of pointers we can decompose it to a compile time
10032 comparison of the base objects and the offsets into the object.
10033 This requires at least one operand being an ADDR_EXPR or a
10034 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
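/* For instance, given struct s { int x; int y; } a;, the test
   &a.x == &a.y folds to false: both sides share the same base object
   but the field bit positions differ.  */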
10035 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10036 && (TREE_CODE (arg0) == ADDR_EXPR
10037 || TREE_CODE (arg1) == ADDR_EXPR
10038 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10039 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10041 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10042 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10043 machine_mode mode;
10044 int volatilep, reversep, unsignedp;
10045 bool indirect_base0 = false, indirect_base1 = false;
10047 /* Get base and offset for the access. Strip ADDR_EXPR for
10048 get_inner_reference, but put it back by stripping INDIRECT_REF
10049 off the base object if possible. indirect_baseN will be true
10050 if baseN is not an address but refers to the object itself. */
10051 base0 = arg0;
10052 if (TREE_CODE (arg0) == ADDR_EXPR)
10054 base0
10055 = get_inner_reference (TREE_OPERAND (arg0, 0),
10056 &bitsize, &bitpos0, &offset0, &mode,
10057 &unsignedp, &reversep, &volatilep);
10058 if (INDIRECT_REF_P (base0))
10059 base0 = TREE_OPERAND (base0, 0);
10060 else
10061 indirect_base0 = true;
10063 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10065 base0 = TREE_OPERAND (arg0, 0);
10066 STRIP_SIGN_NOPS (base0);
10067 if (TREE_CODE (base0) == ADDR_EXPR)
10069 base0
10070 = get_inner_reference (TREE_OPERAND (base0, 0),
10071 &bitsize, &bitpos0, &offset0, &mode,
10072 &unsignedp, &reversep, &volatilep);
10073 if (INDIRECT_REF_P (base0))
10074 base0 = TREE_OPERAND (base0, 0);
10075 else
10076 indirect_base0 = true;
10078 if (offset0 == NULL_TREE || integer_zerop (offset0))
10079 offset0 = TREE_OPERAND (arg0, 1);
10080 else
10081 offset0 = size_binop (PLUS_EXPR, offset0,
10082 TREE_OPERAND (arg0, 1));
10083 if (poly_int_tree_p (offset0))
10085 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10086 TYPE_PRECISION (sizetype));
10087 tem <<= LOG2_BITS_PER_UNIT;
10088 tem += bitpos0;
10089 if (tem.to_shwi (&bitpos0))
10090 offset0 = NULL_TREE;
10094 base1 = arg1;
10095 if (TREE_CODE (arg1) == ADDR_EXPR)
10097 base1
10098 = get_inner_reference (TREE_OPERAND (arg1, 0),
10099 &bitsize, &bitpos1, &offset1, &mode,
10100 &unsignedp, &reversep, &volatilep);
10101 if (INDIRECT_REF_P (base1))
10102 base1 = TREE_OPERAND (base1, 0);
10103 else
10104 indirect_base1 = true;
10106 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10108 base1 = TREE_OPERAND (arg1, 0);
10109 STRIP_SIGN_NOPS (base1);
10110 if (TREE_CODE (base1) == ADDR_EXPR)
10112 base1
10113 = get_inner_reference (TREE_OPERAND (base1, 0),
10114 &bitsize, &bitpos1, &offset1, &mode,
10115 &unsignedp, &reversep, &volatilep);
10116 if (INDIRECT_REF_P (base1))
10117 base1 = TREE_OPERAND (base1, 0);
10118 else
10119 indirect_base1 = true;
10121 if (offset1 == NULL_TREE || integer_zerop (offset1))
10122 offset1 = TREE_OPERAND (arg1, 1);
10123 else
10124 offset1 = size_binop (PLUS_EXPR, offset1,
10125 TREE_OPERAND (arg1, 1));
10126 if (poly_int_tree_p (offset1))
10128 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10129 TYPE_PRECISION (sizetype));
10130 tem <<= LOG2_BITS_PER_UNIT;
10131 tem += bitpos1;
10132 if (tem.to_shwi (&bitpos1))
10133 offset1 = NULL_TREE;
10137 /* If we have equivalent bases we might be able to simplify. */
10138 if (indirect_base0 == indirect_base1
10139 && operand_equal_p (base0, base1,
10140 indirect_base0 ? OEP_ADDRESS_OF : 0))
10142 /* We can fold this expression to a constant if the non-constant
10143 offset parts are equal. */
10144 if ((offset0 == offset1
10145 || (offset0 && offset1
10146 && operand_equal_p (offset0, offset1, 0)))
10147 && (equality_code
10148 || (indirect_base0
10149 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10150 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10152 if (!equality_code
10153 && maybe_ne (bitpos0, bitpos1)
10154 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10155 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10156 fold_overflow_warning (("assuming pointer wraparound does not "
10157 "occur when comparing P +- C1 with "
10158 "P +- C2"),
10159 WARN_STRICT_OVERFLOW_CONDITIONAL);
10161 switch (code)
10163 case EQ_EXPR:
10164 if (known_eq (bitpos0, bitpos1))
10165 return constant_boolean_node (true, type);
10166 if (known_ne (bitpos0, bitpos1))
10167 return constant_boolean_node (false, type);
10168 break;
10169 case NE_EXPR:
10170 if (known_ne (bitpos0, bitpos1))
10171 return constant_boolean_node (true, type);
10172 if (known_eq (bitpos0, bitpos1))
10173 return constant_boolean_node (false, type);
10174 break;
10175 case LT_EXPR:
10176 if (known_lt (bitpos0, bitpos1))
10177 return constant_boolean_node (true, type);
10178 if (known_ge (bitpos0, bitpos1))
10179 return constant_boolean_node (false, type);
10180 break;
10181 case LE_EXPR:
10182 if (known_le (bitpos0, bitpos1))
10183 return constant_boolean_node (true, type);
10184 if (known_gt (bitpos0, bitpos1))
10185 return constant_boolean_node (false, type);
10186 break;
10187 case GE_EXPR:
10188 if (known_ge (bitpos0, bitpos1))
10189 return constant_boolean_node (true, type);
10190 if (known_lt (bitpos0, bitpos1))
10191 return constant_boolean_node (false, type);
10192 break;
10193 case GT_EXPR:
10194 if (known_gt (bitpos0, bitpos1))
10195 return constant_boolean_node (true, type);
10196 if (known_le (bitpos0, bitpos1))
10197 return constant_boolean_node (false, type);
10198 break;
10199 default:;
10202 /* We can simplify the comparison to a comparison of the variable
10203 offset parts if the constant offset parts are equal.
10204 Be careful to use signed sizetype here because otherwise we
10205 mess with array offsets in the wrong way. This is possible
10206 because pointer arithmetic is restricted to remain within an
10207 object and overflow on pointer differences is undefined as of
10208 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10209 else if (known_eq (bitpos0, bitpos1)
10210 && (equality_code
10211 || (indirect_base0
10212 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10213 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10215 /* By converting to signed sizetype we cover middle-end pointer
10216 arithmetic which operates on unsigned pointer types of size
10217 type size and ARRAY_REF offsets which are properly sign or
10218 zero extended from their type in case it is narrower than
10219 sizetype. */
10220 if (offset0 == NULL_TREE)
10221 offset0 = build_int_cst (ssizetype, 0);
10222 else
10223 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10224 if (offset1 == NULL_TREE)
10225 offset1 = build_int_cst (ssizetype, 0);
10226 else
10227 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10229 if (!equality_code
10230 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10231 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10232 fold_overflow_warning (("assuming pointer wraparound does not "
10233 "occur when comparing P +- C1 with "
10234 "P +- C2"),
10235 WARN_STRICT_OVERFLOW_COMPARISON);
10237 return fold_build2_loc (loc, code, type, offset0, offset1);
10240 /* For equal offsets we can simplify to a comparison of the
10241 base addresses. */
10242 else if (known_eq (bitpos0, bitpos1)
10243 && (indirect_base0
10244 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10245 && (indirect_base1
10246 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10247 && ((offset0 == offset1)
10248 || (offset0 && offset1
10249 && operand_equal_p (offset0, offset1, 0))))
10251 if (indirect_base0)
10252 base0 = build_fold_addr_expr_loc (loc, base0);
10253 if (indirect_base1)
10254 base1 = build_fold_addr_expr_loc (loc, base1);
10255 return fold_build2_loc (loc, code, type, base0, base1);
10257 /* Comparison between an ordinary (non-weak) symbol and a null
10258 pointer can be eliminated since such symbols must have a non
10259 null address. In C, relational expressions between pointers
10260 to objects and null pointers are undefined. The results
10261 below follow the C++ rules with the additional property that
10262 every object pointer compares greater than a null pointer. */
10264 else if (((DECL_P (base0)
10265 && maybe_nonzero_address (base0) > 0
10266 /* Avoid folding references to struct members at offset 0 to
10267 prevent tests like '&ptr->firstmember == 0' from getting
10268 eliminated. When ptr is null, although the -> expression
10269 is strictly speaking invalid, GCC retains it as a matter
10270 of QoI. See PR c/44555. */
10271 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10272 || CONSTANT_CLASS_P (base0))
10273 && indirect_base0
10274 /* The caller guarantees that when one of the arguments is
10275 constant (i.e., null in this case) it is second. */
10276 && integer_zerop (arg1))
10278 switch (code)
10280 case EQ_EXPR:
10281 case LE_EXPR:
10282 case LT_EXPR:
10283 return constant_boolean_node (false, type);
10284 case GE_EXPR:
10285 case GT_EXPR:
10286 case NE_EXPR:
10287 return constant_boolean_node (true, type);
10288 default:
10289 gcc_unreachable ();
10294 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10295 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10296 the resulting offset is smaller in absolute value than the
10297 original one and has the same sign. */
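/* For example, assuming signed overflow is undefined,
   X + 2 < Y + 1 becomes X + 1 < Y: the combined constant 1 is
   smaller in absolute value than 2 and has the same sign.  */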
10298 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10299 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10300 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10301 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10302 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10303 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10304 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10305 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10307 tree const1 = TREE_OPERAND (arg0, 1);
10308 tree const2 = TREE_OPERAND (arg1, 1);
10309 tree variable1 = TREE_OPERAND (arg0, 0);
10310 tree variable2 = TREE_OPERAND (arg1, 0);
10311 tree cst;
10312 const char * const warnmsg = G_("assuming signed overflow does not "
10313 "occur when combining constants around "
10314 "a comparison");
10316 /* Put the constant on the side where it doesn't overflow and is
10317 of lower absolute value and of the same sign as before. */
10318 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10319 ? MINUS_EXPR : PLUS_EXPR,
10320 const2, const1);
10321 if (!TREE_OVERFLOW (cst)
10322 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10323 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10325 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10326 return fold_build2_loc (loc, code, type,
10327 variable1,
10328 fold_build2_loc (loc, TREE_CODE (arg1),
10329 TREE_TYPE (arg1),
10330 variable2, cst));
10333 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10334 ? MINUS_EXPR : PLUS_EXPR,
10335 const1, const2);
10336 if (!TREE_OVERFLOW (cst)
10337 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10338 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10340 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10341 return fold_build2_loc (loc, code, type,
10342 fold_build2_loc (loc, TREE_CODE (arg0),
10343 TREE_TYPE (arg0),
10344 variable1, cst),
10345 variable2);
10349 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10350 if (tem)
10351 return tem;
10353 /* If we are comparing an expression that just has comparisons
10354 of two integer values, arithmetic expressions of those comparisons,
10355 and constants, we can simplify it. There are only three cases
10356 to check: the two values can either be equal, the first can be
10357 greater, or the second can be greater. Fold the expression for
10358 those three values. Since each value must be 0 or 1, we have
10359 eight possibilities, each of which corresponds to the constant 0
10360 or 1 or one of the six possible comparisons.
10362 This handles common cases like (a > b) == 0 but also handles
10363 expressions like ((x > y) - (y > x)) > 0, which supposedly
10364 occur in macroized code. */
10366 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10368 tree cval1 = 0, cval2 = 0;
10370 if (twoval_comparison_p (arg0, &cval1, &cval2)
10371 /* Don't handle degenerate cases here; they should already
10372 have been handled anyway. */
10373 && cval1 != 0 && cval2 != 0
10374 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10375 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10377 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10378 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10379 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10380 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10382 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10383 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10385 /* We can't just pass T to eval_subst in case cval1 or cval2
10386 was the same as ARG1. */
10388 tree high_result
10389 = fold_build2_loc (loc, code, type,
10390 eval_subst (loc, arg0, cval1, maxval,
10391 cval2, minval),
10392 arg1);
10393 tree equal_result
10394 = fold_build2_loc (loc, code, type,
10395 eval_subst (loc, arg0, cval1, maxval,
10396 cval2, maxval),
10397 arg1);
10398 tree low_result
10399 = fold_build2_loc (loc, code, type,
10400 eval_subst (loc, arg0, cval1, minval,
10401 cval2, maxval),
10402 arg1);
10404 /* All three of these results should be 0 or 1. Confirm they are.
10405 Then use those values to select the proper code to use. */
10407 if (TREE_CODE (high_result) == INTEGER_CST
10408 && TREE_CODE (equal_result) == INTEGER_CST
10409 && TREE_CODE (low_result) == INTEGER_CST)
10411 /* Make a 3-bit mask with the high-order bit being the
10412 value for `>', the next for '=', and the low for '<'. */
10413 switch ((integer_onep (high_result) * 4)
10414 + (integer_onep (equal_result) * 2)
10415 + integer_onep (low_result))
10417 case 0:
10418 /* Always false. */
10419 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10420 case 1:
10421 code = LT_EXPR;
10422 break;
10423 case 2:
10424 code = EQ_EXPR;
10425 break;
10426 case 3:
10427 code = LE_EXPR;
10428 break;
10429 case 4:
10430 code = GT_EXPR;
10431 break;
10432 case 5:
10433 code = NE_EXPR;
10434 break;
10435 case 6:
10436 code = GE_EXPR;
10437 break;
10438 case 7:
10439 /* Always true. */
10440 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10443 return fold_build2_loc (loc, code, type, cval1, cval2);
10448 return NULL_TREE;
10452 /* Subroutine of fold_binary. Optimize complex multiplications of the
10453 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10454 argument EXPR represents the expression "z" of type TYPE. */
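/* For example, with z = 3 + 4i, z * conj(z) folds to 25 + 0i,
   since the real part is 3*3 + 4*4 and the imaginary part cancels.  */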
10456 static tree
10457 fold_mult_zconjz (location_t loc, tree type, tree expr)
10459 tree itype = TREE_TYPE (type);
10460 tree rpart, ipart, tem;
10462 if (TREE_CODE (expr) == COMPLEX_EXPR)
10464 rpart = TREE_OPERAND (expr, 0);
10465 ipart = TREE_OPERAND (expr, 1);
10467 else if (TREE_CODE (expr) == COMPLEX_CST)
10469 rpart = TREE_REALPART (expr);
10470 ipart = TREE_IMAGPART (expr);
10472 else
10474 expr = save_expr (expr);
10475 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10476 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10479 rpart = save_expr (rpart);
10480 ipart = save_expr (ipart);
10481 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10482 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10483 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10484 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10485 build_zero_cst (itype));
10489 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10490 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10491 true if successful. */
10493 static bool
10494 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10496 unsigned HOST_WIDE_INT i, nunits;
10498 if (TREE_CODE (arg) == VECTOR_CST
10499 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10501 for (i = 0; i < nunits; ++i)
10502 elts[i] = VECTOR_CST_ELT (arg, i);
10504 else if (TREE_CODE (arg) == CONSTRUCTOR)
10506 constructor_elt *elt;
10508 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10509 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10510 return false;
10511 else
10512 elts[i] = elt->value;
10514 else
10515 return false;
10516 for (; i < nelts; i++)
10517 elts[i]
10518 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10519 return true;
10522 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10523 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10524 NULL_TREE otherwise. */
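/* For example, permuting { 1, 2, 3, 4 } and { 5, 6, 7, 8 } with the
   selector { 0, 5, 2, 7 } yields { 1, 6, 3, 8 }: selector indices
   0..3 pick from the first input vector and 4..7 from the second.  */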
10526 tree
10527 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10529 unsigned int i;
10530 unsigned HOST_WIDE_INT nelts;
10531 bool need_ctor = false;
10533 if (!sel.length ().is_constant (&nelts))
10534 return NULL_TREE;
10535 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10536 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10537 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10538 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10539 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10540 return NULL_TREE;
10542 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10543 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10544 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10545 return NULL_TREE;
10547 tree_vector_builder out_elts (type, nelts, 1);
10548 for (i = 0; i < nelts; i++)
10550 HOST_WIDE_INT index;
10551 if (!sel[i].is_constant (&index))
10552 return NULL_TREE;
10553 if (!CONSTANT_CLASS_P (in_elts[index]))
10554 need_ctor = true;
10555 out_elts.quick_push (unshare_expr (in_elts[index]));
10558 if (need_ctor)
10560 vec<constructor_elt, va_gc> *v;
10561 vec_alloc (v, nelts);
10562 for (i = 0; i < nelts; i++)
10563 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10564 return build_constructor (type, v);
10566 else
10567 return out_elts.build ();
10570 /* Try to fold a pointer difference of type TYPE between two address expressions of
10571 array references AREF0 and AREF1 using location LOC. Return a
10572 simplified expression for the difference or NULL_TREE. */
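/* For example, for &a[i] - &a[j] this computes (i - j) scaled by the
   array element size, provided the two bases compare equal.  */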
10574 static tree
10575 fold_addr_of_array_ref_difference (location_t loc, tree type,
10576 tree aref0, tree aref1,
10577 bool use_pointer_diff)
10579 tree base0 = TREE_OPERAND (aref0, 0);
10580 tree base1 = TREE_OPERAND (aref1, 0);
10581 tree base_offset = build_int_cst (type, 0);
10583 /* If the bases are array references as well, recurse. If the bases
10584 are pointer indirections compute the difference of the pointers.
10585 If the bases are equal, we are set. */
10586 if ((TREE_CODE (base0) == ARRAY_REF
10587 && TREE_CODE (base1) == ARRAY_REF
10588 && (base_offset
10589 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10590 use_pointer_diff)))
10591 || (INDIRECT_REF_P (base0)
10592 && INDIRECT_REF_P (base1)
10593 && (base_offset
10594 = use_pointer_diff
10595 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10596 TREE_OPERAND (base0, 0),
10597 TREE_OPERAND (base1, 0))
10598 : fold_binary_loc (loc, MINUS_EXPR, type,
10599 fold_convert (type,
10600 TREE_OPERAND (base0, 0)),
10601 fold_convert (type,
10602 TREE_OPERAND (base1, 0)))))
10603 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10605 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10606 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10607 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10608 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10609 return fold_build2_loc (loc, PLUS_EXPR, type,
10610 base_offset,
10611 fold_build2_loc (loc, MULT_EXPR, type,
10612 diff, esz));
10614 return NULL_TREE;
10617 /* If the real or vector real constant CST of type TYPE has an exact
10618 inverse, return it, else return NULL. */
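/* For example, 4.0 has the exact inverse 0.25, while 3.0 has none:
   1/3 is not exactly representable in binary floating point.  */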
10620 tree
10621 exact_inverse (tree type, tree cst)
10623 REAL_VALUE_TYPE r;
10624 tree unit_type;
10625 machine_mode mode;
10627 switch (TREE_CODE (cst))
10629 case REAL_CST:
10630 r = TREE_REAL_CST (cst);
10632 if (exact_real_inverse (TYPE_MODE (type), &r))
10633 return build_real (type, r);
10635 return NULL_TREE;
10637 case VECTOR_CST:
10639 unit_type = TREE_TYPE (type);
10640 mode = TYPE_MODE (unit_type);
10642 tree_vector_builder elts;
10643 if (!elts.new_unary_operation (type, cst, false))
10644 return NULL_TREE;
10645 unsigned int count = elts.encoded_nelts ();
10646 for (unsigned int i = 0; i < count; ++i)
10648 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10649 if (!exact_real_inverse (mode, &r))
10650 return NULL_TREE;
10651 elts.quick_push (build_real (unit_type, r));
10654 return elts.build ();
10657 default:
10658 return NULL_TREE;
10662 /* Mask out the tz least significant bits of X of type TYPE where
10663 tz is the number of trailing zeroes in Y. */
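/* E.g. if Y is 8 (three trailing zero bits) and X is 0b10111,
   the result is 0b10000.  */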
10664 static wide_int
10665 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10667 int tz = wi::ctz (y);
10668 if (tz > 0)
10669 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10670 return x;
10673 /* Return true when T is an address and is known to be nonzero.
10674 For floating point we further ensure that T is not denormal.
10675 Similar logic is present in nonzero_address in rtlanal.h.
10677 If the return value is based on the assumption that signed overflow
10678 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10679 change *STRICT_OVERFLOW_P. */
10681 static bool
10682 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10684 tree type = TREE_TYPE (t);
10685 enum tree_code code;
10687 /* Doing something useful for floating point would need more work. */
10688 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10689 return false;
10691 code = TREE_CODE (t);
10692 switch (TREE_CODE_CLASS (code))
10694 case tcc_unary:
10695 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10696 strict_overflow_p);
10697 case tcc_binary:
10698 case tcc_comparison:
10699 return tree_binary_nonzero_warnv_p (code, type,
10700 TREE_OPERAND (t, 0),
10701 TREE_OPERAND (t, 1),
10702 strict_overflow_p);
10703 case tcc_constant:
10704 case tcc_declaration:
10705 case tcc_reference:
10706 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10708 default:
10709 break;
10712 switch (code)
10714 case TRUTH_NOT_EXPR:
10715 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10716 strict_overflow_p);
10718 case TRUTH_AND_EXPR:
10719 case TRUTH_OR_EXPR:
10720 case TRUTH_XOR_EXPR:
10721 return tree_binary_nonzero_warnv_p (code, type,
10722 TREE_OPERAND (t, 0),
10723 TREE_OPERAND (t, 1),
10724 strict_overflow_p);
10726 case COND_EXPR:
10727 case CONSTRUCTOR:
10728 case OBJ_TYPE_REF:
10729 case ADDR_EXPR:
10730 case WITH_SIZE_EXPR:
10731 case SSA_NAME:
10732 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10734 case COMPOUND_EXPR:
10735 case MODIFY_EXPR:
10736 case BIND_EXPR:
10737 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10738 strict_overflow_p);
10740 case SAVE_EXPR:
10741 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10742 strict_overflow_p);
10744 case CALL_EXPR:
10746 tree fndecl = get_callee_fndecl (t);
10747 if (!fndecl) return false;
10748 if (flag_delete_null_pointer_checks && !flag_check_new
10749 && DECL_IS_OPERATOR_NEW_P (fndecl)
10750 && !TREE_NOTHROW (fndecl))
10751 return true;
10752 if (flag_delete_null_pointer_checks
10753 && lookup_attribute ("returns_nonnull",
10754 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10755 return true;
10756 return alloca_call_p (t);
10759 default:
10760 break;
10762 return false;
10765 /* Return true when T is an address and is known to be nonzero.
10766 Handle warnings about undefined signed overflow. */
10768 bool
10769 tree_expr_nonzero_p (tree t)
10771 bool ret, strict_overflow_p;
10773 strict_overflow_p = false;
10774 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10775 if (strict_overflow_p)
10776 fold_overflow_warning (("assuming signed overflow does not occur when "
10777 "determining that expression is always "
10778 "non-zero"),
10779 WARN_STRICT_OVERFLOW_MISC);
10780 return ret;
10783 /* Return true if T is known not to be equal to an integer W. */
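/* For instance, an SSA name whose computed value range is [1, 10]
   is known not to equal 0.  */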
10785 bool
10786 expr_not_equal_to (tree t, const wide_int &w)
10788 int_range_max vr;
10789 switch (TREE_CODE (t))
10791 case INTEGER_CST:
10792 return wi::to_wide (t) != w;
10794 case SSA_NAME:
10795 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10796 return false;
10798 if (cfun)
10799 get_range_query (cfun)->range_of_expr (vr, t);
10800 else
10801 get_global_range_query ()->range_of_expr (vr, t);
10803 if (!vr.undefined_p () && !vr.contains_p (w))
10804 return true;
10805 /* If T has some known zero bits and W has any of those bits set,
10806 then T is known not to be equal to W. */
10807 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10808 TYPE_PRECISION (TREE_TYPE (t))), 0))
10809 return true;
10810 return false;
10812 default:
10813 return false;
10817 /* Fold a binary expression of code CODE and type TYPE with operands
10818 OP0 and OP1. LOC is the location of the resulting expression.
10819 Return the folded expression if folding is successful. Otherwise,
10820 return NULL_TREE. */
10822 tree
10823 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10824 tree op0, tree op1)
10826 enum tree_code_class kind = TREE_CODE_CLASS (code);
10827 tree arg0, arg1, tem;
10828 tree t1 = NULL_TREE;
10829 bool strict_overflow_p;
10830 unsigned int prec;
10832 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10833 && TREE_CODE_LENGTH (code) == 2
10834 && op0 != NULL_TREE
10835 && op1 != NULL_TREE);
10837 arg0 = op0;
10838 arg1 = op1;
10840 /* Strip any conversions that don't change the mode. This is
10841 safe for every expression, except for a comparison expression
10842 because its signedness is derived from its operands. So, in
10843 the latter case, only strip conversions that don't change the
10844 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10845 preserved.
10847 Note that this is done as an internal manipulation within the
10848 constant folder, in order to find the simplest representation
10849 of the arguments so that their form can be studied. In any
10850 case, the appropriate type conversions should be put back in
10851 the tree that will get out of the constant folder. */
10853 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10855 STRIP_SIGN_NOPS (arg0);
10856 STRIP_SIGN_NOPS (arg1);
10858 else
10860 STRIP_NOPS (arg0);
10861 STRIP_NOPS (arg1);
10864 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10865 constant but we can't do arithmetic on them. */
10866 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10868 tem = const_binop (code, type, arg0, arg1);
10869 if (tem != NULL_TREE)
10871 if (TREE_TYPE (tem) != type)
10872 tem = fold_convert_loc (loc, type, tem);
10873 return tem;
10877 /* If this is a commutative operation, and ARG0 is a constant, move it
10878 to ARG1 to reduce the number of tests below. */
10879 if (commutative_tree_code (code)
10880 && tree_swap_operands_p (arg0, arg1))
10881 return fold_build2_loc (loc, code, type, op1, op0);
10883 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10884 to ARG1 to reduce the number of tests below. */
10885 if (kind == tcc_comparison
10886 && tree_swap_operands_p (arg0, arg1))
10887 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10889 tem = generic_simplify (loc, code, type, op0, op1);
10890 if (tem)
10891 return tem;
10893 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10895 First check for cases where an arithmetic operation is applied to a
10896 compound, conditional, or comparison operation. Push the arithmetic
10897 operation inside the compound or conditional to see if any folding
10898 can then be done. Convert comparison to conditional for this purpose.
10899 This also optimizes non-constant cases that used to be done in
10900 expand_expr.
10902 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10903 where one operand is a truth value and the other is a truth value
10904 or a BIT_AND_EXPR with the constant 1. In that case, the
10905 code below would make the expression more complex. Change it to a
10906 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10907 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
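/* E.g. (a < b) & (c != 0) becomes TRUTH_AND_EXPR <a < b, c != 0>, and
   (a < b) == (c != 0) becomes the inversion of TRUTH_XOR_EXPR on the
   same operands. */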
10909 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10910 || code == EQ_EXPR || code == NE_EXPR)
10911 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10912 && ((truth_value_p (TREE_CODE (arg0))
10913 && (truth_value_p (TREE_CODE (arg1))
10914 || (TREE_CODE (arg1) == BIT_AND_EXPR
10915 && integer_onep (TREE_OPERAND (arg1, 1)))))
10916 || (truth_value_p (TREE_CODE (arg1))
10917 && (truth_value_p (TREE_CODE (arg0))
10918 || (TREE_CODE (arg0) == BIT_AND_EXPR
10919 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10921 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10922 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10923 : TRUTH_XOR_EXPR,
10924 boolean_type_node,
10925 fold_convert_loc (loc, boolean_type_node, arg0),
10926 fold_convert_loc (loc, boolean_type_node, arg1));
10928 if (code == EQ_EXPR)
10929 tem = invert_truthvalue_loc (loc, tem);
10931 return fold_convert_loc (loc, type, tem);
10934 if (TREE_CODE_CLASS (code) == tcc_binary
10935 || TREE_CODE_CLASS (code) == tcc_comparison)
10937 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10939 tem = fold_build2_loc (loc, code, type,
10940 fold_convert_loc (loc, TREE_TYPE (op0),
10941 TREE_OPERAND (arg0, 1)), op1);
10942 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10943 tem);
10945 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10947 tem = fold_build2_loc (loc, code, type, op0,
10948 fold_convert_loc (loc, TREE_TYPE (op1),
10949 TREE_OPERAND (arg1, 1)));
10950 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10951 tem);
10954 if (TREE_CODE (arg0) == COND_EXPR
10955 || TREE_CODE (arg0) == VEC_COND_EXPR
10956 || COMPARISON_CLASS_P (arg0))
10958 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10959 arg0, arg1,
10960 /*cond_first_p=*/1);
10961 if (tem != NULL_TREE)
10962 return tem;
10965 if (TREE_CODE (arg1) == COND_EXPR
10966 || TREE_CODE (arg1) == VEC_COND_EXPR
10967 || COMPARISON_CLASS_P (arg1))
10969 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10970 arg1, arg0,
10971 /*cond_first_p=*/0);
10972 if (tem != NULL_TREE)
10973 return tem;
10977 switch (code)
10979 case MEM_REF:
10980 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10981 if (TREE_CODE (arg0) == ADDR_EXPR
10982 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10984 tree iref = TREE_OPERAND (arg0, 0);
10985 return fold_build2 (MEM_REF, type,
10986 TREE_OPERAND (iref, 0),
10987 int_const_binop (PLUS_EXPR, arg1,
10988 TREE_OPERAND (iref, 1)));
10991 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10992 if (TREE_CODE (arg0) == ADDR_EXPR
10993 && handled_component_p (TREE_OPERAND (arg0, 0)))
10995 tree base;
10996 poly_int64 coffset;
10997 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10998 &coffset);
10999 if (!base)
11000 return NULL_TREE;
11001 return fold_build2 (MEM_REF, type,
11002 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11003 int_const_binop (PLUS_EXPR, arg1,
11004 size_int (coffset)));
11007 return NULL_TREE;
11009 case POINTER_PLUS_EXPR:
11010 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11011 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11012 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11013 return fold_convert_loc (loc, type,
11014 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11015 fold_convert_loc (loc, sizetype,
11016 arg1),
11017 fold_convert_loc (loc, sizetype,
11018 arg0)));
11020 return NULL_TREE;
11022 case PLUS_EXPR:
11023 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11025 /* X + (X / CST) * -CST is X % CST. */
11026 if (TREE_CODE (arg1) == MULT_EXPR
11027 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11028 && operand_equal_p (arg0,
11029 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11031 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11032 tree cst1 = TREE_OPERAND (arg1, 1);
11033 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11034 cst1, cst0);
11035 if (sum && integer_zerop (sum))
11036 return fold_convert_loc (loc, type,
11037 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11038 TREE_TYPE (arg0), arg0,
11039 cst0));
11043 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11044 one. Make sure the type is not saturating and has the signedness of
11045 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11046 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11047 if ((TREE_CODE (arg0) == MULT_EXPR
11048 || TREE_CODE (arg1) == MULT_EXPR)
11049 && !TYPE_SATURATING (type)
11050 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11051 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11052 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11054 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11055 if (tem)
11056 return tem;
11059 if (! FLOAT_TYPE_P (type))
11061 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11062 (plus (plus (mult) (mult)) (foo)) so that we can
11063 take advantage of the factoring cases below. */
11064 if (ANY_INTEGRAL_TYPE_P (type)
11065 && TYPE_OVERFLOW_WRAPS (type)
11066 && (((TREE_CODE (arg0) == PLUS_EXPR
11067 || TREE_CODE (arg0) == MINUS_EXPR)
11068 && TREE_CODE (arg1) == MULT_EXPR)
11069 || ((TREE_CODE (arg1) == PLUS_EXPR
11070 || TREE_CODE (arg1) == MINUS_EXPR)
11071 && TREE_CODE (arg0) == MULT_EXPR)))
11073 tree parg0, parg1, parg, marg;
11074 enum tree_code pcode;
11076 if (TREE_CODE (arg1) == MULT_EXPR)
11077 parg = arg0, marg = arg1;
11078 else
11079 parg = arg1, marg = arg0;
11080 pcode = TREE_CODE (parg);
11081 parg0 = TREE_OPERAND (parg, 0);
11082 parg1 = TREE_OPERAND (parg, 1);
11083 STRIP_NOPS (parg0);
11084 STRIP_NOPS (parg1);
11086 if (TREE_CODE (parg0) == MULT_EXPR
11087 && TREE_CODE (parg1) != MULT_EXPR)
11088 return fold_build2_loc (loc, pcode, type,
11089 fold_build2_loc (loc, PLUS_EXPR, type,
11090 fold_convert_loc (loc, type,
11091 parg0),
11092 fold_convert_loc (loc, type,
11093 marg)),
11094 fold_convert_loc (loc, type, parg1));
11095 if (TREE_CODE (parg0) != MULT_EXPR
11096 && TREE_CODE (parg1) == MULT_EXPR)
11097 return
11098 fold_build2_loc (loc, PLUS_EXPR, type,
11099 fold_convert_loc (loc, type, parg0),
11100 fold_build2_loc (loc, pcode, type,
11101 fold_convert_loc (loc, type, marg),
11102 fold_convert_loc (loc, type,
11103 parg1)));
11106 else
11108 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11109 to __complex__ ( x, y ). This is not the same for SNaNs or
11110 if signed zeros are involved. */
11111 if (!HONOR_SNANS (arg0)
11112 && !HONOR_SIGNED_ZEROS (arg0)
11113 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11115 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11116 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11117 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11118 bool arg0rz = false, arg0iz = false;
11119 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11120 || (arg0i && (arg0iz = real_zerop (arg0i))))
11122 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11123 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11124 if (arg0rz && arg1i && real_zerop (arg1i))
11126 tree rp = arg1r ? arg1r
11127 : build1 (REALPART_EXPR, rtype, arg1);
11128 tree ip = arg0i ? arg0i
11129 : build1 (IMAGPART_EXPR, rtype, arg0);
11130 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11132 else if (arg0iz && arg1r && real_zerop (arg1r))
11134 tree rp = arg0r ? arg0r
11135 : build1 (REALPART_EXPR, rtype, arg0);
11136 tree ip = arg1i ? arg1i
11137 : build1 (IMAGPART_EXPR, rtype, arg1);
11138 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11143 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11144 We associate floats only if the user has specified
11145 -fassociative-math. */
11146 if (flag_associative_math
11147 && TREE_CODE (arg1) == PLUS_EXPR
11148 && TREE_CODE (arg0) != MULT_EXPR)
11150 tree tree10 = TREE_OPERAND (arg1, 0);
11151 tree tree11 = TREE_OPERAND (arg1, 1);
11152 if (TREE_CODE (tree11) == MULT_EXPR
11153 && TREE_CODE (tree10) == MULT_EXPR)
11155 tree tree0;
11156 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11157 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11160 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11161 We associate floats only if the user has specified
11162 -fassociative-math. */
11163 if (flag_associative_math
11164 && TREE_CODE (arg0) == PLUS_EXPR
11165 && TREE_CODE (arg1) != MULT_EXPR)
11167 tree tree00 = TREE_OPERAND (arg0, 0);
11168 tree tree01 = TREE_OPERAND (arg0, 1);
11169 if (TREE_CODE (tree01) == MULT_EXPR
11170 && TREE_CODE (tree00) == MULT_EXPR)
11172 tree tree0;
11173 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11174 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11179 bit_rotate:
11180 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11181 is a rotate of A by C1 bits. */
11182 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11183 is a rotate of A by B bits.
11184 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11185 though in this case CODE must be | and not + or ^, otherwise
11186 it doesn't return A when B is 0. */
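/* Concretely, assuming a 32-bit unsigned A: (A << 3) + (A >> 29) is
   A rotated left by 3, since 3 + 29 == 32, and (A << B) | (A >> (-B & 31))
   is A rotated left by B. The | form still yields A for B == 0 because
   A | (A >> 0) == A, whereas + would give 2*A and ^ would give 0. */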
11188 enum tree_code code0, code1;
11189 tree rtype;
11190 code0 = TREE_CODE (arg0);
11191 code1 = TREE_CODE (arg1);
11192 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11193 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11194 && operand_equal_p (TREE_OPERAND (arg0, 0),
11195 TREE_OPERAND (arg1, 0), 0)
11196 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11197 TYPE_UNSIGNED (rtype))
11198 /* Only create rotates in complete modes. Other cases are not
11199 expanded properly. */
11200 && (element_precision (rtype)
11201 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11203 tree tree01, tree11;
11204 tree orig_tree01, orig_tree11;
11205 enum tree_code code01, code11;
11207 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11208 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11209 STRIP_NOPS (tree01);
11210 STRIP_NOPS (tree11);
11211 code01 = TREE_CODE (tree01);
11212 code11 = TREE_CODE (tree11);
11213 if (code11 != MINUS_EXPR
11214 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11216 std::swap (code0, code1);
11217 std::swap (code01, code11);
11218 std::swap (tree01, tree11);
11219 std::swap (orig_tree01, orig_tree11);
11221 if (code01 == INTEGER_CST
11222 && code11 == INTEGER_CST
11223 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11224 == element_precision (rtype)))
11226 tem = build2_loc (loc, LROTATE_EXPR,
11227 rtype, TREE_OPERAND (arg0, 0),
11228 code0 == LSHIFT_EXPR
11229 ? orig_tree01 : orig_tree11);
11230 return fold_convert_loc (loc, type, tem);
11232 else if (code11 == MINUS_EXPR)
11234 tree tree110, tree111;
11235 tree110 = TREE_OPERAND (tree11, 0);
11236 tree111 = TREE_OPERAND (tree11, 1);
11237 STRIP_NOPS (tree110);
11238 STRIP_NOPS (tree111);
11239 if (TREE_CODE (tree110) == INTEGER_CST
11240 && compare_tree_int (tree110,
11241 element_precision (rtype)) == 0
11242 && operand_equal_p (tree01, tree111, 0))
11244 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11245 ? LROTATE_EXPR : RROTATE_EXPR),
11246 rtype, TREE_OPERAND (arg0, 0),
11247 orig_tree01);
11248 return fold_convert_loc (loc, type, tem);
11251 else if (code == BIT_IOR_EXPR
11252 && code11 == BIT_AND_EXPR
11253 && pow2p_hwi (element_precision (rtype)))
11255 tree tree110, tree111;
11256 tree110 = TREE_OPERAND (tree11, 0);
11257 tree111 = TREE_OPERAND (tree11, 1);
11258 STRIP_NOPS (tree110);
11259 STRIP_NOPS (tree111);
11260 if (TREE_CODE (tree110) == NEGATE_EXPR
11261 && TREE_CODE (tree111) == INTEGER_CST
11262 && compare_tree_int (tree111,
11263 element_precision (rtype) - 1) == 0
11264 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11266 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11267 ? LROTATE_EXPR : RROTATE_EXPR),
11268 rtype, TREE_OPERAND (arg0, 0),
11269 orig_tree01);
11270 return fold_convert_loc (loc, type, tem);
11276 associate:
11277 /* In most languages, we can't associate operations on floats through
11278 parentheses. Rather than remember where the parentheses were, we
11279 don't associate floats at all, unless the user has specified
11280 -fassociative-math.
11281 And, we need to make sure type is not saturating. */
11283 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11284 && !TYPE_SATURATING (type)
11285 && !TYPE_OVERFLOW_SANITIZED (type))
11287 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11288 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11289 tree atype = type;
11290 bool ok = true;
11292 /* Split both trees into variables, constants, and literals. Then
11293 associate each group together, the constants with literals,
11294 then the result with variables. This increases the chances of
11295 literals being recombined later and of generating relocatable
11296 expressions for the sum of a constant and literal. */
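/* Conceptually, for (x + 3) - (y - 5) the operands are split into the
   variables x and y and their literal parts, the literals combine into
   the single constant 8, and the result is regrouped as (x - y) + 8. */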
11297 var0 = split_tree (arg0, type, code,
11298 &minus_var0, &con0, &minus_con0,
11299 &lit0, &minus_lit0, 0);
11300 var1 = split_tree (arg1, type, code,
11301 &minus_var1, &con1, &minus_con1,
11302 &lit1, &minus_lit1, code == MINUS_EXPR);
11304 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11305 if (code == MINUS_EXPR)
11306 code = PLUS_EXPR;
11308 /* With undefined overflow prefer doing association in a type
11309 which wraps on overflow, if that is one of the operand types. */
11310 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11311 && !TYPE_OVERFLOW_WRAPS (type))
11313 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11314 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11315 atype = TREE_TYPE (arg0);
11316 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11317 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11318 atype = TREE_TYPE (arg1);
11319 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11322 /* With undefined overflow we can only associate constants with one
11323 variable, and constants whose association doesn't overflow. */
11324 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11325 && !TYPE_OVERFLOW_WRAPS (atype))
11327 if ((var0 && var1) || (minus_var0 && minus_var1))
11329 /* ??? If split_tree would handle NEGATE_EXPR we could
11330 simply reject these cases and the allowed cases would
11331 be the var0/minus_var1 ones. */
11332 tree tmp0 = var0 ? var0 : minus_var0;
11333 tree tmp1 = var1 ? var1 : minus_var1;
11334 bool one_neg = false;
11336 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11338 tmp0 = TREE_OPERAND (tmp0, 0);
11339 one_neg = !one_neg;
11341 if (CONVERT_EXPR_P (tmp0)
11342 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11343 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11344 <= TYPE_PRECISION (atype)))
11345 tmp0 = TREE_OPERAND (tmp0, 0);
11346 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11348 tmp1 = TREE_OPERAND (tmp1, 0);
11349 one_neg = !one_neg;
11351 if (CONVERT_EXPR_P (tmp1)
11352 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11353 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11354 <= TYPE_PRECISION (atype)))
11355 tmp1 = TREE_OPERAND (tmp1, 0);
11356 /* The only case we can still associate with two variables
11357 is if they cancel out. */
11358 if (!one_neg
11359 || !operand_equal_p (tmp0, tmp1, 0))
11360 ok = false;
11362 else if ((var0 && minus_var1
11363 && ! operand_equal_p (var0, minus_var1, 0))
11364 || (minus_var0 && var1
11365 && ! operand_equal_p (minus_var0, var1, 0)))
11366 ok = false;
11369 /* Only do something if we found more than two objects. Otherwise,
11370 nothing has changed and we risk infinite recursion. */
11371 if (ok
11372 && ((var0 != 0) + (var1 != 0)
11373 + (minus_var0 != 0) + (minus_var1 != 0)
11374 + (con0 != 0) + (con1 != 0)
11375 + (minus_con0 != 0) + (minus_con1 != 0)
11376 + (lit0 != 0) + (lit1 != 0)
11377 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11379 var0 = associate_trees (loc, var0, var1, code, atype);
11380 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11381 code, atype);
11382 con0 = associate_trees (loc, con0, con1, code, atype);
11383 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11384 code, atype);
11385 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11386 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11387 code, atype);
11389 if (minus_var0 && var0)
11391 var0 = associate_trees (loc, var0, minus_var0,
11392 MINUS_EXPR, atype);
11393 minus_var0 = 0;
11395 if (minus_con0 && con0)
11397 con0 = associate_trees (loc, con0, minus_con0,
11398 MINUS_EXPR, atype);
11399 minus_con0 = 0;
11402 /* Preserve the MINUS_EXPR if the negative part of the literal is
11403 greater than the positive part. Otherwise, the multiplicative
11404 folding code (i.e. extract_muldiv) may be fooled in case
11405 unsigned constants are subtracted, like in the following
11406 example: ((X*2 + 4) - 8U)/2. */
11407 if (minus_lit0 && lit0)
11409 if (TREE_CODE (lit0) == INTEGER_CST
11410 && TREE_CODE (minus_lit0) == INTEGER_CST
11411 && tree_int_cst_lt (lit0, minus_lit0)
11412 /* But avoid ending up with only negated parts. */
11413 && (var0 || con0))
11415 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11416 MINUS_EXPR, atype);
11417 lit0 = 0;
11419 else
11421 lit0 = associate_trees (loc, lit0, minus_lit0,
11422 MINUS_EXPR, atype);
11423 minus_lit0 = 0;
11427 /* Don't introduce overflows through reassociation. */
11428 if ((lit0 && TREE_OVERFLOW_P (lit0))
11429 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11430 return NULL_TREE;
11432 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11433 con0 = associate_trees (loc, con0, lit0, code, atype);
11434 lit0 = 0;
11435 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11436 code, atype);
11437 minus_lit0 = 0;
11439 /* Eliminate minus_con0. */
11440 if (minus_con0)
11442 if (con0)
11443 con0 = associate_trees (loc, con0, minus_con0,
11444 MINUS_EXPR, atype);
11445 else if (var0)
11446 var0 = associate_trees (loc, var0, minus_con0,
11447 MINUS_EXPR, atype);
11448 else
11449 gcc_unreachable ();
11450 minus_con0 = 0;
11453 /* Eliminate minus_var0. */
11454 if (minus_var0)
11456 if (con0)
11457 con0 = associate_trees (loc, con0, minus_var0,
11458 MINUS_EXPR, atype);
11459 else
11460 gcc_unreachable ();
11461 minus_var0 = 0;
11464 return
11465 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11466 code, atype));
11470 return NULL_TREE;
11472 case POINTER_DIFF_EXPR:
11473 case MINUS_EXPR:
11474 /* Fold &a[i] - &a[j] to i-j. */
11475 if (TREE_CODE (arg0) == ADDR_EXPR
11476 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11477 && TREE_CODE (arg1) == ADDR_EXPR
11478 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11480 tree tem = fold_addr_of_array_ref_difference (loc, type,
11481 TREE_OPERAND (arg0, 0),
11482 TREE_OPERAND (arg1, 0),
11483 code
11484 == POINTER_DIFF_EXPR);
11485 if (tem)
11486 return tem;
11489 /* Further transformations are not for pointers. */
11490 if (code == POINTER_DIFF_EXPR)
11491 return NULL_TREE;
11493 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11494 if (TREE_CODE (arg0) == NEGATE_EXPR
11495 && negate_expr_p (op1)
11496 /* If arg0 is e.g. unsigned int and type is int, then this could
11497 introduce UB, because if A is INT_MIN at runtime, the original
11498 expression can be well defined while the latter is not.
11499 See PR83269. */
11500 && !(ANY_INTEGRAL_TYPE_P (type)
11501 && TYPE_OVERFLOW_UNDEFINED (type)
11502 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11503 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11504 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11505 fold_convert_loc (loc, type,
11506 TREE_OPERAND (arg0, 0)));
11508 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11509 __complex__ ( x, -y ). This is not the same for SNaNs or if
11510 signed zeros are involved. */
11511 if (!HONOR_SNANS (arg0)
11512 && !HONOR_SIGNED_ZEROS (arg0)
11513 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11515 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11516 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11517 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11518 bool arg0rz = false, arg0iz = false;
11519 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11520 || (arg0i && (arg0iz = real_zerop (arg0i))))
11522 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11523 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11524 if (arg0rz && arg1i && real_zerop (arg1i))
11526 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11527 arg1r ? arg1r
11528 : build1 (REALPART_EXPR, rtype, arg1));
11529 tree ip = arg0i ? arg0i
11530 : build1 (IMAGPART_EXPR, rtype, arg0);
11531 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11533 else if (arg0iz && arg1r && real_zerop (arg1r))
11535 tree rp = arg0r ? arg0r
11536 : build1 (REALPART_EXPR, rtype, arg0);
11537 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11538 arg1i ? arg1i
11539 : build1 (IMAGPART_EXPR, rtype, arg1));
11540 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11545 /* A - B -> A + (-B) if B is easily negatable. */
11546 if (negate_expr_p (op1)
11547 && ! TYPE_OVERFLOW_SANITIZED (type)
11548 && ((FLOAT_TYPE_P (type)
11549 /* Avoid this transformation if B is a positive REAL_CST. */
11550 && (TREE_CODE (op1) != REAL_CST
11551 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11552 || INTEGRAL_TYPE_P (type)))
11553 return fold_build2_loc (loc, PLUS_EXPR, type,
11554 fold_convert_loc (loc, type, arg0),
11555 negate_expr (op1));
11557 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11558 one. Make sure the type is not saturating and has the signedness of
11559 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11560 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11561 if ((TREE_CODE (arg0) == MULT_EXPR
11562 || TREE_CODE (arg1) == MULT_EXPR)
11563 && !TYPE_SATURATING (type)
11564 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11565 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11566 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11568 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11569 if (tem)
11570 return tem;
11573 goto associate;
11575 case MULT_EXPR:
11576 if (! FLOAT_TYPE_P (type))
11578 /* Transform x * -C into -x * C if x is easily negatable. */
11579 if (TREE_CODE (op1) == INTEGER_CST
11580 && tree_int_cst_sgn (op1) == -1
11581 && negate_expr_p (op0)
11582 && negate_expr_p (op1)
11583 && (tem = negate_expr (op1)) != op1
11584 && ! TREE_OVERFLOW (tem))
11585 return fold_build2_loc (loc, MULT_EXPR, type,
11586 fold_convert_loc (loc, type,
11587 negate_expr (op0)), tem);
11589 strict_overflow_p = false;
11590 if (TREE_CODE (arg1) == INTEGER_CST
11591 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11592 &strict_overflow_p)) != 0)
11594 if (strict_overflow_p)
11595 fold_overflow_warning (("assuming signed overflow does not "
11596 "occur when simplifying "
11597 "multiplication"),
11598 WARN_STRICT_OVERFLOW_MISC);
11599 return fold_convert_loc (loc, type, tem);
11602 /* Optimize z * conj(z) for integer complex numbers. */
11603 if (TREE_CODE (arg0) == CONJ_EXPR
11604 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11605 return fold_mult_zconjz (loc, type, arg1);
11606 if (TREE_CODE (arg1) == CONJ_EXPR
11607 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11608 return fold_mult_zconjz (loc, type, arg0);
11610 else
11612 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11613 This is not the same for NaNs or if signed zeros are
11614 involved. */
11615 if (!HONOR_NANS (arg0)
11616 && !HONOR_SIGNED_ZEROS (arg0)
11617 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11618 && TREE_CODE (arg1) == COMPLEX_CST
11619 && real_zerop (TREE_REALPART (arg1)))
11621 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11622 if (real_onep (TREE_IMAGPART (arg1)))
11623 return
11624 fold_build2_loc (loc, COMPLEX_EXPR, type,
11625 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11626 rtype, arg0)),
11627 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11628 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11629 return
11630 fold_build2_loc (loc, COMPLEX_EXPR, type,
11631 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11632 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11633 rtype, arg0)));
11636 /* Optimize z * conj(z) for floating point complex numbers.
11637 Guarded by flag_unsafe_math_optimizations as non-finite
11638 imaginary components don't produce scalar results. */
11639 if (flag_unsafe_math_optimizations
11640 && TREE_CODE (arg0) == CONJ_EXPR
11641 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11642 return fold_mult_zconjz (loc, type, arg1);
11643 if (flag_unsafe_math_optimizations
11644 && TREE_CODE (arg1) == CONJ_EXPR
11645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11646 return fold_mult_zconjz (loc, type, arg0);
11648 goto associate;
11650 case BIT_IOR_EXPR:
11651 /* Canonicalize (X & C1) | C2. */
11652 if (TREE_CODE (arg0) == BIT_AND_EXPR
11653 && TREE_CODE (arg1) == INTEGER_CST
11654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11656 int width = TYPE_PRECISION (type), w;
11657 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11658 wide_int c2 = wi::to_wide (arg1);
11660 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11661 if ((c1 & c2) == c1)
11662 return omit_one_operand_loc (loc, type, arg1,
11663 TREE_OPERAND (arg0, 0));
11665 wide_int msk = wi::mask (width, false,
11666 TYPE_PRECISION (TREE_TYPE (arg1)));
11668 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11669 if (wi::bit_and_not (msk, c1 | c2) == 0)
11671 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11672 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11675 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11676 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11677 mode which allows further optimizations. */
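/* E.g. for (X & 5) | 4 we have C1 == 5 and C2 == 4, so
   C3 == C1 & ~C2 == 1 and the result is canonicalized to (X & 1) | 4. */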
11678 c1 &= msk;
11679 c2 &= msk;
11680 wide_int c3 = wi::bit_and_not (c1, c2);
11681 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11683 wide_int mask = wi::mask (w, false,
11684 TYPE_PRECISION (type));
11685 if (((c1 | c2) & mask) == mask
11686 && wi::bit_and_not (c1, mask) == 0)
11688 c3 = mask;
11689 break;
11693 if (c3 != c1)
11695 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11696 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11697 wide_int_to_tree (type, c3));
11698 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11702 /* See if this can be simplified into a rotate first. If that
11703 is unsuccessful continue in the association code. */
11704 goto bit_rotate;
11706 case BIT_XOR_EXPR:
11707 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11708 if (TREE_CODE (arg0) == BIT_AND_EXPR
11709 && INTEGRAL_TYPE_P (type)
11710 && integer_onep (TREE_OPERAND (arg0, 1))
11711 && integer_onep (arg1))
11712 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11713 build_zero_cst (TREE_TYPE (arg0)));
11715 /* See if this can be simplified into a rotate first. If that
11716 is unsuccessful continue in the association code. */
11717 goto bit_rotate;
11719 case BIT_AND_EXPR:
11720 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11721 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11722 && INTEGRAL_TYPE_P (type)
11723 && integer_onep (TREE_OPERAND (arg0, 1))
11724 && integer_onep (arg1))
11726 tree tem2;
11727 tem = TREE_OPERAND (arg0, 0);
11728 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11729 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11730 tem, tem2);
11731 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11732 build_zero_cst (TREE_TYPE (tem)));
11734 /* Fold ~X & 1 as (X & 1) == 0. */
11735 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11736 && INTEGRAL_TYPE_P (type)
11737 && integer_onep (arg1))
11739 tree tem2;
11740 tem = TREE_OPERAND (arg0, 0);
11741 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11742 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11743 tem, tem2);
11744 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11745 build_zero_cst (TREE_TYPE (tem)));
11747 /* Fold !X & 1 as X == 0. */
11748 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11749 && integer_onep (arg1))
11751 tem = TREE_OPERAND (arg0, 0);
11752 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11753 build_zero_cst (TREE_TYPE (tem)));
11756 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11757 multiple of 1 << CST. */
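/* E.g. (X * 12) & -4 folds to X * 12: -4 is -(1 << 2) and 12 is a
   multiple of 1 << 2, so the two low bits are already zero. */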
11758 if (TREE_CODE (arg1) == INTEGER_CST)
11760 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11761 wide_int ncst1 = -cst1;
11762 if ((cst1 & ncst1) == ncst1
11763 && multiple_of_p (type, arg0,
11764 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11765 return fold_convert_loc (loc, type, arg0);
11768 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11769 bits from CST2. */
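/* E.g. (X * 4) & 3 folds to 0 because X * 4 has its two low bits
   clear, while (X * 4) & 7 drops the known-zero bits and becomes
   (X * 4) & 4. */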
11770 if (TREE_CODE (arg1) == INTEGER_CST
11771 && TREE_CODE (arg0) == MULT_EXPR
11772 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11774 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11775 wide_int masked
11776 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11778 if (masked == 0)
11779 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11780 arg0, arg1);
11781 else if (masked != warg1)
11783 /* Avoid the transform if arg1 is a mask of some
11784 mode which allows further optimizations. */
11785 int pop = wi::popcount (warg1);
11786 if (!(pop >= BITS_PER_UNIT
11787 && pow2p_hwi (pop)
11788 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11789 return fold_build2_loc (loc, code, type, op0,
11790 wide_int_to_tree (type, masked));
11794 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11795 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11796 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11798 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11800 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11801 if (mask == -1)
11802 return
11803 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11806 goto associate;
11808 case RDIV_EXPR:
11809 /* Don't touch a floating-point divide by zero unless the mode
11810 of the constant can represent infinity. */
11811 if (TREE_CODE (arg1) == REAL_CST
11812 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11813 && real_zerop (arg1))
11814 return NULL_TREE;
11816 /* (-A) / (-B) -> A / B */
11817 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11818 return fold_build2_loc (loc, RDIV_EXPR, type,
11819 TREE_OPERAND (arg0, 0),
11820 negate_expr (arg1));
11821 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11822 return fold_build2_loc (loc, RDIV_EXPR, type,
11823 negate_expr (arg0),
11824 TREE_OPERAND (arg1, 0));
11825 return NULL_TREE;
11827 case TRUNC_DIV_EXPR:
11828 /* Fall through */
11830 case FLOOR_DIV_EXPR:
11831 /* Simplify A / (B << N) where A and B are positive and B is
11832 a power of 2, to A >> (N + log2(B)). */
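/* E.g. A / (4 << N) becomes A >> (N + 2), since log2 (4) == 2. */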
11833 strict_overflow_p = false;
11834 if (TREE_CODE (arg1) == LSHIFT_EXPR
11835 && (TYPE_UNSIGNED (type)
11836 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11838 tree sval = TREE_OPERAND (arg1, 0);
11839 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11841 tree sh_cnt = TREE_OPERAND (arg1, 1);
11842 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11843 wi::exact_log2 (wi::to_wide (sval)));
11845 if (strict_overflow_p)
11846 fold_overflow_warning (("assuming signed overflow does not "
11847 "occur when simplifying A / (B << N)"),
11848 WARN_STRICT_OVERFLOW_MISC);
11850 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11851 sh_cnt, pow2);
11852 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11853 fold_convert_loc (loc, type, arg0), sh_cnt);
11857 /* Fall through */
11859 case ROUND_DIV_EXPR:
11860 case CEIL_DIV_EXPR:
11861 case EXACT_DIV_EXPR:
11862 if (integer_zerop (arg1))
11863 return NULL_TREE;
11865 /* Convert -A / -B to A / B when the type is signed and overflow is
11866 undefined. */
11867 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11868 && TREE_CODE (op0) == NEGATE_EXPR
11869 && negate_expr_p (op1))
11871 if (ANY_INTEGRAL_TYPE_P (type))
11872 fold_overflow_warning (("assuming signed overflow does not occur "
11873 "when distributing negation across "
11874 "division"),
11875 WARN_STRICT_OVERFLOW_MISC);
11876 return fold_build2_loc (loc, code, type,
11877 fold_convert_loc (loc, type,
11878 TREE_OPERAND (arg0, 0)),
11879 negate_expr (op1));
11881 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11882 && TREE_CODE (arg1) == NEGATE_EXPR
11883 && negate_expr_p (op0))
11885 if (ANY_INTEGRAL_TYPE_P (type))
11886 fold_overflow_warning (("assuming signed overflow does not occur "
11887 "when distributing negation across "
11888 "division"),
11889 WARN_STRICT_OVERFLOW_MISC);
11890 return fold_build2_loc (loc, code, type,
11891 negate_expr (op0),
11892 fold_convert_loc (loc, type,
11893 TREE_OPERAND (arg1, 0)));
11896 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11897 operation, EXACT_DIV_EXPR.
11899 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11900 At one time others generated faster code; it's not clear if they do
11901 after the last round of changes to the DIV code in expmed.cc. */
11902 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11903 && multiple_of_p (type, arg0, arg1))
11904 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11905 fold_convert (type, arg0),
11906 fold_convert (type, arg1));
11908 strict_overflow_p = false;
11909 if (TREE_CODE (arg1) == INTEGER_CST
11910 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11911 &strict_overflow_p)) != 0)
11913 if (strict_overflow_p)
11914 fold_overflow_warning (("assuming signed overflow does not occur "
11915 "when simplifying division"),
11916 WARN_STRICT_OVERFLOW_MISC);
11917 return fold_convert_loc (loc, type, tem);
11920 return NULL_TREE;
11922 case CEIL_MOD_EXPR:
11923 case FLOOR_MOD_EXPR:
11924 case ROUND_MOD_EXPR:
11925 case TRUNC_MOD_EXPR:
11926 strict_overflow_p = false;
11927 if (TREE_CODE (arg1) == INTEGER_CST
11928 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11929 &strict_overflow_p)) != 0)
11931 if (strict_overflow_p)
11932 fold_overflow_warning (("assuming signed overflow does not occur "
11933 "when simplifying modulus"),
11934 WARN_STRICT_OVERFLOW_MISC);
11935 return fold_convert_loc (loc, type, tem);
11938 return NULL_TREE;
11940 case LROTATE_EXPR:
11941 case RROTATE_EXPR:
11942 case RSHIFT_EXPR:
11943 case LSHIFT_EXPR:
11944 /* Since negative shift count is not well-defined,
11945 don't try to compute it in the compiler. */
11946 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11947 return NULL_TREE;
11949 prec = element_precision (type);
11951 /* If we have a rotate of a bit operation with the rotate count and
11952 the second operand of the bit operation both constant,
11953 permute the two operations. */
11954 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11955 && (TREE_CODE (arg0) == BIT_AND_EXPR
11956 || TREE_CODE (arg0) == BIT_IOR_EXPR
11957 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11958 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11960 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11961 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11962 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11963 fold_build2_loc (loc, code, type,
11964 arg00, arg1),
11965 fold_build2_loc (loc, code, type,
11966 arg01, arg1));
11969 /* Two consecutive rotates adding up to some integer
11970 multiple of the precision of the type can be ignored. */
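/* E.g. two RROTATE_EXPRs by 16 each on a 32-bit value rotate by the
   full precision in total and fold back to the unrotated operand. */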
11971 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11972 && TREE_CODE (arg0) == RROTATE_EXPR
11973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11974 && wi::umod_trunc (wi::to_wide (arg1)
11975 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11976 prec) == 0)
11977 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11979 return NULL_TREE;
11981 case MIN_EXPR:
11982 case MAX_EXPR:
11983 goto associate;
11985 case TRUTH_ANDIF_EXPR:
11986 /* Note that the operands of this must be ints
11987 and their values must be 0 or 1.
11988 ("true" is a fixed value perhaps depending on the language.) */
11989 /* If first arg is constant zero, return it. */
11990 if (integer_zerop (arg0))
11991 return fold_convert_loc (loc, type, arg0);
11992 /* FALLTHRU */
11993 case TRUTH_AND_EXPR:
11994 /* If either arg is constant true, drop it. */
11995 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11996 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11997 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11998 /* Preserve sequence points. */
11999 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12000 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12001 /* If second arg is constant zero, result is zero, but first arg
12002 must be evaluated. */
12003 if (integer_zerop (arg1))
12004 return omit_one_operand_loc (loc, type, arg1, arg0);
12005 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12006 case will be handled here. */
12007 if (integer_zerop (arg0))
12008 return omit_one_operand_loc (loc, type, arg0, arg1);
12010 /* !X && X is always false. */
12011 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12012 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12013 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12014 /* X && !X is always false. */
12015 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12016 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12017 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12019 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12020 means A >= Y && A != MAX, but in this case we know that
12021 A < X <= MAX. */
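/* The A != MAX caveat arises because A + 1 overflows exactly when
   A == MAX; here the conjunct A < X guarantees A != MAX, so
   A + 1 > Y can safely be replaced by A >= Y. */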
12023 if (!TREE_SIDE_EFFECTS (arg0)
12024 && !TREE_SIDE_EFFECTS (arg1))
12026 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12027 if (tem && !operand_equal_p (tem, arg0, 0))
12028 return fold_convert (type,
12029 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12030 tem, arg1));
12032 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12033 if (tem && !operand_equal_p (tem, arg1, 0))
12034 return fold_convert (type,
12035 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12036 arg0, tem));
12039 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12040 != NULL_TREE)
12041 return tem;
12043 return NULL_TREE;
12045 case TRUTH_ORIF_EXPR:
12046 /* Note that the operands of this must be ints
12047 and their values must be 0 or true.
12048 ("true" is a fixed value perhaps depending on the language.) */
12049 /* If first arg is constant true, return it. */
12050 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12051 return fold_convert_loc (loc, type, arg0);
12052 /* FALLTHRU */
12053 case TRUTH_OR_EXPR:
12054 /* If either arg is constant zero, drop it. */
12055 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12056 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12057 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12058 /* Preserve sequence points. */
12059 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12060 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12061 /* If second arg is constant true, result is true, but we must
12062 evaluate first arg. */
12063 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12064 return omit_one_operand_loc (loc, type, arg1, arg0);
12065 /* Likewise for first arg, but note this only occurs here for
12066 TRUTH_OR_EXPR. */
12067 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12068 return omit_one_operand_loc (loc, type, arg0, arg1);
12070 /* !X || X is always true. */
12071 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12072 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12073 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12074 /* X || !X is always true. */
12075 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12076 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12077 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12079 /* (X && !Y) || (!X && Y) is X ^ Y */
12080 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12081 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12083 tree a0, a1, l0, l1, n0, n1;
12085 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12086 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12088 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12089 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12091 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12092 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12094 if ((operand_equal_p (n0, a0, 0)
12095 && operand_equal_p (n1, a1, 0))
12096 || (operand_equal_p (n0, a1, 0)
12097 && operand_equal_p (n1, a0, 0)))
12098 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12101 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12102 != NULL_TREE)
12103 return tem;
12105 return NULL_TREE;
12107 case TRUTH_XOR_EXPR:
12108 /* If the second arg is constant zero, drop it. */
12109 if (integer_zerop (arg1))
12110 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12111 /* If the second arg is constant true, this is a logical inversion. */
12112 if (integer_onep (arg1))
12114 tem = invert_truthvalue_loc (loc, arg0);
12115 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12117 /* Identical arguments cancel to zero. */
12118 if (operand_equal_p (arg0, arg1, 0))
12119 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12121 /* !X ^ X is always true. */
12122 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12123 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12124 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12126 /* X ^ !X is always true. */
12127 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12129 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12131 return NULL_TREE;
12133 case EQ_EXPR:
12134 case NE_EXPR:
12135 STRIP_NOPS (arg0);
12136 STRIP_NOPS (arg1);
12138 tem = fold_comparison (loc, code, type, op0, op1);
12139 if (tem != NULL_TREE)
12140 return tem;
12142 /* bool_var != 1 becomes !bool_var. */
12143 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12144 && code == NE_EXPR)
12145 return fold_convert_loc (loc, type,
12146 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12147 TREE_TYPE (arg0), arg0));
12149 /* bool_var == 0 becomes !bool_var. */
12150 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12151 && code == EQ_EXPR)
12152 return fold_convert_loc (loc, type,
12153 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12154 TREE_TYPE (arg0), arg0));
12156 /* !exp != 0 becomes !exp */
12157 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12158 && code == NE_EXPR)
12159 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12161 /* If this is an EQ or NE comparison with zero and ARG0 is
12162 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12163 two operations, but the latter can be done in one less insn
12164 on machines that have only two-operand insns or on which a
12165 constant cannot be the first operand. */
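/* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0, which
   needs no constant in the first operand of the shift. */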
12166 if (TREE_CODE (arg0) == BIT_AND_EXPR
12167 && integer_zerop (arg1))
12169 tree arg00 = TREE_OPERAND (arg0, 0);
12170 tree arg01 = TREE_OPERAND (arg0, 1);
12171 if (TREE_CODE (arg00) == LSHIFT_EXPR
12172 && integer_onep (TREE_OPERAND (arg00, 0)))
12174 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12175 arg01, TREE_OPERAND (arg00, 1));
12176 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12177 build_one_cst (TREE_TYPE (arg0)));
12178 return fold_build2_loc (loc, code, type,
12179 fold_convert_loc (loc, TREE_TYPE (arg1),
12180 tem), arg1);
12182 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12183 && integer_onep (TREE_OPERAND (arg01, 0)))
12185 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12186 arg00, TREE_OPERAND (arg01, 1));
12187 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12188 build_one_cst (TREE_TYPE (arg0)));
12189 return fold_build2_loc (loc, code, type,
12190 fold_convert_loc (loc, TREE_TYPE (arg1),
12191 tem), arg1);
12195 /* If this is a comparison of a field, we may be able to simplify it. */
12196 if ((TREE_CODE (arg0) == COMPONENT_REF
12197 || TREE_CODE (arg0) == BIT_FIELD_REF)
12198 /* Handle the constant case even without -O
12199 to make sure the warnings are given. */
12200 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12202 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12203 if (t1)
12204 return t1;
12207 /* Optimize comparisons of strlen vs zero to a compare of the
12208 first character of the string vs zero. To wit,
12209 strlen(ptr) == 0 => *ptr == 0
12210 strlen(ptr) != 0 => *ptr != 0
12211 Other cases should reduce to one of these two (or a constant)
12212 due to the return value of strlen being unsigned. */
12213 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12215 tree fndecl = get_callee_fndecl (arg0);
12217 if (fndecl
12218 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12219 && call_expr_nargs (arg0) == 1
12220 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12221 == POINTER_TYPE))
12223 tree ptrtype
12224 = build_pointer_type (build_qualified_type (char_type_node,
12225 TYPE_QUAL_CONST));
12226 tree ptr = fold_convert_loc (loc, ptrtype,
12227 CALL_EXPR_ARG (arg0, 0));
12228 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12229 return fold_build2_loc (loc, code, type, iref,
12230 build_int_cst (TREE_TYPE (iref), 0));
12234 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12235 of X. Similarly fold (X >> C) == 0 into X >= 0. */
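/* E.g. for a 32-bit signed X, (X >> 31) != 0 is exactly X < 0, since
   the arithmetic shift isolates the sign bit. */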
12236 if (TREE_CODE (arg0) == RSHIFT_EXPR
12237 && integer_zerop (arg1)
12238 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12240 tree arg00 = TREE_OPERAND (arg0, 0);
12241 tree arg01 = TREE_OPERAND (arg0, 1);
12242 tree itype = TREE_TYPE (arg00);
12243 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12245 if (TYPE_UNSIGNED (itype))
12247 itype = signed_type_for (itype);
12248 arg00 = fold_convert_loc (loc, itype, arg00);
12250 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12251 type, arg00, build_zero_cst (itype));
12255 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12256 (X & C) == 0 when C is a single bit. */
12257 if (TREE_CODE (arg0) == BIT_AND_EXPR
12258 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12259 && integer_zerop (arg1)
12260 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12262 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12263 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12264 TREE_OPERAND (arg0, 1));
12265 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12266 type, tem,
12267 fold_convert_loc (loc, TREE_TYPE (arg0),
12268 arg1));
12271 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12272 constant C is a power of two, i.e. a single bit. */
12273 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12274 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12275 && integer_zerop (arg1)
12276 && integer_pow2p (TREE_OPERAND (arg0, 1))
12277 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12278 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12280 tree arg00 = TREE_OPERAND (arg0, 0);
12281 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12282 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12285 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12286 when C is a power of two, i.e. a single bit. */
12287 if (TREE_CODE (arg0) == BIT_AND_EXPR
12288 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12289 && integer_zerop (arg1)
12290 && integer_pow2p (TREE_OPERAND (arg0, 1))
12291 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12292 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12294 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12295 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12296 arg000, TREE_OPERAND (arg0, 1));
12297 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12298 tem, build_int_cst (TREE_TYPE (tem), 0));
12301 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12302 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12304 tree arg00 = TREE_OPERAND (arg0, 0);
12305 tree arg01 = TREE_OPERAND (arg0, 1);
12306 tree arg10 = TREE_OPERAND (arg1, 0);
12307 tree arg11 = TREE_OPERAND (arg1, 1);
12308 tree itype = TREE_TYPE (arg0);
12310 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12311 operand_equal_p guarantees no side-effects so we don't need
12312 to use omit_one_operand on Z. */
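/* This holds because XOR by a fixed Z maps distinct values to
   distinct values, so cancelling it from both sides preserves
   both equality and inequality. */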
12313 if (operand_equal_p (arg01, arg11, 0))
12314 return fold_build2_loc (loc, code, type, arg00,
12315 fold_convert_loc (loc, TREE_TYPE (arg00),
12316 arg10));
12317 if (operand_equal_p (arg01, arg10, 0))
12318 return fold_build2_loc (loc, code, type, arg00,
12319 fold_convert_loc (loc, TREE_TYPE (arg00),
12320 arg11));
12321 if (operand_equal_p (arg00, arg11, 0))
12322 return fold_build2_loc (loc, code, type, arg01,
12323 fold_convert_loc (loc, TREE_TYPE (arg01),
12324 arg10));
12325 if (operand_equal_p (arg00, arg10, 0))
12326 return fold_build2_loc (loc, code, type, arg01,
12327 fold_convert_loc (loc, TREE_TYPE (arg01),
12328 arg11));
12330 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12331 if (TREE_CODE (arg01) == INTEGER_CST
12332 && TREE_CODE (arg11) == INTEGER_CST)
12334 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12335 fold_convert_loc (loc, itype, arg11));
12336 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12337 return fold_build2_loc (loc, code, type, tem,
12338 fold_convert_loc (loc, itype, arg10));
12342 /* Attempt to simplify equality/inequality comparisons of complex
12343 values. Only lower the comparison if the result is known or
12344 can be simplified to a single scalar comparison. */
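/* E.g. COMPLEX_EXPR <x, 0.0> == COMPLEX_EXPR <y, 0.0> lowers to
   x == y once the imaginary halves fold to a known equal result. */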
12345 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12346 || TREE_CODE (arg0) == COMPLEX_CST)
12347 && (TREE_CODE (arg1) == COMPLEX_EXPR
12348 || TREE_CODE (arg1) == COMPLEX_CST))
12350 tree real0, imag0, real1, imag1;
12351 tree rcond, icond;
12353 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12355 real0 = TREE_OPERAND (arg0, 0);
12356 imag0 = TREE_OPERAND (arg0, 1);
12358 else
12360 real0 = TREE_REALPART (arg0);
12361 imag0 = TREE_IMAGPART (arg0);
12364 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12366 real1 = TREE_OPERAND (arg1, 0);
12367 imag1 = TREE_OPERAND (arg1, 1);
12369 else
12371 real1 = TREE_REALPART (arg1);
12372 imag1 = TREE_IMAGPART (arg1);
12375 rcond = fold_binary_loc (loc, code, type, real0, real1);
12376 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12378 if (integer_zerop (rcond))
12380 if (code == EQ_EXPR)
12381 return omit_two_operands_loc (loc, type, boolean_false_node,
12382 imag0, imag1);
12383 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12385 else
12387 if (code == NE_EXPR)
12388 return omit_two_operands_loc (loc, type, boolean_true_node,
12389 imag0, imag1);
12390 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12394 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12395 if (icond && TREE_CODE (icond) == INTEGER_CST)
12397 if (integer_zerop (icond))
12399 if (code == EQ_EXPR)
12400 return omit_two_operands_loc (loc, type, boolean_false_node,
12401 real0, real1);
12402 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12404 else
12406 if (code == NE_EXPR)
12407 return omit_two_operands_loc (loc, type, boolean_true_node,
12408 real0, real1);
12409 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12414 return NULL_TREE;
12416 case LT_EXPR:
12417 case GT_EXPR:
12418 case LE_EXPR:
12419 case GE_EXPR:
12420 tem = fold_comparison (loc, code, type, op0, op1);
12421 if (tem != NULL_TREE)
12422 return tem;
12424 /* Transform comparisons of the form X +- C CMP X. */
12425 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12426 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12427 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12428 && !HONOR_SNANS (arg0))
12430 tree arg01 = TREE_OPERAND (arg0, 1);
12431 enum tree_code code0 = TREE_CODE (arg0);
12432 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12434 /* (X - c) > X becomes false. */
12435 if (code == GT_EXPR
12436 && ((code0 == MINUS_EXPR && is_positive >= 0)
12437 || (code0 == PLUS_EXPR && is_positive <= 0)))
12438 return constant_boolean_node (0, type);
12440 /* Likewise (X + c) < X becomes false. */
12441 if (code == LT_EXPR
12442 && ((code0 == PLUS_EXPR && is_positive >= 0)
12443 || (code0 == MINUS_EXPR && is_positive <= 0)))
12444 return constant_boolean_node (0, type);
12446 /* Convert (X - c) <= X to true. */
12447 if (!HONOR_NANS (arg1)
12448 && code == LE_EXPR
12449 && ((code0 == MINUS_EXPR && is_positive >= 0)
12450 || (code0 == PLUS_EXPR && is_positive <= 0)))
12451 return constant_boolean_node (1, type);
12453 /* Convert (X + c) >= X to true. */
12454 if (!HONOR_NANS (arg1)
12455 && code == GE_EXPR
12456 && ((code0 == PLUS_EXPR && is_positive >= 0)
12457 || (code0 == MINUS_EXPR && is_positive <= 0)))
12458 return constant_boolean_node (1, type);
12461 /* If we are comparing an ABS_EXPR with a constant, we can
12462 convert all the cases into explicit comparisons, but they may
12463 well not be faster than doing the ABS and one comparison.
12464 But ABS (X) <= C is a range comparison, which becomes a subtraction
12465 and a comparison, and is probably faster. */
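/* E.g. ABS (X) <= 3 becomes X >= -3 && X <= 3. */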
12466 if (code == LE_EXPR
12467 && TREE_CODE (arg1) == INTEGER_CST
12468 && TREE_CODE (arg0) == ABS_EXPR
12469 && ! TREE_SIDE_EFFECTS (arg0)
12470 && (tem = negate_expr (arg1)) != 0
12471 && TREE_CODE (tem) == INTEGER_CST
12472 && !TREE_OVERFLOW (tem))
12473 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12474 build2 (GE_EXPR, type,
12475 TREE_OPERAND (arg0, 0), tem),
12476 build2 (LE_EXPR, type,
12477 TREE_OPERAND (arg0, 0), arg1));
12479 /* Convert ABS_EXPR<x> >= 0 to true. */
12480 strict_overflow_p = false;
12481 if (code == GE_EXPR
12482 && (integer_zerop (arg1)
12483 || (! HONOR_NANS (arg0)
12484 && real_zerop (arg1)))
12485 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12487 if (strict_overflow_p)
12488 fold_overflow_warning (("assuming signed overflow does not occur "
12489 "when simplifying comparison of "
12490 "absolute value and zero"),
12491 WARN_STRICT_OVERFLOW_CONDITIONAL);
12492 return omit_one_operand_loc (loc, type,
12493 constant_boolean_node (true, type),
12494 arg0);
12497 /* Convert ABS_EXPR<x> < 0 to false. */
12498 strict_overflow_p = false;
12499 if (code == LT_EXPR
12500 && (integer_zerop (arg1) || real_zerop (arg1))
12501 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12503 if (strict_overflow_p)
12504 fold_overflow_warning (("assuming signed overflow does not occur "
12505 "when simplifying comparison of "
12506 "absolute value and zero"),
12507 WARN_STRICT_OVERFLOW_CONDITIONAL);
12508 return omit_one_operand_loc (loc, type,
12509 constant_boolean_node (false, type),
12510 arg0);
12513 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12514 and similarly for >= into !=. */
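/* E.g. for unsigned X, X < (1 << 4) holds iff all bits above bit 3
   are clear, i.e. iff (X >> 4) == 0. */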
12515 if ((code == LT_EXPR || code == GE_EXPR)
12516 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12517 && TREE_CODE (arg1) == LSHIFT_EXPR
12518 && integer_onep (TREE_OPERAND (arg1, 0)))
12519 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12520 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12521 TREE_OPERAND (arg1, 1)),
12522 build_zero_cst (TREE_TYPE (arg0)));
12524 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12525 otherwise Y might be >= # of bits in X's type and thus e.g.
12526 (unsigned char) (1 << Y) for Y 15 might be 0.
12527 If the cast is widening, then 1 << Y should have unsigned type,
12528 otherwise if Y is number of bits in the signed shift type minus 1,
12529 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12530 31 might be 0xffffffff80000000. */
12531 if ((code == LT_EXPR || code == GE_EXPR)
12532 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12533 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12534 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12535 && CONVERT_EXPR_P (arg1)
12536 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12537 && (element_precision (TREE_TYPE (arg1))
12538 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12539 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12540 || (element_precision (TREE_TYPE (arg1))
12541 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12542 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12544 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12545 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12546 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12547 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12548 build_zero_cst (TREE_TYPE (arg0)));
12551 return NULL_TREE;
12553 case UNORDERED_EXPR:
12554 case ORDERED_EXPR:
12555 case UNLT_EXPR:
12556 case UNLE_EXPR:
12557 case UNGT_EXPR:
12558 case UNGE_EXPR:
12559 case UNEQ_EXPR:
12560 case LTGT_EXPR:
12561 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12563 tree targ0 = strip_float_extensions (arg0);
12564 tree targ1 = strip_float_extensions (arg1);
12565 tree newtype = TREE_TYPE (targ0);
12567 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12568 newtype = TREE_TYPE (targ1);
12570 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0)))
12571 return fold_build2_loc (loc, code, type,
12572 fold_convert_loc (loc, newtype, targ0),
12573 fold_convert_loc (loc, newtype, targ1));
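/* A minimal sketch of why stripping the extensions is safe (not part
   of GCC, kept out of the build with #if 0): float -> double
   conversion is exact, so comparing the widened values agrees with
   comparing the original floats, including when an operand is NaN.  */
#if 0
#include <cassert>

static void
example_strip_float_extension (float f1, float f2)
{
  assert (((double) f1 < (double) f2) == (f1 < f2));
  assert (((double) f1 == (double) f2) == (f1 == f2));
}
#endif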
12576 return NULL_TREE;
12578 case COMPOUND_EXPR:
12579 /* When pedantic, a compound expression can be neither an lvalue
12580 nor an integer constant expression. */
12581 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12582 return NULL_TREE;
12584 /* Don't let (0, 0) be a null pointer constant. */
12584 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12585 : fold_convert_loc (loc, type, arg1);
12586 return tem;
12588 default:
12589 return NULL_TREE;
12590 } /* switch (code) */
12593 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12594 ((A & N) + B) & M -> (A + B) & M
12595 Similarly if (N & M) == 0,
12596 ((A | N) + B) & M -> (A + B) & M
12597 and for - instead of + (or unary - instead of +)
12598 and/or ^ instead of |.
12599 If B is constant and (B & M) == 0, fold into A & M.
12601 This function is a helper for match.pd patterns. It returns the
12602 (non-NULL) type in which the simplified operation should be performed,
12603 but only if some such simplification is possible.
12605 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12606 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12607 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12608 +/-. */
12609 tree
12610 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12611 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12612 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12613 tree *pmop)
12615 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12616 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12617 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12618 if (~cst1 == 0
12619 || (cst1 & (cst1 + 1)) != 0
12620 || !INTEGRAL_TYPE_P (type)
12621 || (!TYPE_OVERFLOW_WRAPS (type)
12622 && TREE_CODE (type) != INTEGER_TYPE)
12623 || (wi::max_value (type) & cst1) != cst1)
12624 return NULL_TREE;
12626 enum tree_code codes[2] = { code00, code01 };
12627 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12628 int which = 0;
12629 wide_int cst0;
12631 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12632 arg1 (M) is == (1LL << cst) - 1.
12633 Store C into PMOP[0] and D into PMOP[1]. */
12634 pmop[0] = arg00;
12635 pmop[1] = arg01;
12636 which = code != NEGATE_EXPR;
12638 for (; which >= 0; which--)
12639 switch (codes[which])
12641 case BIT_AND_EXPR:
12642 case BIT_IOR_EXPR:
12643 case BIT_XOR_EXPR:
12644 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12645 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12646 if (codes[which] == BIT_AND_EXPR)
12648 if (cst0 != cst1)
12649 break;
12651 else if (cst0 != 0)
12652 break;
12653 /* If C or D is of the form (A & N) where
12654 (N & M) == M, or of the form (A | N) or
12655 (A ^ N) where (N & M) == 0, replace it with A. */
12656 pmop[which] = arg0xx[2 * which];
12657 break;
12658 case ERROR_MARK:
12659 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12660 break;
12661 /* If C or D is a N where (N & M) == 0, it can be
12662 omitted (replaced with 0). */
12663 if ((code == PLUS_EXPR
12664 || (code == MINUS_EXPR && which == 0))
12665 && (cst1 & wi::to_wide (pmop[which])) == 0)
12666 pmop[which] = build_int_cst (type, 0);
12667 /* Similarly, with C - N where (-N & M) == 0. */
12668 if (code == MINUS_EXPR
12669 && which == 1
12670 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12671 pmop[which] = build_int_cst (type, 0);
12672 break;
12673 default:
12674 gcc_unreachable ();
12677 /* Only build anything new if we optimized one or both arguments above. */
12678 if (pmop[0] == arg00 && pmop[1] == arg01)
12679 return NULL_TREE;
12681 if (TYPE_OVERFLOW_WRAPS (type))
12682 return type;
12683 else
12684 return unsigned_type_for (type);
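/* A concrete instance of the identity documented above (not part of
   GCC, kept out of the build with #if 0), using M = 0xff, i.e.
   M == (1 << 8) - 1. The constants below are arbitrary values chosen
   so that (N & M) == M for the AND case and (N & M) == 0 for the
   OR/XOR cases.  */
#if 0
#include <cassert>

static void
example_fold_bit_and_mask ()
{
  const unsigned m = 0xff;
  const unsigned n_and = 0xf00000ff;	/* (N & M) == M.  */
  const unsigned n_or = 0x100;		/* (N & M) == 0.  */
  for (unsigned a = 0; a < 1000; a += 7)
    for (unsigned b = 0; b < 1000; b += 13)
      {
	assert ((((a & n_and) + b) & m) == ((a + b) & m));
	assert ((((a | n_or) + b) & m) == ((a + b) & m));
	assert ((((a ^ n_or) - b) & m) == ((a - b) & m));
      }
}
#endif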
12687 /* Used by contains_label_p and contains_label_1. */
12689 struct contains_label_data
12691 hash_set<tree> *pset;
12692 bool inside_switch_p;
12695 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12696 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12697 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12699 static tree
12700 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12702 contains_label_data *d = (contains_label_data *) data;
12703 switch (TREE_CODE (*tp))
12705 case LABEL_EXPR:
12706 return *tp;
12708 case CASE_LABEL_EXPR:
12709 if (!d->inside_switch_p)
12710 return *tp;
12711 return NULL_TREE;
12713 case SWITCH_EXPR:
12714 if (!d->inside_switch_p)
12716 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12717 return *tp;
12718 d->inside_switch_p = true;
12719 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12720 return *tp;
12721 d->inside_switch_p = false;
12722 *walk_subtrees = 0;
12724 return NULL_TREE;
12726 case GOTO_EXPR:
12727 *walk_subtrees = 0;
12728 return NULL_TREE;
12730 default:
12731 return NULL_TREE;
12735 /* Return whether the sub-tree ST contains a label which is accessible from
12736 outside the sub-tree. */
12738 static bool
12739 contains_label_p (tree st)
12741 hash_set<tree> pset;
12742 contains_label_data data = { &pset, false };
12743 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12746 /* Fold a ternary expression of code CODE and type TYPE with operands
12747 OP0, OP1, and OP2. Return the folded expression if folding is
12748 successful. Otherwise, return NULL_TREE. */
12750 tree
12751 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12752 tree op0, tree op1, tree op2)
12754 tree tem;
12755 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12756 enum tree_code_class kind = TREE_CODE_CLASS (code);
12758 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12759 && TREE_CODE_LENGTH (code) == 3);
12761 /* If this is a commutative operation, and OP0 is a constant, move it
12762 to OP1 to reduce the number of tests below. */
12763 if (commutative_ternary_tree_code (code)
12764 && tree_swap_operands_p (op0, op1))
12765 return fold_build3_loc (loc, code, type, op1, op0, op2);
12767 tem = generic_simplify (loc, code, type, op0, op1, op2);
12768 if (tem)
12769 return tem;
12771 /* Strip any conversions that don't change the mode. This is safe
12772 for every expression, except for a comparison expression because
12773 its signedness is derived from its operands. So, in the latter
12774 case, only strip conversions that don't change the signedness.
12776 Note that this is done as an internal manipulation within the
12777 constant folder, in order to find the simplest representation of
12778 the arguments so that their form can be studied. In any cases,
12779 the appropriate type conversions should be put back in the tree
12780 that will get out of the constant folder. */
12781 if (op0)
12783 arg0 = op0;
12784 STRIP_NOPS (arg0);
12787 if (op1)
12789 arg1 = op1;
12790 STRIP_NOPS (arg1);
12793 if (op2)
12795 arg2 = op2;
12796 STRIP_NOPS (arg2);
12799 switch (code)
12801 case COMPONENT_REF:
12802 if (TREE_CODE (arg0) == CONSTRUCTOR
12803 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12805 unsigned HOST_WIDE_INT idx;
12806 tree field, value;
12807 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12808 if (field == arg1)
12809 return value;
12811 return NULL_TREE;
12813 case COND_EXPR:
12814 case VEC_COND_EXPR:
12815 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12816 so all simple results must be passed through pedantic_non_lvalue. */
12817 if (TREE_CODE (arg0) == INTEGER_CST)
12819 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12820 tem = integer_zerop (arg0) ? op2 : op1;
12821 /* Only optimize constant conditions when the selected branch
12822 has the same type as the COND_EXPR. This avoids optimizing
12823 away "c ? x : throw", where the throw has a void type.
12824 Avoid throwing away that operand which contains label. */
12825 if ((!TREE_SIDE_EFFECTS (unused_op)
12826 || !contains_label_p (unused_op))
12827 && (! VOID_TYPE_P (TREE_TYPE (tem))
12828 || VOID_TYPE_P (type)))
12829 return protected_set_expr_location_unshare (tem, loc);
12830 return NULL_TREE;
12832 else if (TREE_CODE (arg0) == VECTOR_CST)
12834 unsigned HOST_WIDE_INT nelts;
12835 if ((TREE_CODE (arg1) == VECTOR_CST
12836 || TREE_CODE (arg1) == CONSTRUCTOR)
12837 && (TREE_CODE (arg2) == VECTOR_CST
12838 || TREE_CODE (arg2) == CONSTRUCTOR)
12839 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12841 vec_perm_builder sel (nelts, nelts, 1);
12842 for (unsigned int i = 0; i < nelts; i++)
12844 tree val = VECTOR_CST_ELT (arg0, i);
12845 if (integer_all_onesp (val))
12846 sel.quick_push (i);
12847 else if (integer_zerop (val))
12848 sel.quick_push (nelts + i);
12849 else /* Currently unreachable. */
12850 return NULL_TREE;
12852 vec_perm_indices indices (sel, 2, nelts);
12853 tree t = fold_vec_perm (type, arg1, arg2, indices);
12854 if (t != NULL_TREE)
12855 return t;
12859 /* If we have A op B ? A : C, we may be able to convert this to a
12860 simpler expression, depending on the operation and the values
12861 of B and C. Signed zeros prevent all of these transformations,
12862 for reasons given above each one.
12864 Also try swapping the arguments and inverting the conditional. */
12865 if (COMPARISON_CLASS_P (arg0)
12866 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12867 && !HONOR_SIGNED_ZEROS (op1))
12869 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12870 TREE_OPERAND (arg0, 0),
12871 TREE_OPERAND (arg0, 1),
12872 op1, op2);
12873 if (tem)
12874 return tem;
12877 if (COMPARISON_CLASS_P (arg0)
12878 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12879 && !HONOR_SIGNED_ZEROS (op2))
12881 enum tree_code comp_code = TREE_CODE (arg0);
12882 tree arg00 = TREE_OPERAND (arg0, 0);
12883 tree arg01 = TREE_OPERAND (arg0, 1);
12884 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12885 if (comp_code != ERROR_MARK)
12886 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12887 arg00,
12888 arg01,
12889 op2, op1);
12890 if (tem)
12891 return tem;
12894 /* If the second operand is simpler than the third, swap them
12895 since that produces better jump optimization results. */
12896 if (truth_value_p (TREE_CODE (arg0))
12897 && tree_swap_operands_p (op1, op2))
12899 location_t loc0 = expr_location_or (arg0, loc);
12900 /* See if this can be inverted. If it can't, possibly because
12901 it was a floating-point inequality comparison, don't do
12902 anything. */
12903 tem = fold_invert_truthvalue (loc0, arg0);
12904 if (tem)
12905 return fold_build3_loc (loc, code, type, tem, op2, op1);
12908 /* Convert A ? 1 : 0 to simply A. */
12909 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12910 : (integer_onep (op1)
12911 && !VECTOR_TYPE_P (type)))
12912 && integer_zerop (op2)
12913 /* If we try to convert OP0 to our type, the
12914 call to fold will try to move the conversion inside
12915 a COND, which will recurse. In that case, the COND_EXPR
12916 is probably the best choice, so leave it alone. */
12917 && type == TREE_TYPE (arg0))
12918 return protected_set_expr_location_unshare (arg0, loc);
12920 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12921 over COND_EXPR in cases such as floating point comparisons. */
12922 if (integer_zerop (op1)
12923 && code == COND_EXPR
12924 && integer_onep (op2)
12925 && !VECTOR_TYPE_P (type)
12926 && truth_value_p (TREE_CODE (arg0)))
12927 return fold_convert_loc (loc, type,
12928 invert_truthvalue_loc (loc, arg0));
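/* A minimal sketch of the two conversions above (not part of GCC,
   kept out of the build with #if 0): for a truth value A, A ? 1 : 0
   is A itself and A ? 0 : 1 is !A.  */
#if 0
#include <cassert>

static void
example_cond_to_truth (bool a)
{
  assert ((a ? 1 : 0) == (int) a);
  assert ((a ? 0 : 1) == (int) !a);
}
#endif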
12930 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12931 if (TREE_CODE (arg0) == LT_EXPR
12932 && integer_zerop (TREE_OPERAND (arg0, 1))
12933 && integer_zerop (op2)
12934 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12936 /* sign_bit_p looks through both zero and sign extensions,
12937 but for this optimization only sign extensions are
12938 usable. */
12939 tree tem2 = TREE_OPERAND (arg0, 0);
12940 while (tem != tem2)
12942 if (TREE_CODE (tem2) != NOP_EXPR
12943 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12945 tem = NULL_TREE;
12946 break;
12948 tem2 = TREE_OPERAND (tem2, 0);
12950 /* sign_bit_p only checks ARG1 bits within A's precision.
12951 If <sign bit of A> has wider type than A, bits outside
12952 of A's precision in <sign bit of A> need to be checked.
12953 If they are all 0, this optimization needs to be done
12954 in unsigned A's type, if they are all 1 in signed A's type,
12955 otherwise this can't be done. */
12956 if (tem
12957 && TYPE_PRECISION (TREE_TYPE (tem))
12958 < TYPE_PRECISION (TREE_TYPE (arg1))
12959 && TYPE_PRECISION (TREE_TYPE (tem))
12960 < TYPE_PRECISION (type))
12962 int inner_width, outer_width;
12963 tree tem_type;
12965 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12966 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12967 if (outer_width > TYPE_PRECISION (type))
12968 outer_width = TYPE_PRECISION (type);
12970 wide_int mask = wi::shifted_mask
12971 (inner_width, outer_width - inner_width, false,
12972 TYPE_PRECISION (TREE_TYPE (arg1)));
12974 wide_int common = mask & wi::to_wide (arg1);
12975 if (common == mask)
12977 tem_type = signed_type_for (TREE_TYPE (tem));
12978 tem = fold_convert_loc (loc, tem_type, tem);
12980 else if (common == 0)
12982 tem_type = unsigned_type_for (TREE_TYPE (tem));
12983 tem = fold_convert_loc (loc, tem_type, tem);
12985 else
12986 tem = NULL;
12989 if (tem)
12990 return
12991 fold_convert_loc (loc, type,
12992 fold_build2_loc (loc, BIT_AND_EXPR,
12993 TREE_TYPE (tem), tem,
12994 fold_convert_loc (loc,
12995 TREE_TYPE (tem),
12996 arg1)));
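/* A concrete instance of the sign-bit selection above (not part of
   GCC, kept out of the build with #if 0): in two's complement, A < 0
   holds exactly when the sign bit of A is set, so selecting the
   sign-bit constant on A < 0 equals masking it out of A.  */
#if 0
#include <cassert>
#include <cstdint>

static void
example_sign_bit_select (int32_t a)
{
  const uint32_t sign = UINT32_C (1) << 31;
  uint32_t selected = a < 0 ? sign : 0;
  assert (selected == ((uint32_t) a & sign));
}
#endif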
12999 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13000 already handled above. */
13001 if (TREE_CODE (arg0) == BIT_AND_EXPR
13002 && integer_onep (TREE_OPERAND (arg0, 1))
13003 && integer_zerop (op2)
13004 && integer_pow2p (arg1))
13006 tree tem = TREE_OPERAND (arg0, 0);
13007 STRIP_NOPS (tem);
13008 if (TREE_CODE (tem) == RSHIFT_EXPR
13009 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13010 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13011 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13012 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13013 fold_convert_loc (loc, type,
13014 TREE_OPERAND (tem, 0)),
13015 op1);
13018 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13019 is probably obsolete because the first operand should be a
13020 truth value (that's why we have the two cases above), but let's
13021 leave it in until we can confirm this for all front-ends. */
13022 if (integer_zerop (op2)
13023 && TREE_CODE (arg0) == NE_EXPR
13024 && integer_zerop (TREE_OPERAND (arg0, 1))
13025 && integer_pow2p (arg1)
13026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13027 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13028 arg1, OEP_ONLY_CONST)
13029 /* operand_equal_p compares just the value, not the precision, so e.g.
13030 arg1 could be 8-bit -128 and be a power of two, but the BIT_AND_EXPR's
13031 second operand 32-bit -128, which is not a power of two (or vice
13032 versa). */
13033 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13034 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13036 /* Disable the transformations below for vectors, since
13037 fold_binary_op_with_conditional_arg may undo them immediately,
13038 yielding an infinite loop. */
13039 if (code == VEC_COND_EXPR)
13040 return NULL_TREE;
13042 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13043 if (integer_zerop (op2)
13044 && truth_value_p (TREE_CODE (arg0))
13045 && truth_value_p (TREE_CODE (arg1))
13046 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13047 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13048 : TRUTH_ANDIF_EXPR,
13049 type, fold_convert_loc (loc, type, arg0), op1);
13051 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13052 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13053 && truth_value_p (TREE_CODE (arg0))
13054 && truth_value_p (TREE_CODE (arg1))
13055 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13057 location_t loc0 = expr_location_or (arg0, loc);
13058 /* Only perform transformation if ARG0 is easily inverted. */
13059 tem = fold_invert_truthvalue (loc0, arg0);
13060 if (tem)
13061 return fold_build2_loc (loc, code == VEC_COND_EXPR
13062 ? BIT_IOR_EXPR
13063 : TRUTH_ORIF_EXPR,
13064 type, fold_convert_loc (loc, type, tem),
13065 op1);
13068 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13069 if (integer_zerop (arg1)
13070 && truth_value_p (TREE_CODE (arg0))
13071 && truth_value_p (TREE_CODE (op2))
13072 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13074 location_t loc0 = expr_location_or (arg0, loc);
13075 /* Only perform transformation if ARG0 is easily inverted. */
13076 tem = fold_invert_truthvalue (loc0, arg0);
13077 if (tem)
13078 return fold_build2_loc (loc, code == VEC_COND_EXPR
13079 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13080 type, fold_convert_loc (loc, type, tem),
13081 op2);
13084 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13085 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13086 && truth_value_p (TREE_CODE (arg0))
13087 && truth_value_p (TREE_CODE (op2))
13088 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13089 return fold_build2_loc (loc, code == VEC_COND_EXPR
13090 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13091 type, fold_convert_loc (loc, type, arg0), op2);
13093 return NULL_TREE;
13095 case CALL_EXPR:
13096 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13097 of fold_ternary on them. */
13098 gcc_unreachable ();
13100 case BIT_FIELD_REF:
13101 if (TREE_CODE (arg0) == VECTOR_CST
13102 && (type == TREE_TYPE (TREE_TYPE (arg0))
13103 || (VECTOR_TYPE_P (type)
13104 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13105 && tree_fits_uhwi_p (op1)
13106 && tree_fits_uhwi_p (op2))
13108 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13109 unsigned HOST_WIDE_INT width
13110 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13111 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13112 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13113 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13115 if (n != 0
13116 && (idx % width) == 0
13117 && (n % width) == 0
13118 && known_le ((idx + n) / width,
13119 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13121 idx = idx / width;
13122 n = n / width;
13124 if (TREE_CODE (arg0) == VECTOR_CST)
13126 if (n == 1)
13128 tem = VECTOR_CST_ELT (arg0, idx);
13129 if (VECTOR_TYPE_P (type))
13130 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13131 return tem;
13134 tree_vector_builder vals (type, n, 1);
13135 for (unsigned i = 0; i < n; ++i)
13136 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13137 return vals.build ();
13142 /* On constants we can use native encode/interpret to constant
13143 fold (nearly) all BIT_FIELD_REFs. */
13144 if (CONSTANT_CLASS_P (arg0)
13145 && can_native_interpret_type_p (type)
13146 && BITS_PER_UNIT == 8
13147 && tree_fits_uhwi_p (op1)
13148 && tree_fits_uhwi_p (op2))
13150 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13151 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13152 /* Limit us to a reasonable amount of work. To relax the
13153 other limitations we need bit-shifting of the buffer
13154 and rounding up the size. */
13155 if (bitpos % BITS_PER_UNIT == 0
13156 && bitsize % BITS_PER_UNIT == 0
13157 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13159 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13160 unsigned HOST_WIDE_INT len
13161 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13162 bitpos / BITS_PER_UNIT);
13163 if (len > 0
13164 && len * BITS_PER_UNIT >= bitsize)
13166 tree v = native_interpret_expr (type, b,
13167 bitsize / BITS_PER_UNIT);
13168 if (v)
13169 return v;
13174 return NULL_TREE;
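/* A minimal model of the native encode/interpret folding above (not
   part of GCC, kept out of the build with #if 0): a byte-aligned
   BIT_FIELD_REF of a constant amounts to copying out the referenced
   bytes of its object representation and re-reading them in the new
   type, which is what native_encode_expr / native_interpret_expr do.  */
#if 0
#include <cassert>
#include <cstdint>
#include <cstring>

static void
example_bit_field_ref_bytes ()
{
  const uint32_t whole = 0x11223344;
  unsigned char buf[sizeof whole];
  std::memcpy (buf, &whole, sizeof whole);	/* "encode"  */
  uint16_t part;
  std::memcpy (&part, buf + 1, sizeof part);	/* "interpret" bits 8..23  */
  if (buf[0] == 0x44)				/* little-endian host  */
    assert (part == ((whole >> 8) & 0xffff));
}
#endif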
13176 case VEC_PERM_EXPR:
13177 /* Perform constant folding of VEC_PERM_EXPR. */
13178 if (TREE_CODE (arg2) == VECTOR_CST
13179 && TREE_CODE (op0) == VECTOR_CST
13180 && TREE_CODE (op1) == VECTOR_CST)
13182 /* Build a vector of integers from the tree mask. */
13183 vec_perm_builder builder;
13184 if (!tree_to_vec_perm_builder (&builder, arg2))
13185 return NULL_TREE;
13187 /* Create a vec_perm_indices for the integer vector. */
13188 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13189 bool single_arg = (op0 == op1);
13190 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13191 return fold_vec_perm (type, op0, op1, sel);
13193 return NULL_TREE;
13195 case BIT_INSERT_EXPR:
13196 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13197 if (TREE_CODE (arg0) == INTEGER_CST
13198 && TREE_CODE (arg1) == INTEGER_CST)
13200 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13201 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13202 wide_int tem = (wi::to_wide (arg0)
13203 & wi::shifted_mask (bitpos, bitsize, true,
13204 TYPE_PRECISION (type)));
13205 wide_int tem2
13206 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13207 bitsize), bitpos);
13208 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13210 else if (TREE_CODE (arg0) == VECTOR_CST
13211 && CONSTANT_CLASS_P (arg1)
13212 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13213 TREE_TYPE (arg1)))
13215 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13216 unsigned HOST_WIDE_INT elsize
13217 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13218 if (bitpos % elsize == 0)
13220 unsigned k = bitpos / elsize;
13221 unsigned HOST_WIDE_INT nelts;
13222 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13223 return arg0;
13224 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13226 tree_vector_builder elts (type, nelts, 1);
13227 elts.quick_grow (nelts);
13228 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13229 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13230 return elts.build ();
13234 return NULL_TREE;
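/* A concrete instance of the integer BIT_INSERT_EXPR arithmetic above
   (not part of GCC, kept out of the build with #if 0): clear BITSIZE
   bits of ARG0 starting at BITPOS, then OR in ARG1 shifted into place,
   mirroring the wide_int mask-and-shift computation.  */
#if 0
#include <cassert>
#include <cstdint>

static void
example_bit_insert ()
{
  const uint32_t arg0 = 0xaabbccdd;
  const uint16_t arg1 = 0x1234;
  const unsigned bitpos = 8, bitsize = 16;
  uint32_t cleared = arg0 & ~(((UINT32_C (1) << bitsize) - 1) << bitpos);
  uint32_t result = cleared | ((uint32_t) arg1 << bitpos);
  assert (result == 0xaa1234dd);
}
#endif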
13236 default:
13237 return NULL_TREE;
13238 } /* switch (code) */
13241 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13242 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13243 constructor element index of the value returned. If the element is
13244 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13245 the index of the element after the ACCESS_INDEX position (which
13246 may be outside of the CTOR array). */
13248 tree
13249 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13250 unsigned *ctor_idx)
13252 tree index_type = NULL_TREE;
13253 signop index_sgn = UNSIGNED;
13254 offset_int low_bound = 0;
13256 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13258 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13259 if (domain_type && TYPE_MIN_VALUE (domain_type))
13261 /* Static constructors for variably sized objects make no sense. */
13262 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13263 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13264 /* ??? When it is obvious that the range is signed, treat it so. */
13265 if (TYPE_UNSIGNED (index_type)
13266 && TYPE_MAX_VALUE (domain_type)
13267 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13268 TYPE_MIN_VALUE (domain_type)))
13270 index_sgn = SIGNED;
13271 low_bound
13272 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13273 SIGNED);
13275 else
13277 index_sgn = TYPE_SIGN (index_type);
13278 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13283 if (index_type)
13284 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13285 index_sgn);
13287 offset_int index = low_bound;
13288 if (index_type)
13289 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13291 offset_int max_index = index;
13292 unsigned cnt;
13293 tree cfield, cval;
13294 bool first_p = true;
13296 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13298 /* An array constructor might explicitly set the index, or specify
13299 a range, or leave the index NULL meaning that it is the next index
13300 after the previous one. */
13301 if (cfield)
13303 if (TREE_CODE (cfield) == INTEGER_CST)
13304 max_index = index
13305 = offset_int::from (wi::to_wide (cfield), index_sgn);
13306 else
13308 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13309 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13310 index_sgn);
13311 max_index
13312 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13313 index_sgn);
13314 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13317 else if (!first_p)
13319 index = max_index + 1;
13320 if (index_type)
13321 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13322 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13323 max_index = index;
13325 else
13326 first_p = false;
13328 /* Do we have a match? */
13329 if (wi::cmp (access_index, index, index_sgn) >= 0)
13331 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13333 if (ctor_idx)
13334 *ctor_idx = cnt;
13335 return cval;
13338 else if (in_gimple_form)
13339 /* We're past the element we are searching for. Note that during parsing
13340 the elements might not be sorted.
13341 ??? We should use a binary search and a flag on the
13342 CONSTRUCTOR as to whether elements are sorted in declaration
13343 order. */
13344 break;
13346 if (ctor_idx)
13347 *ctor_idx = cnt;
13348 return NULL_TREE;
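/* A hypothetical caller of the routine above (illustrative only, kept
   out of the build with #if 0); the function name and the fixed index
   are made up for the example. It looks up element 3 of a constant
   array initializer, which may be absent when trailing zero elements
   were omitted from the CONSTRUCTOR.  */
#if 0
static tree
example_ctor_lookup (tree ctor)
{
  unsigned ix;
  tree val = get_array_ctor_element_at_index (ctor, 3, &ix);
  /* VAL is NULL_TREE if the element is implicit; IX then points at
     the following explicit element.  */
  return val;
}
#endif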
13351 /* Perform constant folding and related simplification of EXPR.
13352 The related simplifications include x*1 => x, x*0 => 0, etc.,
13353 and application of the associative law.
13354 NOP_EXPR conversions may be removed freely (as long as we
13355 are careful not to change the type of the overall expression).
13356 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13357 but we can constant-fold them if they have constant operands. */
13359 #ifdef ENABLE_FOLD_CHECKING
13360 # define fold(x) fold_1 (x)
13361 static tree fold_1 (tree);
13362 static
13363 #endif
13364 tree
13365 fold (tree expr)
13367 const tree t = expr;
13368 enum tree_code code = TREE_CODE (t);
13369 enum tree_code_class kind = TREE_CODE_CLASS (code);
13370 tree tem;
13371 location_t loc = EXPR_LOCATION (expr);
13373 /* Return right away if a constant. */
13374 if (kind == tcc_constant)
13375 return t;
13377 /* CALL_EXPR-like objects with variable numbers of operands are
13378 treated specially. */
13379 if (kind == tcc_vl_exp)
13381 if (code == CALL_EXPR)
13383 tem = fold_call_expr (loc, expr, false);
13384 return tem ? tem : expr;
13386 return expr;
13389 if (IS_EXPR_CODE_CLASS (kind))
13391 tree type = TREE_TYPE (t);
13392 tree op0, op1, op2;
13394 switch (TREE_CODE_LENGTH (code))
13396 case 1:
13397 op0 = TREE_OPERAND (t, 0);
13398 tem = fold_unary_loc (loc, code, type, op0);
13399 return tem ? tem : expr;
13400 case 2:
13401 op0 = TREE_OPERAND (t, 0);
13402 op1 = TREE_OPERAND (t, 1);
13403 tem = fold_binary_loc (loc, code, type, op0, op1);
13404 return tem ? tem : expr;
13405 case 3:
13406 op0 = TREE_OPERAND (t, 0);
13407 op1 = TREE_OPERAND (t, 1);
13408 op2 = TREE_OPERAND (t, 2);
13409 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13410 return tem ? tem : expr;
13411 default:
13412 break;
13416 switch (code)
13418 case ARRAY_REF:
13420 tree op0 = TREE_OPERAND (t, 0);
13421 tree op1 = TREE_OPERAND (t, 1);
13423 if (TREE_CODE (op1) == INTEGER_CST
13424 && TREE_CODE (op0) == CONSTRUCTOR
13425 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13427 tree val = get_array_ctor_element_at_index (op0,
13428 wi::to_offset (op1));
13429 if (val)
13430 return val;
13433 return t;
13436 /* Return a VECTOR_CST if possible. */
13437 case CONSTRUCTOR:
13439 tree type = TREE_TYPE (t);
13440 if (TREE_CODE (type) != VECTOR_TYPE)
13441 return t;
13443 unsigned i;
13444 tree val;
13445 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13446 if (! CONSTANT_CLASS_P (val))
13447 return t;
13449 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13452 case CONST_DECL:
13453 return fold (DECL_INITIAL (t));
13455 default:
13456 return t;
13457 } /* switch (code) */
13460 #ifdef ENABLE_FOLD_CHECKING
13461 #undef fold
13463 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13464 hash_table<nofree_ptr_hash<const tree_node> > *);
13465 static void fold_check_failed (const_tree, const_tree);
13466 void print_fold_checksum (const_tree);
13468 /* When --enable-checking=fold, compute a digest of expr before
13469 and after the actual fold call to verify that fold did not accidentally
13470 change the original expr. */
13472 tree
13473 fold (tree expr)
13475 tree ret;
13476 struct md5_ctx ctx;
13477 unsigned char checksum_before[16], checksum_after[16];
13478 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13480 md5_init_ctx (&ctx);
13481 fold_checksum_tree (expr, &ctx, &ht);
13482 md5_finish_ctx (&ctx, checksum_before);
13483 ht.empty ();
13485 ret = fold_1 (expr);
13487 md5_init_ctx (&ctx);
13488 fold_checksum_tree (expr, &ctx, &ht);
13489 md5_finish_ctx (&ctx, checksum_after);
13491 if (memcmp (checksum_before, checksum_after, 16))
13492 fold_check_failed (expr, ret);
13494 return ret;
13497 void
13498 print_fold_checksum (const_tree expr)
13500 struct md5_ctx ctx;
13501 unsigned char checksum[16], cnt;
13502 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13504 md5_init_ctx (&ctx);
13505 fold_checksum_tree (expr, &ctx, &ht);
13506 md5_finish_ctx (&ctx, checksum);
13507 for (cnt = 0; cnt < 16; ++cnt)
13508 fprintf (stderr, "%02x", checksum[cnt]);
13509 putc ('\n', stderr);
13512 static void
13513 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13515 internal_error ("fold check: original tree changed by fold");
13518 static void
13519 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13520 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13522 const tree_node **slot;
13523 enum tree_code code;
13524 union tree_node *buf;
13525 int i, len;
13527 recursive_label:
13528 if (expr == NULL)
13529 return;
13530 slot = ht->find_slot (expr, INSERT);
13531 if (*slot != NULL)
13532 return;
13533 *slot = expr;
13534 code = TREE_CODE (expr);
13535 if (TREE_CODE_CLASS (code) == tcc_declaration
13536 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13538 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13539 size_t sz = tree_size (expr);
13540 buf = XALLOCAVAR (union tree_node, sz);
13541 memcpy ((char *) buf, expr, sz);
13542 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13543 buf->decl_with_vis.symtab_node = NULL;
13544 buf->base.nowarning_flag = 0;
13545 expr = (tree) buf;
13547 else if (TREE_CODE_CLASS (code) == tcc_type
13548 && (TYPE_POINTER_TO (expr)
13549 || TYPE_REFERENCE_TO (expr)
13550 || TYPE_CACHED_VALUES_P (expr)
13551 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13552 || TYPE_NEXT_VARIANT (expr)
13553 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13555 /* Allow these fields to be modified. */
13556 tree tmp;
13557 size_t sz = tree_size (expr);
13558 buf = XALLOCAVAR (union tree_node, sz);
13559 memcpy ((char *) buf, expr, sz);
13560 expr = tmp = (tree) buf;
13561 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13562 TYPE_POINTER_TO (tmp) = NULL;
13563 TYPE_REFERENCE_TO (tmp) = NULL;
13564 TYPE_NEXT_VARIANT (tmp) = NULL;
13565 TYPE_ALIAS_SET (tmp) = -1;
13566 if (TYPE_CACHED_VALUES_P (tmp))
13568 TYPE_CACHED_VALUES_P (tmp) = 0;
13569 TYPE_CACHED_VALUES (tmp) = NULL;
13572 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13574 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13575 that and change builtins.cc etc. instead - see PR89543. */
13576 size_t sz = tree_size (expr);
13577 buf = XALLOCAVAR (union tree_node, sz);
13578 memcpy ((char *) buf, expr, sz);
13579 buf->base.nowarning_flag = 0;
13580 expr = (tree) buf;
13582 md5_process_bytes (expr, tree_size (expr), ctx);
13583 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13584 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13585 if (TREE_CODE_CLASS (code) != tcc_type
13586 && TREE_CODE_CLASS (code) != tcc_declaration
13587 && code != TREE_LIST
13588 && code != SSA_NAME
13589 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13590 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13591 switch (TREE_CODE_CLASS (code))
13593 case tcc_constant:
13594 switch (code)
13596 case STRING_CST:
13597 md5_process_bytes (TREE_STRING_POINTER (expr),
13598 TREE_STRING_LENGTH (expr), ctx);
13599 break;
13600 case COMPLEX_CST:
13601 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13602 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13603 break;
13604 case VECTOR_CST:
13605 len = vector_cst_encoded_nelts (expr);
13606 for (i = 0; i < len; ++i)
13607 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13608 break;
13609 default:
13610 break;
13612 break;
13613 case tcc_exceptional:
13614 switch (code)
13616 case TREE_LIST:
13617 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13618 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13619 expr = TREE_CHAIN (expr);
13620 goto recursive_label;
13621 break;
13622 case TREE_VEC:
13623 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13624 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13625 break;
13626 default:
13627 break;
13629 break;
13630 case tcc_expression:
13631 case tcc_reference:
13632 case tcc_comparison:
13633 case tcc_unary:
13634 case tcc_binary:
13635 case tcc_statement:
13636 case tcc_vl_exp:
13637 len = TREE_OPERAND_LENGTH (expr);
13638 for (i = 0; i < len; ++i)
13639 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13640 break;
13641 case tcc_declaration:
13642 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13643 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13644 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13646 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13647 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13648 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13649 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13650 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13653 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13655 if (TREE_CODE (expr) == FUNCTION_DECL)
13657 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13658 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13660 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13662 break;
13663 case tcc_type:
13664 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13665 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13666 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13667 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13668 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13669 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13670 if (INTEGRAL_TYPE_P (expr)
13671 || SCALAR_FLOAT_TYPE_P (expr))
13673 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13674 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13676 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13677 if (RECORD_OR_UNION_TYPE_P (expr))
13678 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13679 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13680 break;
13681 default:
13682 break;
13686 /* Helper function for outputting the checksum of a tree T. When
13687 debugging with gdb, you can "define mynext" to be "next" followed
13688 by "call debug_fold_checksum (op0)", then just trace down till the
13689 outputs differ. */
13691 DEBUG_FUNCTION void
13692 debug_fold_checksum (const_tree t)
13694 int i;
13695 unsigned char checksum[16];
13696 struct md5_ctx ctx;
13697 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13699 md5_init_ctx (&ctx);
13700 fold_checksum_tree (t, &ctx, &ht);
13701 md5_finish_ctx (&ctx, checksum);
13702 ht.empty ();
13704 for (i = 0; i < 16; i++)
13705 fprintf (stderr, "%d ", checksum[i]);
13707 fprintf (stderr, "\n");
13710 #endif
13712 /* Fold a unary tree expression with code CODE of type TYPE with an
13713 operand OP0. LOC is the location of the resulting expression.
13714 Return a folded expression if successful. Otherwise, return a tree
13715 expression with code CODE of type TYPE with an operand OP0. */
13717 tree
13718 fold_build1_loc (location_t loc,
13719 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13721 tree tem;
13722 #ifdef ENABLE_FOLD_CHECKING
13723 unsigned char checksum_before[16], checksum_after[16];
13724 struct md5_ctx ctx;
13725 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13727 md5_init_ctx (&ctx);
13728 fold_checksum_tree (op0, &ctx, &ht);
13729 md5_finish_ctx (&ctx, checksum_before);
13730 ht.empty ();
13731 #endif
13733 tem = fold_unary_loc (loc, code, type, op0);
13734 if (!tem)
13735 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13737 #ifdef ENABLE_FOLD_CHECKING
13738 md5_init_ctx (&ctx);
13739 fold_checksum_tree (op0, &ctx, &ht);
13740 md5_finish_ctx (&ctx, checksum_after);
13742 if (memcmp (checksum_before, checksum_after, 16))
13743 fold_check_failed (op0, tem);
13744 #endif
13745 return tem;
13748 /* Fold a binary tree expression with code CODE of type TYPE with
13749 operands OP0 and OP1. LOC is the location of the resulting
13750 expression. Return a folded expression if successful. Otherwise,
13751 return a tree expression with code CODE of type TYPE with operands
13752 OP0 and OP1. */
13754 tree
13755 fold_build2_loc (location_t loc,
13756 enum tree_code code, tree type, tree op0, tree op1
13757 MEM_STAT_DECL)
13759 tree tem;
13760 #ifdef ENABLE_FOLD_CHECKING
13761 unsigned char checksum_before_op0[16],
13762 checksum_before_op1[16],
13763 checksum_after_op0[16],
13764 checksum_after_op1[16];
13765 struct md5_ctx ctx;
13766 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13768 md5_init_ctx (&ctx);
13769 fold_checksum_tree (op0, &ctx, &ht);
13770 md5_finish_ctx (&ctx, checksum_before_op0);
13771 ht.empty ();
13773 md5_init_ctx (&ctx);
13774 fold_checksum_tree (op1, &ctx, &ht);
13775 md5_finish_ctx (&ctx, checksum_before_op1);
13776 ht.empty ();
13777 #endif
13779 tem = fold_binary_loc (loc, code, type, op0, op1);
13780 if (!tem)
13781 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13783 #ifdef ENABLE_FOLD_CHECKING
13784 md5_init_ctx (&ctx);
13785 fold_checksum_tree (op0, &ctx, &ht);
13786 md5_finish_ctx (&ctx, checksum_after_op0);
13787 ht.empty ();
13789 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13790 fold_check_failed (op0, tem);
13792 md5_init_ctx (&ctx);
13793 fold_checksum_tree (op1, &ctx, &ht);
13794 md5_finish_ctx (&ctx, checksum_after_op1);
13796 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13797 fold_check_failed (op1, tem);
13798 #endif
13799 return tem;
13802 /* Fold a ternary tree expression with code CODE of type TYPE with
13803 operands OP0, OP1, and OP2. Return a folded expression if
13804 successful. Otherwise, return a tree expression with code CODE of
13805 type TYPE with operands OP0, OP1, and OP2. */
13807 tree
13808 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13809 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13811 tree tem;
13812 #ifdef ENABLE_FOLD_CHECKING
13813 unsigned char checksum_before_op0[16],
13814 checksum_before_op1[16],
13815 checksum_before_op2[16],
13816 checksum_after_op0[16],
13817 checksum_after_op1[16],
13818 checksum_after_op2[16];
13819 struct md5_ctx ctx;
13820 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13822 md5_init_ctx (&ctx);
13823 fold_checksum_tree (op0, &ctx, &ht);
13824 md5_finish_ctx (&ctx, checksum_before_op0);
13825 ht.empty ();
13827 md5_init_ctx (&ctx);
13828 fold_checksum_tree (op1, &ctx, &ht);
13829 md5_finish_ctx (&ctx, checksum_before_op1);
13830 ht.empty ();
13832 md5_init_ctx (&ctx);
13833 fold_checksum_tree (op2, &ctx, &ht);
13834 md5_finish_ctx (&ctx, checksum_before_op2);
13835 ht.empty ();
13836 #endif
13838 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13839 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13840 if (!tem)
13841 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13843 #ifdef ENABLE_FOLD_CHECKING
13844 md5_init_ctx (&ctx);
13845 fold_checksum_tree (op0, &ctx, &ht);
13846 md5_finish_ctx (&ctx, checksum_after_op0);
13847 ht.empty ();
13849 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13850 fold_check_failed (op0, tem);
13852 md5_init_ctx (&ctx);
13853 fold_checksum_tree (op1, &ctx, &ht);
13854 md5_finish_ctx (&ctx, checksum_after_op1);
13855 ht.empty ();
13857 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13858 fold_check_failed (op1, tem);
13860 md5_init_ctx (&ctx);
13861 fold_checksum_tree (op2, &ctx, &ht);
13862 md5_finish_ctx (&ctx, checksum_after_op2);
13864 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13865 fold_check_failed (op2, tem);
13866 #endif
13867 return tem;
13870 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13871 arguments in ARGARRAY, and a null static chain.
13872 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13873 of type TYPE from the given operands as constructed by build_call_array. */
13875 tree
13876 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13877 int nargs, tree *argarray)
13879 tree tem;
13880 #ifdef ENABLE_FOLD_CHECKING
13881 unsigned char checksum_before_fn[16],
13882 checksum_before_arglist[16],
13883 checksum_after_fn[16],
13884 checksum_after_arglist[16];
13885 struct md5_ctx ctx;
13886 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13887 int i;
13889 md5_init_ctx (&ctx);
13890 fold_checksum_tree (fn, &ctx, &ht);
13891 md5_finish_ctx (&ctx, checksum_before_fn);
13892 ht.empty ();
13894 md5_init_ctx (&ctx);
13895 for (i = 0; i < nargs; i++)
13896 fold_checksum_tree (argarray[i], &ctx, &ht);
13897 md5_finish_ctx (&ctx, checksum_before_arglist);
13898 ht.empty ();
13899 #endif
13901 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13902 if (!tem)
13903 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13905 #ifdef ENABLE_FOLD_CHECKING
13906 md5_init_ctx (&ctx);
13907 fold_checksum_tree (fn, &ctx, &ht);
13908 md5_finish_ctx (&ctx, checksum_after_fn);
13909 ht.empty ();
13911 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13912 fold_check_failed (fn, tem);
13914 md5_init_ctx (&ctx);
13915 for (i = 0; i < nargs; i++)
13916 fold_checksum_tree (argarray[i], &ctx, &ht);
13917 md5_finish_ctx (&ctx, checksum_after_arglist);
13919 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13920 fold_check_failed (NULL_TREE, tem);
13921 #endif
13922 return tem;
13925 /* Perform constant folding and related simplification of initializer
13926 expression EXPR. These behave identically to "fold_buildN" but ignore
13927 potential run-time traps and exceptions that fold must preserve. */
13929 #define START_FOLD_INIT \
13930 int saved_signaling_nans = flag_signaling_nans;\
13931 int saved_trapping_math = flag_trapping_math;\
13932 int saved_rounding_math = flag_rounding_math;\
13933 int saved_trapv = flag_trapv;\
13934 int saved_folding_initializer = folding_initializer;\
13935 flag_signaling_nans = 0;\
13936 flag_trapping_math = 0;\
13937 flag_rounding_math = 0;\
13938 flag_trapv = 0;\
13939 folding_initializer = 1;
13941 #define END_FOLD_INIT \
13942 flag_signaling_nans = saved_signaling_nans;\
13943 flag_trapping_math = saved_trapping_math;\
13944 flag_rounding_math = saved_rounding_math;\
13945 flag_trapv = saved_trapv;\
13946 folding_initializer = saved_folding_initializer;
13948 tree
13949 fold_init (tree expr)
13951 tree result;
13952 START_FOLD_INIT;
13954 result = fold (expr);
13956 END_FOLD_INIT;
13957 return result;
13960 tree
13961 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13962 tree type, tree op)
13964 tree result;
13965 START_FOLD_INIT;
13967 result = fold_build1_loc (loc, code, type, op);
13969 END_FOLD_INIT;
13970 return result;
13973 tree
13974 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13975 tree type, tree op0, tree op1)
13977 tree result;
13978 START_FOLD_INIT;
13980 result = fold_build2_loc (loc, code, type, op0, op1);
13982 END_FOLD_INIT;
13983 return result;
13986 tree
13987 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13988 int nargs, tree *argarray)
13990 tree result;
13991 START_FOLD_INIT;
13993 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13995 END_FOLD_INIT;
13996 return result;
13999 tree
14000 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14001 tree lhs, tree rhs)
14003 tree result;
14004 START_FOLD_INIT;
14006 result = fold_binary_loc (loc, code, type, lhs, rhs);
14008 END_FOLD_INIT;
14009 return result;
14012 #undef START_FOLD_INIT
14013 #undef END_FOLD_INIT
14015 /* Determine whether the first argument is a multiple of the second argument.
14016 Return false if it is not, or if we cannot easily determine it to be.
14018 An example of the sort of thing we care about (at this point; this routine
14019 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14020 fold cases do now) is discovering that
14022 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14024 is a multiple of
14026 SAVE_EXPR (J * 8)
14028 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14030 This code also handles discovering that
14032 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14034 is a multiple of 8 so we don't have to worry about dealing with a
14035 possible remainder.
14037 Note that we *look* inside a SAVE_EXPR only to determine how it was
14038 calculated; it is not safe for fold to do much of anything else with the
14039 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14040 at run time. For example, the latter example above *cannot* be implemented
14041 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14042 evaluation time of the original SAVE_EXPR is not necessarily the same at
14043 the time the new expression is evaluated. The only optimization of this
14044 sort that would be valid is changing
14046 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14048 divided by 8 to
14050 SAVE_EXPR (I) * SAVE_EXPR (J)
14052 (where the same SAVE_EXPR (J) is used in the original and the
14053 transformed version).
14055 NOWRAP specifies whether all outer operations in TYPE should
14056 be considered not wrapping. Any type conversion within TOP acts
14057 as a barrier and we will fall back to NOWRAP being false.
14058 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14059 as not wrapping even though they are generally using unsigned arithmetic. */
14061 bool
14062 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14064 gimple *stmt;
14065 tree op1, op2;
14067 if (operand_equal_p (top, bottom, 0))
14068 return true;
14070 if (TREE_CODE (type) != INTEGER_TYPE)
14071 return false;
14073 switch (TREE_CODE (top))
14075 case BIT_AND_EXPR:
14076 /* Bitwise and provides a power of two multiple. If the mask is
14077 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14078 if (!integer_pow2p (bottom))
14079 return false;
14080 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14081 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14083 case MULT_EXPR:
14084 /* If the multiplication can wrap we cannot recurse further unless
14085 the bottom is a power of two which is where wrapping does not
14086 matter. */
14087 if (!nowrap
14088 && !TYPE_OVERFLOW_UNDEFINED (type)
14089 && !integer_pow2p (bottom))
14090 return false;
14091 if (TREE_CODE (bottom) == INTEGER_CST)
14093 op1 = TREE_OPERAND (top, 0);
14094 op2 = TREE_OPERAND (top, 1);
14095 if (TREE_CODE (op1) == INTEGER_CST)
14096 std::swap (op1, op2);
14097 if (TREE_CODE (op2) == INTEGER_CST)
14099 if (multiple_of_p (type, op2, bottom, nowrap))
14100 return true;
14101 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14102 if (multiple_of_p (type, bottom, op2, nowrap))
14104 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14105 wi::to_widest (op2));
14106 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14108 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14109 return multiple_of_p (type, op1, op2, nowrap);
14112 return multiple_of_p (type, op1, bottom, nowrap);
14115 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14116 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14118 case LSHIFT_EXPR:
14119 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14120 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14122 op1 = TREE_OPERAND (top, 1);
14123 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14125 wide_int mul_op
14126 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14127 return multiple_of_p (type,
14128 wide_int_to_tree (type, mul_op), bottom,
14129 nowrap);
14132 return false;
14134 case MINUS_EXPR:
14135 case PLUS_EXPR:
14136 /* If the addition or subtraction can wrap we cannot recurse further
14137 unless bottom is a power of two which is where wrapping does not
14138 matter. */
14139 if (!nowrap
14140 && !TYPE_OVERFLOW_UNDEFINED (type)
14141 && !integer_pow2p (bottom))
14142 return false;
14144 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14145 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14146 but 0xfffffffd is not. */
14147 op1 = TREE_OPERAND (top, 1);
14148 if (TREE_CODE (top) == PLUS_EXPR
14149 && nowrap
14150 && TYPE_UNSIGNED (type)
14151 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14152 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14154 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14155 precisely, so be conservative here checking if both op0 and op1
14156 are multiple of bottom. Note we check the second operand first
14157 since it's usually simpler. */
14158 return (multiple_of_p (type, op1, bottom, nowrap)
14159 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14161 CASE_CONVERT:
14162 /* Can't handle conversions from non-integral or wider integral type. */
14163 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14164 || (TYPE_PRECISION (type)
14165 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14166 return false;
14167 /* NOWRAP only extends to operations in the outermost type so
14168 make sure to strip it off here. */
14169 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14170 TREE_OPERAND (top, 0), bottom, false);
14172 case SAVE_EXPR:
14173 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14175 case COND_EXPR:
14176 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14177 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14179 case INTEGER_CST:
14180 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14181 return false;
14182 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14183 SIGNED);
14185 case SSA_NAME:
14186 if (TREE_CODE (bottom) == INTEGER_CST
14187 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14188 && gimple_code (stmt) == GIMPLE_ASSIGN)
14190 enum tree_code code = gimple_assign_rhs_code (stmt);
14192 /* Check for special cases to see if top is defined as a multiple
14193 of bottom:
14195 top = X & ~(bottom - 1) ; bottom is a power of 2
14197 or
14199 Y = X % bottom
14200 top = X - Y. */
14201 if (code == BIT_AND_EXPR
14202 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14203 && TREE_CODE (op2) == INTEGER_CST
14204 && integer_pow2p (bottom)
14205 && wi::multiple_of_p (wi::to_widest (op2),
14206 wi::to_widest (bottom), UNSIGNED))
14207 return true;
14209 op1 = gimple_assign_rhs1 (stmt);
14210 if (code == MINUS_EXPR
14211 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14212 && TREE_CODE (op2) == SSA_NAME
14213 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14214 && gimple_code (stmt) == GIMPLE_ASSIGN
14215 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14216 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14217 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14218 return true;
14221 /* fall through */
14223 default:
14224 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14225 return multiple_p (wi::to_poly_widest (top),
14226 wi::to_poly_widest (bottom));
14228 return false;
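/* Self-contained instances of the facts multiple_of_p derives (not
   part of GCC, kept out of the build with #if 0), including the
   PLUS_EXPR case above where the unsigned constant 0xfffffffd acts
   as -3 modulo 2^32.  */
#if 0
#include <cassert>
#include <cstdint>

static void
example_multiple_of ()
{
  for (uint32_t i = 0; i < 50; ++i)
    for (uint32_t j = 0; j < 50; ++j)
      assert ((i * (j * 8)) % 8 == 0);		/* MULT_EXPR case.  */
  for (uint32_t x = 0; x < 100; ++x)
    assert ((x & ~UINT32_C (7)) % 8 == 0);	/* BIT_AND_EXPR case.  */
  uint32_t m = 3 * 1000;
  assert ((m + UINT32_C (0xfffffffd)) % 3 == 0); /* m - 3, wrapped.  */
}
#endif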
14232 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14233 This function returns true for integer expressions, and returns
14234 false if uncertain. */
14236 bool
14237 tree_expr_finite_p (const_tree x)
14239 machine_mode mode = element_mode (x);
14240 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14241 return true;
14242 switch (TREE_CODE (x))
14244 case REAL_CST:
14245 return real_isfinite (TREE_REAL_CST_PTR (x));
14246 case COMPLEX_CST:
14247 return tree_expr_finite_p (TREE_REALPART (x))
14248 && tree_expr_finite_p (TREE_IMAGPART (x));
14249 case FLOAT_EXPR:
14250 return true;
14251 case ABS_EXPR:
14252 case CONVERT_EXPR:
14253 case NON_LVALUE_EXPR:
14254 case NEGATE_EXPR:
14255 case SAVE_EXPR:
14256 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14257 case MIN_EXPR:
14258 case MAX_EXPR:
14259 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14260 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14261 case COND_EXPR:
14262 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14263 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14264 case CALL_EXPR:
14265 switch (get_call_combined_fn (x))
14267 CASE_CFN_FABS:
14268 CASE_CFN_FABS_FN:
14269 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14270 CASE_CFN_FMAX:
14271 CASE_CFN_FMAX_FN:
14272 CASE_CFN_FMIN:
14273 CASE_CFN_FMIN_FN:
14274 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14275 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14276 default:
14277 return false;
14280 default:
14281 return false;
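/* A minimal sketch of the IEEE facts behind these predicates (not
   part of GCC, kept out of the build with #if 0): a sum of finite
   values can overflow to infinity, and infinity minus infinity is
   NaN, which is why PLUS_EXPR and friends are not accepted here and
   are treated as possibly-NaN by tree_expr_maybe_nan_p below unless
   both operands are known finite.  */
#if 0
#include <cassert>
#include <cmath>
#include <limits>

static void
example_finite_reasoning ()
{
  const double inf = std::numeric_limits<double>::infinity ();
  assert (std::isnan (inf - inf));
  assert (std::isinf (1.0e308 + 1.0e308));	/* finite + finite -> inf  */
}
#endif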
14285 /* Return true if expression X evaluates to an infinity.
14286 This function returns false for integer expressions. */
14288 bool
14289 tree_expr_infinite_p (const_tree x)
14291 if (!HONOR_INFINITIES (x))
14292 return false;
14293 switch (TREE_CODE (x))
14295 case REAL_CST:
14296 return real_isinf (TREE_REAL_CST_PTR (x));
14297 case ABS_EXPR:
14298 case NEGATE_EXPR:
14299 case NON_LVALUE_EXPR:
14300 case SAVE_EXPR:
14301 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14302 case COND_EXPR:
14303 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14304 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14305 default:
14306 return false;
14310 /* Return true if expression X could evaluate to an infinity.
14311 This function returns false for integer expressions, and returns
14312 true if uncertain. */
14314 bool
14315 tree_expr_maybe_infinite_p (const_tree x)
14317 if (!HONOR_INFINITIES (x))
14318 return false;
14319 switch (TREE_CODE (x))
14321 case REAL_CST:
14322 return real_isinf (TREE_REAL_CST_PTR (x));
14323 case FLOAT_EXPR:
14324 return false;
14325 case ABS_EXPR:
14326 case NEGATE_EXPR:
14327 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14328 case COND_EXPR:
14329 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14330 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14331 default:
14332 return true;
14336 /* Return true if expression X evaluates to a signaling NaN.
14337 This function returns false for integer expressions. */
14339 bool
14340 tree_expr_signaling_nan_p (const_tree x)
14342 if (!HONOR_SNANS (x))
14343 return false;
14344 switch (TREE_CODE (x))
14346 case REAL_CST:
14347 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14348 case NON_LVALUE_EXPR:
14349 case SAVE_EXPR:
14350 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14351 case COND_EXPR:
14352 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14353 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14354 default:
14355 return false;
14359 /* Return true if expression X could evaluate to a signaling NaN.
14360 This function returns false for integer expressions, and returns
14361 true if uncertain. */
14363 bool
14364 tree_expr_maybe_signaling_nan_p (const_tree x)
14366 if (!HONOR_SNANS (x))
14367 return false;
14368 switch (TREE_CODE (x))
14370 case REAL_CST:
14371 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14372 case FLOAT_EXPR:
14373 return false;
14374 case ABS_EXPR:
14375 case CONVERT_EXPR:
14376 case NEGATE_EXPR:
14377 case NON_LVALUE_EXPR:
14378 case SAVE_EXPR:
14379 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14380 case MIN_EXPR:
14381 case MAX_EXPR:
14382 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14383 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14384 case COND_EXPR:
14385 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14386 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14387 case CALL_EXPR:
14388 switch (get_call_combined_fn (x))
14390 CASE_CFN_FABS:
14391 CASE_CFN_FABS_FN:
14392 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14393 CASE_CFN_FMAX:
14394 CASE_CFN_FMAX_FN:
14395 CASE_CFN_FMIN:
14396 CASE_CFN_FMIN_FN:
14397 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14398 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14399 default:
14400 return true;
14402 default:
14403 return true;
14407 /* Return true if expression X evaluates to a NaN.
14408 This function returns false for integer expressions. */
14410 bool
14411 tree_expr_nan_p (const_tree x)
14413 if (!HONOR_NANS (x))
14414 return false;
14415 switch (TREE_CODE (x))
14417 case REAL_CST:
14418 return real_isnan (TREE_REAL_CST_PTR (x));
14419 case NON_LVALUE_EXPR:
14420 case SAVE_EXPR:
14421 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14422 case COND_EXPR:
14423 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14424 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14425 default:
14426 return false;
14430 /* Return true if expression X could evaluate to a NaN.
14431 This function returns false for integer expressions, and returns
14432 true if uncertain. */
14434 bool
14435 tree_expr_maybe_nan_p (const_tree x)
14437 if (!HONOR_NANS (x))
14438 return false;
14439 switch (TREE_CODE (x))
14441 case REAL_CST:
14442 return real_isnan (TREE_REAL_CST_PTR (x));
14443 case FLOAT_EXPR:
14444 return false;
14445 case PLUS_EXPR:
14446 case MINUS_EXPR:
14447 case MULT_EXPR:
14448 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14449 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14450 case ABS_EXPR:
14451 case CONVERT_EXPR:
14452 case NEGATE_EXPR:
14453 case NON_LVALUE_EXPR:
14454 case SAVE_EXPR:
14455 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14456 case MIN_EXPR:
14457 case MAX_EXPR:
14458 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14459 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14460 case COND_EXPR:
14461 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14462 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14463 case CALL_EXPR:
14464 switch (get_call_combined_fn (x))
14466 CASE_CFN_FABS:
14467 CASE_CFN_FABS_FN:
14468 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14469 CASE_CFN_FMAX:
14470 CASE_CFN_FMAX_FN:
14471 CASE_CFN_FMIN:
14472 CASE_CFN_FMIN_FN:
14473 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14474 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14475 default:
14476 return true;
14478 default:
14479 return true;
14483 /* Return true if expression X could evaluate to -0.0.
14484 This function returns true if uncertain. */
14486 bool
14487 tree_expr_maybe_real_minus_zero_p (const_tree x)
14489 if (!HONOR_SIGNED_ZEROS (x))
14490 return false;
14491 switch (TREE_CODE (x))
14493 case REAL_CST:
14494 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14495 case INTEGER_CST:
14496 case FLOAT_EXPR:
14497 case ABS_EXPR:
14498 return false;
14499 case NON_LVALUE_EXPR:
14500 case SAVE_EXPR:
14501 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14502 case COND_EXPR:
14503 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14504 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14505 case CALL_EXPR:
14506 switch (get_call_combined_fn (x))
14508 CASE_CFN_FABS:
14509 CASE_CFN_FABS_FN:
14510 return false;
14511 default:
14512 break;
14514 default:
14515 break;
14517 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14518 * but currently those predicates require tree and not const_tree. */
14519 return true;
14522 #define tree_expr_nonnegative_warnv_p(X, Y) \
14523 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14525 #define RECURSE(X) \
14526 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14528 /* Return true if CODE or TYPE is known to be non-negative. */
14530 static bool
14531 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14533 if (!VECTOR_TYPE_P (type)
14534 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14535 && truth_value_p (code))
14536 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14537 have a signed:1 type (where the values are -1 and 0). */
14538 return true;
14539 return false;
14542 /* Return true if (CODE OP0) is known to be non-negative. If the return
14543 value is based on the assumption that signed overflow is undefined,
14544 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14545 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14547 bool
14548 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14549 bool *strict_overflow_p, int depth)
14551 if (TYPE_UNSIGNED (type))
14552 return true;
14554 switch (code)
14556 case ABS_EXPR:
14557 /* We can't return 1 if flag_wrapv is set because
14558 ABS_EXPR<INT_MIN> = INT_MIN. */
14559 if (!ANY_INTEGRAL_TYPE_P (type))
14560 return true;
14561 if (TYPE_OVERFLOW_UNDEFINED (type))
14563 *strict_overflow_p = true;
14564 return true;
14566 break;
14568 case NON_LVALUE_EXPR:
14569 case FLOAT_EXPR:
14570 case FIX_TRUNC_EXPR:
14571 return RECURSE (op0);
14573 CASE_CONVERT:
14575 tree inner_type = TREE_TYPE (op0);
14576 tree outer_type = type;
14578 if (SCALAR_FLOAT_TYPE_P (outer_type))
14580 if (SCALAR_FLOAT_TYPE_P (inner_type))
14581 return RECURSE (op0);
14582 if (INTEGRAL_TYPE_P (inner_type))
14584 if (TYPE_UNSIGNED (inner_type))
14585 return true;
14586 return RECURSE (op0);
14589 else if (INTEGRAL_TYPE_P (outer_type))
14591 if (SCALAR_FLOAT_TYPE_P (inner_type))
14592 return RECURSE (op0);
14593 if (INTEGRAL_TYPE_P (inner_type))
14594 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14595 && TYPE_UNSIGNED (inner_type);
14598 break;
14600 default:
14601 return tree_simple_nonnegative_warnv_p (code, type);
14604 /* We don't know the sign of `t', so be conservative and return false. */
14605 return false;
14608 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14609 value is based on the assumption that signed overflow is undefined,
14610 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14611 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14613 bool
14614 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14615 tree op1, bool *strict_overflow_p,
14616 int depth)
14618 if (TYPE_UNSIGNED (type))
14619 return true;
14621 switch (code)
14623 case POINTER_PLUS_EXPR:
14624 case PLUS_EXPR:
14625 if (FLOAT_TYPE_P (type))
14626 return RECURSE (op0) && RECURSE (op1);
14628 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14629 both unsigned and at least 2 bits shorter than the result. */
14630 if (TREE_CODE (type) == INTEGER_TYPE
14631 && TREE_CODE (op0) == NOP_EXPR
14632 && TREE_CODE (op1) == NOP_EXPR)
14634 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14635 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14636 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14637 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14639 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14640 TYPE_PRECISION (inner2)) + 1;
14641 return prec < TYPE_PRECISION (type);
14644 break;
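/* Editorial note (not in the original source): a worked instance of
   the precision test above, assuming 32-bit int and <stdint.h> types.
   For two zero-extended uint8_t values, MAX (8, 8) + 1 == 9 < 32:

     int sum (uint8_t a, uint8_t b)
     {
       // at most 255 + 255 == 510, which can never reach the sign bit
       return (int) a + (int) b;
     }
*/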
14646 case MULT_EXPR:
14647 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14649 /* x * x is always non-negative for floating point x,
14650 or for integer x when signed overflow is undefined. */
14651 if (operand_equal_p (op0, op1, 0)
14652 || (RECURSE (op0) && RECURSE (op1)))
14654 if (ANY_INTEGRAL_TYPE_P (type)
14655 && TYPE_OVERFLOW_UNDEFINED (type))
14656 *strict_overflow_p = true;
14657 return true;
14661 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14662 both unsigned and their combined precision is smaller than the result's. */
14663 if (TREE_CODE (type) == INTEGER_TYPE
14664 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14665 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14667 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14668 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14669 : TREE_TYPE (op0);
14670 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14671 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14672 : TREE_TYPE (op1);
14674 bool unsigned0 = TYPE_UNSIGNED (inner0);
14675 bool unsigned1 = TYPE_UNSIGNED (inner1);
14677 if (TREE_CODE (op0) == INTEGER_CST)
14678 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14680 if (TREE_CODE (op1) == INTEGER_CST)
14681 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14683 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14684 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14686 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14687 ? tree_int_cst_min_precision (op0, UNSIGNED)
14688 : TYPE_PRECISION (inner0);
14690 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14691 ? tree_int_cst_min_precision (op1, UNSIGNED)
14692 : TYPE_PRECISION (inner1);
14694 return precision0 + precision1 < TYPE_PRECISION (type);
14697 return false;
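/* Editorial note (not in the original source): the combined-precision
   test above is tight.  Assuming 32-bit int:

     int p = (int) (uint8_t) a * (int) (uint8_t) b;
     // accepted: 8 + 8 == 16 < 32, product at most 255 * 255 == 65025

   whereas for two uint16_t operands 16 + 16 == 32 is not < 32, and
   65535 * 65535 would wrap to 0xfffe0001, a negative int32_t value,
   so the fold correctly returns false there.  */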
14699 case BIT_AND_EXPR:
14700 return RECURSE (op0) || RECURSE (op1);
14702 case MAX_EXPR:
14703 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14704 things. */
14705 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14706 return RECURSE (op0) && RECURSE (op1);
14707 return RECURSE (op0) || RECURSE (op1);
14709 case BIT_IOR_EXPR:
14710 case BIT_XOR_EXPR:
14711 case MIN_EXPR:
14712 case RDIV_EXPR:
14713 case TRUNC_DIV_EXPR:
14714 case CEIL_DIV_EXPR:
14715 case FLOOR_DIV_EXPR:
14716 case ROUND_DIV_EXPR:
14717 return RECURSE (op0) && RECURSE (op1);
14719 case TRUNC_MOD_EXPR:
14720 return RECURSE (op0);
14722 case FLOOR_MOD_EXPR:
14723 return RECURSE (op1);
14725 case CEIL_MOD_EXPR:
14726 case ROUND_MOD_EXPR:
14727 default:
14728 return tree_simple_nonnegative_warnv_p (code, type);
14731 /* We don't know the sign of `t', so be conservative and return false. */
14732 return false;
14735 /* Return true if T is known to be non-negative. If the return
14736 value is based on the assumption that signed overflow is undefined,
14737 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14738 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14740 bool
14741 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14743 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14744 return true;
14746 switch (TREE_CODE (t))
14748 case INTEGER_CST:
14749 return tree_int_cst_sgn (t) >= 0;
14751 case REAL_CST:
14752 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14754 case FIXED_CST:
14755 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14757 case COND_EXPR:
14758 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14760 case SSA_NAME:
14761 /* Limit the depth of recursion to avoid quadratic behavior.
14762 This is expected to catch almost all occurrences in practice.
14763 If this code misses important cases that unbounded recursion
14764 would not, passes that need this information could be revised
14765 to provide it through dataflow propagation. */
14766 return (!name_registered_for_update_p (t)
14767 && depth < param_max_ssa_name_query_depth
14768 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14769 strict_overflow_p, depth));
14771 default:
14772 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14776 /* Return true if T is known to be non-negative. If the return
14777 value is based on the assumption that signed overflow is undefined,
14778 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14779 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14781 bool
14782 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14783 bool *strict_overflow_p, int depth)
14785 switch (fn)
14787 CASE_CFN_ACOS:
14788 CASE_CFN_ACOS_FN:
14789 CASE_CFN_ACOSH:
14790 CASE_CFN_ACOSH_FN:
14791 CASE_CFN_CABS:
14792 CASE_CFN_CABS_FN:
14793 CASE_CFN_COSH:
14794 CASE_CFN_COSH_FN:
14795 CASE_CFN_ERFC:
14796 CASE_CFN_ERFC_FN:
14797 CASE_CFN_EXP:
14798 CASE_CFN_EXP_FN:
14799 CASE_CFN_EXP10:
14800 CASE_CFN_EXP2:
14801 CASE_CFN_EXP2_FN:
14802 CASE_CFN_FABS:
14803 CASE_CFN_FABS_FN:
14804 CASE_CFN_FDIM:
14805 CASE_CFN_FDIM_FN:
14806 CASE_CFN_HYPOT:
14807 CASE_CFN_HYPOT_FN:
14808 CASE_CFN_POW10:
14809 CASE_CFN_FFS:
14810 CASE_CFN_PARITY:
14811 CASE_CFN_POPCOUNT:
14812 CASE_CFN_CLZ:
14813 CASE_CFN_CLRSB:
14814 case CFN_BUILT_IN_BSWAP16:
14815 case CFN_BUILT_IN_BSWAP32:
14816 case CFN_BUILT_IN_BSWAP64:
14817 case CFN_BUILT_IN_BSWAP128:
14818 /* Always true. */
14819 return true;
14821 CASE_CFN_SQRT:
14822 CASE_CFN_SQRT_FN:
14823 /* sqrt(-0.0) is -0.0. */
14824 if (!HONOR_SIGNED_ZEROS (type))
14825 return true;
14826 return RECURSE (arg0);
14828 CASE_CFN_ASINH:
14829 CASE_CFN_ASINH_FN:
14830 CASE_CFN_ATAN:
14831 CASE_CFN_ATAN_FN:
14832 CASE_CFN_ATANH:
14833 CASE_CFN_ATANH_FN:
14834 CASE_CFN_CBRT:
14835 CASE_CFN_CBRT_FN:
14836 CASE_CFN_CEIL:
14837 CASE_CFN_CEIL_FN:
14838 CASE_CFN_ERF:
14839 CASE_CFN_ERF_FN:
14840 CASE_CFN_EXPM1:
14841 CASE_CFN_EXPM1_FN:
14842 CASE_CFN_FLOOR:
14843 CASE_CFN_FLOOR_FN:
14844 CASE_CFN_FMOD:
14845 CASE_CFN_FMOD_FN:
14846 CASE_CFN_FREXP:
14847 CASE_CFN_FREXP_FN:
14848 CASE_CFN_ICEIL:
14849 CASE_CFN_IFLOOR:
14850 CASE_CFN_IRINT:
14851 CASE_CFN_IROUND:
14852 CASE_CFN_LCEIL:
14853 CASE_CFN_LDEXP:
14854 CASE_CFN_LFLOOR:
14855 CASE_CFN_LLCEIL:
14856 CASE_CFN_LLFLOOR:
14857 CASE_CFN_LLRINT:
14858 CASE_CFN_LLRINT_FN:
14859 CASE_CFN_LLROUND:
14860 CASE_CFN_LLROUND_FN:
14861 CASE_CFN_LRINT:
14862 CASE_CFN_LRINT_FN:
14863 CASE_CFN_LROUND:
14864 CASE_CFN_LROUND_FN:
14865 CASE_CFN_MODF:
14866 CASE_CFN_MODF_FN:
14867 CASE_CFN_NEARBYINT:
14868 CASE_CFN_NEARBYINT_FN:
14869 CASE_CFN_RINT:
14870 CASE_CFN_RINT_FN:
14871 CASE_CFN_ROUND:
14872 CASE_CFN_ROUND_FN:
14873 CASE_CFN_ROUNDEVEN:
14874 CASE_CFN_ROUNDEVEN_FN:
14875 CASE_CFN_SCALB:
14876 CASE_CFN_SCALBLN:
14877 CASE_CFN_SCALBLN_FN:
14878 CASE_CFN_SCALBN:
14879 CASE_CFN_SCALBN_FN:
14880 CASE_CFN_SIGNBIT:
14881 CASE_CFN_SIGNIFICAND:
14882 CASE_CFN_SINH:
14883 CASE_CFN_SINH_FN:
14884 CASE_CFN_TANH:
14885 CASE_CFN_TANH_FN:
14886 CASE_CFN_TRUNC:
14887 CASE_CFN_TRUNC_FN:
14888 /* True if the 1st argument is nonnegative. */
14889 return RECURSE (arg0);
14891 CASE_CFN_FMAX:
14892 CASE_CFN_FMAX_FN:
14893 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14894 things. In the presence of sNaNs, we're only guaranteed to be
14895 non-negative if both operands are non-negative. In the presence
14896 of qNaNs, we're non-negative if either operand is non-negative
14897 and can't be a qNaN, or if both operands are non-negative. */
14898 if (tree_expr_maybe_signaling_nan_p (arg0) ||
14899 tree_expr_maybe_signaling_nan_p (arg1))
14900 return RECURSE (arg0) && RECURSE (arg1);
14901 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14902 || RECURSE (arg1))
14903 : (RECURSE (arg1)
14904 && !tree_expr_maybe_nan_p (arg1));
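/* Editorial note (not in the original source): concrete cases behind
   the qNaN logic above.  C99 fmax returns the other operand when
   exactly one argument is a quiet NaN, so (qnan standing for any
   quiet NaN value):

     fmax (-2.0, qnan)   // == -2.0: a non-negative but possibly-qNaN
                         //   second operand proves nothing by itself
     fmax (qnan, 3.0)    // == 3.0: safe once both operands are known
                         //   non-negative
*/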
14906 CASE_CFN_FMIN:
14907 CASE_CFN_FMIN_FN:
14908 /* True if the 1st AND 2nd arguments are nonnegative. */
14909 return RECURSE (arg0) && RECURSE (arg1);
14911 CASE_CFN_COPYSIGN:
14912 CASE_CFN_COPYSIGN_FN:
14913 /* True if the 2nd argument is nonnegative. */
14914 return RECURSE (arg1);
14916 CASE_CFN_POWI:
14917 /* True if the 1st argument is nonnegative or the second
14918 argument is an even integer. */
14919 if (TREE_CODE (arg1) == INTEGER_CST
14920 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14921 return true;
14922 return RECURSE (arg0);
14924 CASE_CFN_POW:
14925 CASE_CFN_POW_FN:
14926 /* True if the 1st argument is nonnegative or the second
14927 argument is an even integer valued real. */
14928 if (TREE_CODE (arg1) == REAL_CST)
14930 REAL_VALUE_TYPE c;
14931 HOST_WIDE_INT n;
14933 c = TREE_REAL_CST (arg1);
14934 n = real_to_integer (&c);
14935 if ((n & 1) == 0)
14937 REAL_VALUE_TYPE cint;
14938 real_from_integer (&cint, VOIDmode, n, SIGNED);
14939 if (real_identical (&c, &cint))
14940 return true;
14943 return RECURSE (arg0);
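/* Editorial note (not in the original source): examples of the POW
   rule above.  pow (x, 2.0) is non-negative for every x, e.g.
   pow (-3.0, 2.0) == 9.0, because the exponent is an even
   integer-valued real; pow (x, 3.0) or pow (x, 2.5) is only known
   non-negative when x itself is.  */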
14945 default:
14946 break;
14948 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14951 /* Return true if T is known to be non-negative. If the return
14952 value is based on the assumption that signed overflow is undefined,
14953 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14954 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14956 static bool
14957 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14959 enum tree_code code = TREE_CODE (t);
14960 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14961 return true;
14963 switch (code)
14965 case TARGET_EXPR:
14967 tree temp = TARGET_EXPR_SLOT (t);
14968 t = TARGET_EXPR_INITIAL (t);
14970 /* If the initializer is non-void, then it's a normal expression
14971 that will be assigned to the slot. */
14972 if (!VOID_TYPE_P (TREE_TYPE (t)))
14973 return RECURSE (t);
14975 /* Otherwise, the initializer sets the slot in some way. One common
14976 way is an assignment statement at the end of the initializer. */
14977 while (1)
14979 if (TREE_CODE (t) == BIND_EXPR)
14980 t = expr_last (BIND_EXPR_BODY (t));
14981 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14982 || TREE_CODE (t) == TRY_CATCH_EXPR)
14983 t = expr_last (TREE_OPERAND (t, 0));
14984 else if (TREE_CODE (t) == STATEMENT_LIST)
14985 t = expr_last (t);
14986 else
14987 break;
14989 if (TREE_CODE (t) == MODIFY_EXPR
14990 && TREE_OPERAND (t, 0) == temp)
14991 return RECURSE (TREE_OPERAND (t, 1));
14993 return false;
14996 case CALL_EXPR:
14998 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14999 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15001 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15002 get_call_combined_fn (t),
15003 arg0,
15004 arg1,
15005 strict_overflow_p, depth);
15007 case COMPOUND_EXPR:
15008 case MODIFY_EXPR:
15009 return RECURSE (TREE_OPERAND (t, 1));
15011 case BIND_EXPR:
15012 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15014 case SAVE_EXPR:
15015 return RECURSE (TREE_OPERAND (t, 0));
15017 default:
15018 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15022 #undef RECURSE
15023 #undef tree_expr_nonnegative_warnv_p
15025 /* Return true if T is known to be non-negative. If the return
15026 value is based on the assumption that signed overflow is undefined,
15027 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15028 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15030 bool
15031 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15033 enum tree_code code;
15034 if (t == error_mark_node)
15035 return false;
15037 code = TREE_CODE (t);
15038 switch (TREE_CODE_CLASS (code))
15040 case tcc_binary:
15041 case tcc_comparison:
15042 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15043 TREE_TYPE (t),
15044 TREE_OPERAND (t, 0),
15045 TREE_OPERAND (t, 1),
15046 strict_overflow_p, depth);
15048 case tcc_unary:
15049 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15050 TREE_TYPE (t),
15051 TREE_OPERAND (t, 0),
15052 strict_overflow_p, depth);
15054 case tcc_constant:
15055 case tcc_declaration:
15056 case tcc_reference:
15057 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15059 default:
15060 break;
15063 switch (code)
15065 case TRUTH_AND_EXPR:
15066 case TRUTH_OR_EXPR:
15067 case TRUTH_XOR_EXPR:
15068 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15069 TREE_TYPE (t),
15070 TREE_OPERAND (t, 0),
15071 TREE_OPERAND (t, 1),
15072 strict_overflow_p, depth);
15073 case TRUTH_NOT_EXPR:
15074 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15075 TREE_TYPE (t),
15076 TREE_OPERAND (t, 0),
15077 strict_overflow_p, depth);
15079 case COND_EXPR:
15080 case CONSTRUCTOR:
15081 case OBJ_TYPE_REF:
15082 case ADDR_EXPR:
15083 case WITH_SIZE_EXPR:
15084 case SSA_NAME:
15085 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15087 default:
15088 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15092 /* Return true if `t' is known to be non-negative. Handle warnings
15093 about undefined signed overflow. */
15095 bool
15096 tree_expr_nonnegative_p (tree t)
15098 bool ret, strict_overflow_p;
15100 strict_overflow_p = false;
15101 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15102 if (strict_overflow_p)
15103 fold_overflow_warning (("assuming signed overflow does not occur when "
15104 "determining that expression is always "
15105 "non-negative"),
15106 WARN_STRICT_OVERFLOW_MISC);
15107 return ret;
15111 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15112 For floating point we further ensure that T is not denormal.
15113 Similar logic is present in nonzero_address in rtlanal.h.
15115 If the return value is based on the assumption that signed overflow
15116 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15117 change *STRICT_OVERFLOW_P. */
15119 bool
15120 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15121 bool *strict_overflow_p)
15123 switch (code)
15125 case ABS_EXPR:
15126 return tree_expr_nonzero_warnv_p (op0,
15127 strict_overflow_p);
15129 case NOP_EXPR:
15131 tree inner_type = TREE_TYPE (op0);
15132 tree outer_type = type;
15134 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15135 && tree_expr_nonzero_warnv_p (op0,
15136 strict_overflow_p));
15138 break;
15140 case NON_LVALUE_EXPR:
15141 return tree_expr_nonzero_warnv_p (op0,
15142 strict_overflow_p);
15144 default:
15145 break;
15148 return false;
15151 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15152 For floating point we further ensure that T is not denormal.
15153 Similar logic is present in nonzero_address in rtlanal.h.
15155 If the return value is based on the assumption that signed overflow
15156 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15157 change *STRICT_OVERFLOW_P. */
15159 bool
15160 tree_binary_nonzero_warnv_p (enum tree_code code,
15161 tree type,
15162 tree op0,
15163 tree op1, bool *strict_overflow_p)
15165 bool sub_strict_overflow_p;
15166 switch (code)
15168 case POINTER_PLUS_EXPR:
15169 case PLUS_EXPR:
15170 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15172 /* In the presence of negative values it is hard
15173 to say anything definite. */
15174 sub_strict_overflow_p = false;
15175 if (!tree_expr_nonnegative_warnv_p (op0,
15176 &sub_strict_overflow_p)
15177 || !tree_expr_nonnegative_warnv_p (op1,
15178 &sub_strict_overflow_p))
15179 return false;
15180 /* One of the operands must be positive and the other non-negative. */
15181 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15182 overflows, on a twos-complement machine the sum of two
15183 nonnegative numbers can never be zero. */
15184 return (tree_expr_nonzero_warnv_p (op0,
15185 strict_overflow_p)
15186 || tree_expr_nonzero_warnv_p (op1,
15187 strict_overflow_p));
15189 break;
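/* Editorial note (not in the original source): why overflow is
   harmless above.  For 8-bit twos-complement values, two non-negative
   operands are each at most 127, so their sum is at most 254 and can
   never land on the wrap-around value 256; hence the sum of a nonzero
   and a non-negative operand stays nonzero even when it overflows,
   e.g. (int8_t) (127 + 100) == -29.  */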
15191 case MULT_EXPR:
15192 if (TYPE_OVERFLOW_UNDEFINED (type))
15194 if (tree_expr_nonzero_warnv_p (op0,
15195 strict_overflow_p)
15196 && tree_expr_nonzero_warnv_p (op1,
15197 strict_overflow_p))
15199 *strict_overflow_p = true;
15200 return true;
15203 break;
15205 case MIN_EXPR:
15206 sub_strict_overflow_p = false;
15207 if (tree_expr_nonzero_warnv_p (op0,
15208 &sub_strict_overflow_p)
15209 && tree_expr_nonzero_warnv_p (op1,
15210 &sub_strict_overflow_p))
15212 if (sub_strict_overflow_p)
15213 *strict_overflow_p = true;
15215 break;
15217 case MAX_EXPR:
15218 sub_strict_overflow_p = false;
15219 if (tree_expr_nonzero_warnv_p (op0,
15220 &sub_strict_overflow_p))
15222 if (sub_strict_overflow_p)
15223 *strict_overflow_p = true;
15225 /* When both operands are nonzero, MAX must be too. */
15226 if (tree_expr_nonzero_warnv_p (op1,
15227 strict_overflow_p))
15228 return true;
15230 /* MAX where operand 0 is positive is positive. */
15231 return tree_expr_nonnegative_warnv_p (op0,
15232 strict_overflow_p);
15234 /* MAX where operand 1 is positive is positive. */
15235 else if (tree_expr_nonzero_warnv_p (op1,
15236 &sub_strict_overflow_p)
15237 && tree_expr_nonnegative_warnv_p (op1,
15238 &sub_strict_overflow_p))
15240 if (sub_strict_overflow_p)
15241 *strict_overflow_p = true;
15242 return true;
15244 break;
15246 case BIT_IOR_EXPR:
15247 return (tree_expr_nonzero_warnv_p (op1,
15248 strict_overflow_p)
15249 || tree_expr_nonzero_warnv_p (op0,
15250 strict_overflow_p));
15252 default:
15253 break;
15256 return false;
15259 /* Return true when T is an address and is known to be nonzero.
15260 For floating point we further ensure that T is not denormal.
15261 Similar logic is present in nonzero_address in rtlanal.h.
15263 If the return value is based on the assumption that signed overflow
15264 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15265 change *STRICT_OVERFLOW_P. */
15267 bool
15268 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15270 bool sub_strict_overflow_p;
15271 switch (TREE_CODE (t))
15273 case INTEGER_CST:
15274 return !integer_zerop (t);
15276 case ADDR_EXPR:
15278 tree base = TREE_OPERAND (t, 0);
15280 if (!DECL_P (base))
15281 base = get_base_address (base);
15283 if (base && TREE_CODE (base) == TARGET_EXPR)
15284 base = TARGET_EXPR_SLOT (base);
15286 if (!base)
15287 return false;
15289 /* For objects in symbol table check if we know they are non-zero.
15290 Don't do anything for variables and functions before symtab is built;
15291 it is quite possible that they will be declared weak later. */
15292 int nonzero_addr = maybe_nonzero_address (base);
15293 if (nonzero_addr >= 0)
15294 return nonzero_addr;
15296 /* Constants are never weak. */
15297 if (CONSTANT_CLASS_P (base))
15298 return true;
15300 return false;
15303 case COND_EXPR:
15304 sub_strict_overflow_p = false;
15305 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15306 &sub_strict_overflow_p)
15307 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15308 &sub_strict_overflow_p))
15310 if (sub_strict_overflow_p)
15311 *strict_overflow_p = true;
15312 return true;
15314 break;
15316 case SSA_NAME:
15317 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15318 break;
15319 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15321 default:
15322 break;
15324 return false;
15327 #define integer_valued_real_p(X) \
15328 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15330 #define RECURSE(X) \
15331 ((integer_valued_real_p) (X, depth + 1))
15333 /* Return true if the floating point result of (CODE OP0) has an
15334 integer value. We also allow +Inf, -Inf and NaN to be considered
15335 integer values. Return false for signaling NaN.
15337 DEPTH is the current nesting depth of the query. */
15339 bool
15340 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15342 switch (code)
15344 case FLOAT_EXPR:
15345 return true;
15347 case ABS_EXPR:
15348 return RECURSE (op0);
15350 CASE_CONVERT:
15352 tree type = TREE_TYPE (op0);
15353 if (TREE_CODE (type) == INTEGER_TYPE)
15354 return true;
15355 if (SCALAR_FLOAT_TYPE_P (type))
15356 return RECURSE (op0);
15357 break;
15360 default:
15361 break;
15363 return false;
15366 /* Return true if the floating point result of (CODE OP0 OP1) has an
15367 integer value. We also allow +Inf, -Inf and NaN to be considered
15368 integer values. Return false for signaling NaN.
15370 DEPTH is the current nesting depth of the query. */
15372 bool
15373 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15375 switch (code)
15377 case PLUS_EXPR:
15378 case MINUS_EXPR:
15379 case MULT_EXPR:
15380 case MIN_EXPR:
15381 case MAX_EXPR:
15382 return RECURSE (op0) && RECURSE (op1);
15384 default:
15385 break;
15387 return false;
15390 /* Return true if the floating point result of calling FNDECL with arguments
15391 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15392 considered integer values. Return false for signaling NaN. If FNDECL
15393 takes fewer than 2 arguments, the remaining ARGn are null.
15395 DEPTH is the current nesting depth of the query. */
15397 bool
15398 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15400 switch (fn)
15402 CASE_CFN_CEIL:
15403 CASE_CFN_CEIL_FN:
15404 CASE_CFN_FLOOR:
15405 CASE_CFN_FLOOR_FN:
15406 CASE_CFN_NEARBYINT:
15407 CASE_CFN_NEARBYINT_FN:
15408 CASE_CFN_RINT:
15409 CASE_CFN_RINT_FN:
15410 CASE_CFN_ROUND:
15411 CASE_CFN_ROUND_FN:
15412 CASE_CFN_ROUNDEVEN:
15413 CASE_CFN_ROUNDEVEN_FN:
15414 CASE_CFN_TRUNC:
15415 CASE_CFN_TRUNC_FN:
15416 return true;
15418 CASE_CFN_FMIN:
15419 CASE_CFN_FMIN_FN:
15420 CASE_CFN_FMAX:
15421 CASE_CFN_FMAX_FN:
15422 return RECURSE (arg0) && RECURSE (arg1);
15424 default:
15425 break;
15427 return false;
15430 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15431 has an integer value. We also allow +Inf, -Inf and NaN to be
15432 considered integer values. Return false for signaling NaN.
15434 DEPTH is the current nesting depth of the query. */
15436 bool
15437 integer_valued_real_single_p (tree t, int depth)
15439 switch (TREE_CODE (t))
15441 case REAL_CST:
15442 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15444 case COND_EXPR:
15445 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15447 case SSA_NAME:
15448 /* Limit the depth of recursion to avoid quadratic behavior.
15449 This is expected to catch almost all occurrences in practice.
15450 If this code misses important cases that unbounded recursion
15451 would not, passes that need this information could be revised
15452 to provide it through dataflow propagation. */
15453 return (!name_registered_for_update_p (t)
15454 && depth < param_max_ssa_name_query_depth
15455 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15456 depth));
15458 default:
15459 break;
15461 return false;
15464 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15465 has an integer value. We also allow +Inf, -Inf and NaN to be
15466 considered integer values. Return false for signaling NaN.
15468 DEPTH is the current nesting depth of the query. */
15470 static bool
15471 integer_valued_real_invalid_p (tree t, int depth)
15473 switch (TREE_CODE (t))
15475 case COMPOUND_EXPR:
15476 case MODIFY_EXPR:
15477 case BIND_EXPR:
15478 return RECURSE (TREE_OPERAND (t, 1));
15480 case SAVE_EXPR:
15481 return RECURSE (TREE_OPERAND (t, 0));
15483 default:
15484 break;
15486 return false;
15489 #undef RECURSE
15490 #undef integer_valued_real_p
15492 /* Return true if the floating point expression T has an integer value.
15493 We also allow +Inf, -Inf and NaN to be considered integer values.
15494 Return false for signaling NaN.
15496 DEPTH is the current nesting depth of the query. */
15498 bool
15499 integer_valued_real_p (tree t, int depth)
15501 if (t == error_mark_node)
15502 return false;
15504 STRIP_ANY_LOCATION_WRAPPER (t);
15506 tree_code code = TREE_CODE (t);
15507 switch (TREE_CODE_CLASS (code))
15509 case tcc_binary:
15510 case tcc_comparison:
15511 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15512 TREE_OPERAND (t, 1), depth);
15514 case tcc_unary:
15515 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15517 case tcc_constant:
15518 case tcc_declaration:
15519 case tcc_reference:
15520 return integer_valued_real_single_p (t, depth);
15522 default:
15523 break;
15526 switch (code)
15528 case COND_EXPR:
15529 case SSA_NAME:
15530 return integer_valued_real_single_p (t, depth);
15532 case CALL_EXPR:
15534 tree arg0 = (call_expr_nargs (t) > 0
15535 ? CALL_EXPR_ARG (t, 0)
15536 : NULL_TREE);
15537 tree arg1 = (call_expr_nargs (t) > 1
15538 ? CALL_EXPR_ARG (t, 1)
15539 : NULL_TREE);
15540 return integer_valued_real_call_p (get_call_combined_fn (t),
15541 arg0, arg1, depth);
15544 default:
15545 return integer_valued_real_invalid_p (t, depth);
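/* Editorial note (not in the original source): examples of what the
   predicate above accepts.  trunc (x) + floor (y) is integer valued
   (a PLUS_EXPR of two integer-valued calls), as is (double) i for an
   integer i (a FLOAT_EXPR), while x + 0.5 is not provably so.  */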
15549 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15550 attempt to fold the expression to a constant without modifying TYPE,
15551 OP0 or OP1.
15553 If the expression could be simplified to a constant, then return
15554 the constant. If the expression would not be simplified to a
15555 constant, then return NULL_TREE. */
15557 tree
15558 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15560 tree tem = fold_binary (code, type, op0, op1);
15561 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15564 /* Given the components of a unary expression CODE, TYPE and OP0,
15565 attempt to fold the expression to a constant without modifying
15566 TYPE or OP0.
15568 If the expression could be simplified to a constant, then return
15569 the constant. If the expression would not be simplified to a
15570 constant, then return NULL_TREE. */
15572 tree
15573 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15575 tree tem = fold_unary (code, type, op0);
15576 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15579 /* If EXP represents referencing an element in a constant string
15580 (either via pointer arithmetic or array indexing), return the
15581 tree representing the value accessed, otherwise return NULL. */
15583 tree
15584 fold_read_from_constant_string (tree exp)
15586 if ((INDIRECT_REF_P (exp)
15587 || TREE_CODE (exp) == ARRAY_REF)
15588 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15590 tree exp1 = TREE_OPERAND (exp, 0);
15591 tree index;
15592 tree string;
15593 location_t loc = EXPR_LOCATION (exp);
15595 if (INDIRECT_REF_P (exp))
15596 string = string_constant (exp1, &index, NULL, NULL);
15597 else
15599 tree low_bound = array_ref_low_bound (exp);
15600 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15602 /* Optimize the special-case of a zero lower bound.
15604 We convert the low_bound to sizetype to avoid some problems
15605 with constant folding. (E.g. suppose the lower bound is 1,
15606 and its mode is QI. Without the conversion, (ARRAY
15607 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15608 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15609 if (! integer_zerop (low_bound))
15610 index = size_diffop_loc (loc, index,
15611 fold_convert_loc (loc, sizetype, low_bound));
15613 string = exp1;
15616 scalar_int_mode char_mode;
15617 if (string
15618 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15619 && TREE_CODE (string) == STRING_CST
15620 && tree_fits_uhwi_p (index)
15621 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15622 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15623 &char_mode)
15624 && GET_MODE_SIZE (char_mode) == 1)
15625 return build_int_cst_type (TREE_TYPE (exp),
15626 (TREE_STRING_POINTER (string)
15627 [TREE_INT_CST_LOW (index)]));
15629 return NULL;
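/* Editorial sketch (not part of the original source): the fold above
   turns a constant-index read from a string literal into a character
   constant, e.g.:

     char c = "abc"[1];   // folded to the INTEGER_CST 98, i.e. 'b'
*/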
15632 /* Folds a read from vector element at IDX of vector ARG. */
15634 tree
15635 fold_read_from_vector (tree arg, poly_uint64 idx)
15637 unsigned HOST_WIDE_INT i;
15638 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15639 && known_ge (idx, 0u)
15640 && idx.is_constant (&i))
15642 if (TREE_CODE (arg) == VECTOR_CST)
15643 return VECTOR_CST_ELT (arg, i);
15644 else if (TREE_CODE (arg) == CONSTRUCTOR)
15646 if (CONSTRUCTOR_NELTS (arg)
15647 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15648 return NULL_TREE;
15649 if (i >= CONSTRUCTOR_NELTS (arg))
15650 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15651 return CONSTRUCTOR_ELT (arg, i)->value;
15654 return NULL_TREE;
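/* Editorial note (not in the original source): behavior sketch for
   the helper above.  For ARG = {1, 2, 3, 4} (a VECTOR_CST) and
   IDX = 2 it returns the element 3; for a CONSTRUCTOR with fewer
   explicit elements than the vector has lanes, an IDX past
   CONSTRUCTOR_NELTS yields a zero of the element type.  */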
15657 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15658 an integer constant, real, or fixed-point constant.
15660 TYPE is the type of the result. */
15662 static tree
15663 fold_negate_const (tree arg0, tree type)
15665 tree t = NULL_TREE;
15667 switch (TREE_CODE (arg0))
15669 case REAL_CST:
15670 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15671 break;
15673 case FIXED_CST:
15675 FIXED_VALUE_TYPE f;
15676 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15677 &(TREE_FIXED_CST (arg0)), NULL,
15678 TYPE_SATURATING (type));
15679 t = build_fixed (type, f);
15680 /* Propagate overflow flags. */
15681 if (overflow_p | TREE_OVERFLOW (arg0))
15682 TREE_OVERFLOW (t) = 1;
15683 break;
15686 default:
15687 if (poly_int_tree_p (arg0))
15689 wi::overflow_type overflow;
15690 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15691 t = force_fit_type (type, res, 1,
15692 (overflow && ! TYPE_UNSIGNED (type))
15693 || TREE_OVERFLOW (arg0));
15694 break;
15697 gcc_unreachable ();
15700 return t;
15703 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15704 an integer constant or real constant.
15706 TYPE is the type of the result. */
15708 tree
15709 fold_abs_const (tree arg0, tree type)
15711 tree t = NULL_TREE;
15713 switch (TREE_CODE (arg0))
15715 case INTEGER_CST:
15717 /* If the value is unsigned or non-negative, then the absolute value
15718 is the same as the ordinary value. */
15719 wide_int val = wi::to_wide (arg0);
15720 wi::overflow_type overflow = wi::OVF_NONE;
15721 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15724 /* If the value is negative, then the absolute value is
15725 its negation. */
15726 else
15727 val = wi::neg (val, &overflow);
15729 /* Force to the destination type, set TREE_OVERFLOW for signed
15730 TYPE only. */
15731 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15733 break;
15735 case REAL_CST:
15736 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15737 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15738 else
15739 t = arg0;
15740 break;
15742 default:
15743 gcc_unreachable ();
15746 return t;
15749 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15750 constant. TYPE is the type of the result. */
15752 static tree
15753 fold_not_const (const_tree arg0, tree type)
15755 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15757 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15760 /* Given CODE, a relational operator, the target type, TYPE and two
15761 constant operands OP0 and OP1, return the result of the
15762 relational operation. If the result is not a compile time
15763 constant, then return NULL_TREE. */
15765 static tree
15766 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15768 int result, invert;
15770 /* From here on, the only cases we handle are when the result is
15771 known to be a constant. */
15773 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15775 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15776 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15778 /* Handle the cases where either operand is a NaN. */
15779 if (real_isnan (c0) || real_isnan (c1))
15781 switch (code)
15783 case EQ_EXPR:
15784 case ORDERED_EXPR:
15785 result = 0;
15786 break;
15788 case NE_EXPR:
15789 case UNORDERED_EXPR:
15790 case UNLT_EXPR:
15791 case UNLE_EXPR:
15792 case UNGT_EXPR:
15793 case UNGE_EXPR:
15794 case UNEQ_EXPR:
15795 result = 1;
15796 break;
15798 case LT_EXPR:
15799 case LE_EXPR:
15800 case GT_EXPR:
15801 case GE_EXPR:
15802 case LTGT_EXPR:
15803 if (flag_trapping_math)
15804 return NULL_TREE;
15805 result = 0;
15806 break;
15808 default:
15809 gcc_unreachable ();
15812 return constant_boolean_node (result, type);
15815 return constant_boolean_node (real_compare (code, c0, c1), type);
15818 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15820 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15821 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15822 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15825 /* Handle equality/inequality of complex constants. */
15826 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15828 tree rcond = fold_relational_const (code, type,
15829 TREE_REALPART (op0),
15830 TREE_REALPART (op1));
15831 tree icond = fold_relational_const (code, type,
15832 TREE_IMAGPART (op0),
15833 TREE_IMAGPART (op1));
15834 if (code == EQ_EXPR)
15835 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15836 else if (code == NE_EXPR)
15837 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15838 else
15839 return NULL_TREE;
15842 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15844 if (!VECTOR_TYPE_P (type))
15846 /* Have vector comparison with scalar boolean result. */
15847 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15848 && known_eq (VECTOR_CST_NELTS (op0),
15849 VECTOR_CST_NELTS (op1)));
15850 unsigned HOST_WIDE_INT nunits;
15851 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15852 return NULL_TREE;
15853 for (unsigned i = 0; i < nunits; i++)
15855 tree elem0 = VECTOR_CST_ELT (op0, i);
15856 tree elem1 = VECTOR_CST_ELT (op1, i);
15857 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15858 if (tmp == NULL_TREE)
15859 return NULL_TREE;
15860 if (integer_zerop (tmp))
15861 return constant_boolean_node (code == NE_EXPR, type);
15863 return constant_boolean_node (code == EQ_EXPR, type);
15865 tree_vector_builder elts;
15866 if (!elts.new_binary_operation (type, op0, op1, false))
15867 return NULL_TREE;
15868 unsigned int count = elts.encoded_nelts ();
15869 for (unsigned i = 0; i < count; i++)
15871 tree elem_type = TREE_TYPE (type);
15872 tree elem0 = VECTOR_CST_ELT (op0, i);
15873 tree elem1 = VECTOR_CST_ELT (op1, i);
15875 tree tem = fold_relational_const (code, elem_type,
15876 elem0, elem1);
15878 if (tem == NULL_TREE)
15879 return NULL_TREE;
15881 elts.quick_push (build_int_cst (elem_type,
15882 integer_zerop (tem) ? 0 : -1));
15885 return elts.build ();
15888 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15890 To compute GT, swap the arguments and do LT.
15891 To compute GE, do LT and invert the result.
15892 To compute LE, swap the arguments, do LT and invert the result.
15893 To compute NE, do EQ and invert the result.
15895 Therefore, the code below must handle only EQ and LT. */
15897 if (code == LE_EXPR || code == GT_EXPR)
15899 std::swap (op0, op1);
15900 code = swap_tree_comparison (code);
15903 /* Note that it is safe to invert for real values here because we
15904 have already handled the one case where it matters. */
15906 invert = 0;
15907 if (code == NE_EXPR || code == GE_EXPR)
15909 invert = 1;
15910 code = invert_tree_comparison (code, false);
15913 /* Compute a result for LT or EQ if args permit;
15914 otherwise return NULL_TREE. */
15915 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15917 if (code == EQ_EXPR)
15918 result = tree_int_cst_equal (op0, op1);
15919 else
15920 result = tree_int_cst_lt (op0, op1);
15922 else
15923 return NULL_TREE;
15925 if (invert)
15926 result ^= 1;
15927 return constant_boolean_node (result, type);
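/* Editorial note (not in the original source): the canonicalization
   above in action on integer constants, reducing everything to EQ
   and LT:

     5 >  3   ->  swap operands:   3 < 5        -> true
     5 >= 3   ->  invert:          !(5 < 3)     -> true
     5 != 3   ->  invert:          !(5 == 3)    -> true
*/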
15930 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15931 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15932 itself. */
15934 tree
15935 fold_build_cleanup_point_expr (tree type, tree expr)
15937 /* If the expression does not have side effects then we don't have to wrap
15938 it with a cleanup point expression. */
15939 if (!TREE_SIDE_EFFECTS (expr))
15940 return expr;
15942 /* If the expression is a return, check whether the expression inside
15943 the return, or the right-hand side of the modify expression inside
15944 the return, has no side effects. If either of them has none, we don't
15945 need to wrap the expression in a cleanup point expression. Note we don't
15946 check the left-hand side of the modify because it should always be a return decl. */
15947 if (TREE_CODE (expr) == RETURN_EXPR)
15949 tree op = TREE_OPERAND (expr, 0);
15950 if (!op || !TREE_SIDE_EFFECTS (op))
15951 return expr;
15952 op = TREE_OPERAND (op, 1);
15953 if (!TREE_SIDE_EFFECTS (op))
15954 return expr;
15957 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15960 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15961 of an indirection through OP0, or NULL_TREE if no simplification is
15962 possible. */
15964 tree
15965 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15967 tree sub = op0;
15968 tree subtype;
15969 poly_uint64 const_op01;
15971 STRIP_NOPS (sub);
15972 subtype = TREE_TYPE (sub);
15973 if (!POINTER_TYPE_P (subtype)
15974 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15975 return NULL_TREE;
15977 if (TREE_CODE (sub) == ADDR_EXPR)
15979 tree op = TREE_OPERAND (sub, 0);
15980 tree optype = TREE_TYPE (op);
15982 /* *&CONST_DECL -> to the value of the const decl. */
15983 if (TREE_CODE (op) == CONST_DECL)
15984 return DECL_INITIAL (op);
15985 /* *&p => p; make sure to handle *&"str"[cst] here. */
15986 if (type == optype)
15988 tree fop = fold_read_from_constant_string (op);
15989 if (fop)
15990 return fop;
15991 else
15992 return op;
15994 /* *(foo *)&fooarray => fooarray[0] */
15995 else if (TREE_CODE (optype) == ARRAY_TYPE
15996 && type == TREE_TYPE (optype)
15997 && (!in_gimple_form
15998 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16000 tree type_domain = TYPE_DOMAIN (optype);
16001 tree min_val = size_zero_node;
16002 if (type_domain && TYPE_MIN_VALUE (type_domain))
16003 min_val = TYPE_MIN_VALUE (type_domain);
16004 if (in_gimple_form
16005 && TREE_CODE (min_val) != INTEGER_CST)
16006 return NULL_TREE;
16007 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16008 NULL_TREE, NULL_TREE);
16010 /* *(foo *)&complexfoo => __real__ complexfoo */
16011 else if (TREE_CODE (optype) == COMPLEX_TYPE
16012 && type == TREE_TYPE (optype))
16013 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16014 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16015 else if (VECTOR_TYPE_P (optype)
16016 && type == TREE_TYPE (optype))
16018 tree part_width = TYPE_SIZE (type);
16019 tree index = bitsize_int (0);
16020 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16021 index);
16025 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16026 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16028 tree op00 = TREE_OPERAND (sub, 0);
16029 tree op01 = TREE_OPERAND (sub, 1);
16031 STRIP_NOPS (op00);
16032 if (TREE_CODE (op00) == ADDR_EXPR)
16034 tree op00type;
16035 op00 = TREE_OPERAND (op00, 0);
16036 op00type = TREE_TYPE (op00);
16038 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16039 if (VECTOR_TYPE_P (op00type)
16040 && type == TREE_TYPE (op00type)
16041 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16042 but we want to treat offsets with MSB set as negative.
16043 For the code below negative offsets are invalid and
16044 TYPE_SIZE of the element is something unsigned, so
16045 check whether op01 fits into poly_int64, which implies
16046 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16047 then just use poly_uint64 because we want to treat the
16048 value as unsigned. */
16049 && tree_fits_poly_int64_p (op01))
16051 tree part_width = TYPE_SIZE (type);
16052 poly_uint64 max_offset
16053 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16054 * TYPE_VECTOR_SUBPARTS (op00type));
16055 if (known_lt (const_op01, max_offset))
16057 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16058 return fold_build3_loc (loc,
16059 BIT_FIELD_REF, type, op00,
16060 part_width, index);
16063 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16064 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16065 && type == TREE_TYPE (op00type))
16067 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16068 const_op01))
16069 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16071 /* ((foo *)&fooarray)[1] => fooarray[1] */
16072 else if (TREE_CODE (op00type) == ARRAY_TYPE
16073 && type == TREE_TYPE (op00type))
16075 tree type_domain = TYPE_DOMAIN (op00type);
16076 tree min_val = size_zero_node;
16077 if (type_domain && TYPE_MIN_VALUE (type_domain))
16078 min_val = TYPE_MIN_VALUE (type_domain);
16079 poly_uint64 type_size, index;
16080 if (poly_int_tree_p (min_val)
16081 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16082 && multiple_p (const_op01, type_size, &index))
16084 poly_offset_int off = index + wi::to_poly_offset (min_val);
16085 op01 = wide_int_to_tree (sizetype, off);
16086 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16087 NULL_TREE, NULL_TREE);
16093 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16094 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16095 && type == TREE_TYPE (TREE_TYPE (subtype))
16096 && (!in_gimple_form
16097 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16099 tree type_domain;
16100 tree min_val = size_zero_node;
16101 sub = build_fold_indirect_ref_loc (loc, sub);
16102 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16103 if (type_domain && TYPE_MIN_VALUE (type_domain))
16104 min_val = TYPE_MIN_VALUE (type_domain);
16105 if (in_gimple_form
16106 && TREE_CODE (min_val) != INTEGER_CST)
16107 return NULL_TREE;
16108 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16109 NULL_TREE);
16112 return NULL_TREE;
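/* Editorial sketch (not part of the original source): source-level
   views of the indirection folds above, for int a[4] and
   _Complex double c:

     *(int *) &a        // becomes a[0]
     *&a[2]             // becomes a[2]
     ((double *) &c)[1] // becomes __imag__ c, since the byte offset
                        //   equals sizeof (double)
*/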
16115 /* Builds an expression for an indirection through T, simplifying some
16116 cases. */
16118 tree
16119 build_fold_indirect_ref_loc (location_t loc, tree t)
16121 tree type = TREE_TYPE (TREE_TYPE (t));
16122 tree sub = fold_indirect_ref_1 (loc, type, t);
16124 if (sub)
16125 return sub;
16127 return build1_loc (loc, INDIRECT_REF, type, t);
16130 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16132 tree
16133 fold_indirect_ref_loc (location_t loc, tree t)
16135 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16137 if (sub)
16138 return sub;
16139 else
16140 return t;
16143 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16144 whose result is ignored. The type of the returned tree need not be
16145 the same as the original expression. */
16147 tree
16148 fold_ignored_result (tree t)
16150 if (!TREE_SIDE_EFFECTS (t))
16151 return integer_zero_node;
16153 for (;;)
16154 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16156 case tcc_unary:
16157 t = TREE_OPERAND (t, 0);
16158 break;
16160 case tcc_binary:
16161 case tcc_comparison:
16162 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16163 t = TREE_OPERAND (t, 0);
16164 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16165 t = TREE_OPERAND (t, 1);
16166 else
16167 return t;
16168 break;
16170 case tcc_expression:
16171 switch (TREE_CODE (t))
16173 case COMPOUND_EXPR:
16174 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16175 return t;
16176 t = TREE_OPERAND (t, 0);
16177 break;
16179 case COND_EXPR:
16180 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16181 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16182 return t;
16183 t = TREE_OPERAND (t, 0);
16184 break;
16186 default:
16187 return t;
16189 break;
16191 default:
16192 return t;
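/* Editorial note (not in the original source): examples for the
   helper above when the result is ignored.  (a + b) with no side
   effects folds to integer_zero_node; f () + 3 is stripped to the
   side-effecting operand f (); and x ? f () : g () is returned
   unchanged because both arms have side effects.  */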
16196 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16198 tree
16199 round_up_loc (location_t loc, tree value, unsigned int divisor)
16201 tree div = NULL_TREE;
16203 if (divisor == 1)
16204 return value;
16206 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16207 have to do anything. Only do this test when VALUE is not a constant,
16208 because for a constant the check is more expensive than just
16209 doing the rounding. */
16210 if (TREE_CODE (value) != INTEGER_CST)
16212 div = build_int_cst (TREE_TYPE (value), divisor);
16214 if (multiple_of_p (TREE_TYPE (value), value, div))
16215 return value;
16218 /* If divisor is a power of two, simplify this to bit manipulation. */
16219 if (pow2_or_zerop (divisor))
16221 if (TREE_CODE (value) == INTEGER_CST)
16223 wide_int val = wi::to_wide (value);
16224 bool overflow_p;
16226 if ((val & (divisor - 1)) == 0)
16227 return value;
16229 overflow_p = TREE_OVERFLOW (value);
16230 val += divisor - 1;
16231 val &= (int) -divisor;
16232 if (val == 0)
16233 overflow_p = true;
16235 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16237 else
16239 tree t;
16241 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16242 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16243 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16244 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16247 else
16249 if (!div)
16250 div = build_int_cst (TREE_TYPE (value), divisor);
16251 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16252 value = size_binop_loc (loc, MULT_EXPR, value, div);
16255 return value;
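/* Editorial sketch (not part of the original source): the
   power-of-two branch above is the classic mask trick.  Standalone,
   with hypothetical names:

     unsigned round_up_pow2 (unsigned value, unsigned divisor)
     {
       // DIVISOR must be a nonzero power of two
       return (value + divisor - 1) & -divisor;   // (13, 8) -> 16
     }                                            // (16, 8) -> 16
*/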
16258 /* Likewise, but round down. */
16260 tree
16261 round_down_loc (location_t loc, tree value, int divisor)
16263 tree div = NULL_TREE;
16265 gcc_assert (divisor > 0);
16266 if (divisor == 1)
16267 return value;
16269 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16270 have to do anything. Only do this test when VALUE is not a constant,
16271 because for a constant the check is more expensive than just
16272 doing the rounding. */
16273 if (TREE_CODE (value) != INTEGER_CST)
16275 div = build_int_cst (TREE_TYPE (value), divisor);
16277 if (multiple_of_p (TREE_TYPE (value), value, div))
16278 return value;
16281 /* If divisor is a power of two, simplify this to bit manipulation. */
16282 if (pow2_or_zerop (divisor))
16284 tree t;
16286 t = build_int_cst (TREE_TYPE (value), -divisor);
16287 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16289 else
16291 if (!div)
16292 div = build_int_cst (TREE_TYPE (value), divisor);
16293 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16294 value = size_binop_loc (loc, MULT_EXPR, value, div);
16297 return value;
16300 /* Return a pointer to the base of the object addressed by EXP and
16301 extract the information about the offset of the access, storing it
16302 in PBITPOS and POFFSET. */
16304 static tree
16305 split_address_to_core_and_offset (tree exp,
16306 poly_int64_pod *pbitpos, tree *poffset)
16308 tree core;
16309 machine_mode mode;
16310 int unsignedp, reversep, volatilep;
16311 poly_int64 bitsize;
16312 location_t loc = EXPR_LOCATION (exp);
16314 if (TREE_CODE (exp) == SSA_NAME)
16315 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16316 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16317 exp = gimple_assign_rhs1 (def);
16319 if (TREE_CODE (exp) == ADDR_EXPR)
16321 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16322 poffset, &mode, &unsignedp, &reversep,
16323 &volatilep);
16324 core = build_fold_addr_expr_loc (loc, core);
16326 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16328 core = TREE_OPERAND (exp, 0);
16329 STRIP_NOPS (core);
16330 *pbitpos = 0;
16331 *poffset = TREE_OPERAND (exp, 1);
16332 if (poly_int_tree_p (*poffset))
16334 poly_offset_int tem
16335 = wi::sext (wi::to_poly_offset (*poffset),
16336 TYPE_PRECISION (TREE_TYPE (*poffset)));
16337 tem <<= LOG2_BITS_PER_UNIT;
16338 if (tem.to_shwi (pbitpos))
16339 *poffset = NULL_TREE;
16342 else
16344 core = exp;
16345 *pbitpos = 0;
16346 *poffset = NULL_TREE;
16349 return core;
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
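/* A usage sketch (editorial illustration): given int a[10], the
   addresses &a[7] and &a[2] share the core &a and carry the constant
   byte offsets 28 and 8 when int is 4 bytes wide, so

     poly_int64 diff;
     if (ptr_difference_const (addr_of_a7, addr_of_a2, &diff))
       gcc_checking_assert (known_eq (diff, 20));

   would succeed.  ADDR_OF_A7 and ADDR_OF_A2 stand for ADDR_EXPR trees
   of a[7] and a[2]; they are placeholders, not names from this file.  */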
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
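/* The builders above spare callers the ptrofftype bookkeeping that
   POINTER_PLUS_EXPR requires.  A minimal sketch (illustrative only):

     tree q = fold_build_pointer_plus_hwi_loc (UNKNOWN_LOCATION, p, 4);

   is equivalent to building a POINTER_PLUS_EXPR of P with result type
   TREE_TYPE (p) and size_int (4) as the sizetype offset.  P is a
   placeholder for some pointer-typed tree.  */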
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}
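/* A sketch of the two calling modes (editorial illustration): reusing
   const char a[7] = "abc\0d" from the comment above, and a tree
   REF_A1 referring to a + 1 (a placeholder, not a name from this
   file):

     unsigned HOST_WIDE_INT n;
     const char *p = getbyterep (ref_a1, &n);

   sets P to the bytes "bc\0d" and N to 5, the six initialized bytes
   minus the offset of 1.  With a null STRSIZE the function uses
   string_constant rather than byte_representation and insists on a
   NUL-terminated single-byte string.  */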
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
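/* For example (editorial note): c_getstr on an ADDR_EXPR of the
   literal "hello" plus a constant offset of 1 yields the host string
   "ello"; it yields NULL for a nonconstant argument or offset.  */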
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
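/* A worked example (editor's addition): for T = (x & 12) | 3, where
   nothing is known about X, the recursion bottoms out at the default
   -1 (all bits possibly set), the BIT_AND_EXPR case masks that to
   0b1100, and the BIT_IOR_EXPR case ORs in 0b0011:

     (-1 & 12) | 3 == 12 | 3 == 15

   so tree_nonzero_bits returns 15 in the precision of T's type.  */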
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true for GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  if (TREE_CODE (op0) == SSA_NAME)
    op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
  if (TREE_CODE (op1) == SSA_NAME)
    op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
		 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
	equal = 2;
      else
	size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
	equal = 2;
      else
	size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
	equal = 2;
      else
	size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
	equal = 2;
      else
	size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know the offsets aren't zero, or if we know both
	 sizes are nonzero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to the start of its object and
     the other points to the end of its object.  This is unspecified
     behavior e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
	  || ioff1 < 0
	  || ioff0 > TREE_STRING_LENGTH (base0)
	  || ioff1 > TREE_STRING_LENGTH (base1))
	return 2;

      /* If the bytes in the string literals starting at the pointers
	 differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
		  TREE_STRING_POINTER (base1) + ioff1,
		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
	{
	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
		      ioffmin) == 0)
	    /* If even the bytes in the string literal before the
	       pointers are the same, the string literals could be
	       tail merged.  */
	    return 2;
	}
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}
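/* Semantics sketch (editorial, not from the original sources): for
     static int a, b;
   folding &a != &b asks the symbol table whether A and B can share an
   address; if equal_address_to answers "no", the result here is 0 and
   match.pd can fold the comparison to true.  For two addresses into
   the same object at different constant offsets, e.g. &a[1] == &a[2],
   the bases match, 1 is returned, and the comparison reduces to
   comparing OFF0 and OFF1.  */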
/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */

tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
	if (elt)
	  return NULL_TREE;
	elt = ce->value;
      }

  return elt;
}
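/* For instance (editorial note): applied to the CONSTRUCTOR for
   {0, 0, 42, 0} this returns the element 42; for {1, 2}, for an
   all-zero constructor, or for a non-CONSTRUCTOR tree it returns
   NULL_TREE.  */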
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}
/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */