/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-iterator.h"
74 #include "gimple-fold.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
86 #include "asan.h"
87 #include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding in a C++ manifestly-constant-evaluated
   context; zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 is equivalent to X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 is equivalent to X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	  /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int min = widest_int::from (r.lower_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (min == wi::to_widest (exp1))
		code = MAX_EXPR;
	    }
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 is equivalent to X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 is equivalent to X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	  /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int max = widest_int::from (r.upper_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (max == wi::to_widest (exp1))
		code = MIN_EXPR;
	    }
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
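
/* For instance, for the GENERIC form of "x <= 9 ? x : 10" (cmp == LE_EXPR,
   EXP1 == 9, EXP3 == 10) the function above returns MIN_EXPR, and for
   "a > b ? a : b" it returns MAX_EXPR; ERROR_MARK means no min/max
   pattern was recognized.  (Illustrative examples, not a quotation
   from any particular caller.)  */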
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
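
/* For example, with INTEGER_CST operands 12 and 4 the function above
   returns the constant 3; with 14 and 4 it returns NULL_TREE, since
   the division is inexact.  */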
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
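
/* A typical caller brackets folding with the deferral machinery above.
   An illustrative sketch (the local names here are made up):

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     bool used = (folded != NULL_TREE);
     fold_undefer_overflow_warnings (used, stmt, 0);

   Passing 0 as CODE means the deferred warning level is used as-is.  */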
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
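
/* E.g. for 32-bit int, -2147483648 (only the sign bit set) is the one
   value the function above rejects, since its negation is not
   representable in the same type.  */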
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand it
	 does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
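
/* As an illustration, negate_expr on the tree A + CST folds to
   (-CST) - A via the PLUS_EXPR case of fold_negate_expr_1, assuming
   CST can be negated without overflow; a tree it cannot simplify is
   simply wrapped in a NEGATE_EXPR.  */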
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
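
/* For example, splitting IN = A + 5 with CODE == PLUS_EXPR sets *LITP
   to 5, returns A as the variable part and leaves the other parts
   null; for IN = A - 5 the literal is stored in *MINUS_LITP instead.  */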
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
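
/* An illustrative sketch of how the function above is meant to be
   called (the local names here are made up):

     wide_int r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, PLUS_EXPR, wi::to_wide (arg1),
			 wi::to_wide (arg2), TYPE_SIGN (type), &ovf))
       ... R now holds the sum and OVF records any overflow ...  */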
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
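
/* For instance, combining the INTEGER_CSTs 6 and 4 with TRUNC_DIV_EXPR
   in the function above yields the constant 1, while dividing by a
   zero constant yields NULL_TREE because wide_int_binop refuses to
   evaluate it.  */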
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
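
/* For example, const_binop (PLUS_EXPR, ...) on two REAL_CSTs folds the
   addition with real_arithmetic, whereas RDIV_EXPR by a zero REAL_CST
   returns NULL_TREE when the division would raise an exception (see
   the flag_trapping_math check above).  */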
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
1995 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1996 indicates which particular sizetype to create. */
1998 tree
1999 size_int_kind (poly_int64 number, enum size_type_kind kind)
2001 return build_int_cst (sizetype_tab[(int) kind], number);
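/* Illustrative usage sketch (not part of the original file): the
   size_int family of macros in tree.h are thin wrappers around
   size_int_kind, so the following pairs are equivalent.  */
#if 0
tree a = size_int (16);      /* size_int_kind (16, stk_sizetype)     */
tree b = ssize_int (-1);     /* size_int_kind (-1, stk_ssizetype)    */
tree c = bitsize_int (64);   /* size_int_kind (64, stk_bitsizetype)  */
#endif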
2004 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2005 is a tree code. The type of the result is taken from the operands.
2006 Both must be equivalent integer types, as per int_binop_types_match_p.
2007 If the operands are constant, so is the result. */
2009 tree
2010 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2012 tree type = TREE_TYPE (arg0);
2014 if (arg0 == error_mark_node || arg1 == error_mark_node)
2015 return error_mark_node;
2017 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2018 TREE_TYPE (arg1)));
2020 /* Handle the special case of two poly_int constants faster. */
2021 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2023 /* And some specific cases even faster than that. */
2024 if (code == PLUS_EXPR)
2026 if (integer_zerop (arg0)
2027 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2028 return arg1;
2029 if (integer_zerop (arg1)
2030 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2031 return arg0;
2033 else if (code == MINUS_EXPR)
2035 if (integer_zerop (arg1)
2036 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2037 return arg0;
2039 else if (code == MULT_EXPR)
2041 if (integer_onep (arg0)
2042 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2043 return arg1;
2046 /* Handle general case of two integer constants. For sizetype
2047 constant calculations we always want to know about overflow,
2048 even in the unsigned case. */
2049 tree res = int_const_binop (code, arg0, arg1, -1);
2050 if (res != NULL_TREE)
2051 return res;
2054 return fold_build2_loc (loc, code, type, arg0, arg1);
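/* Illustrative usage sketch (not part of the original file): size_binop
   is the UNKNOWN_LOCATION wrapper for this function.  Two constant
   operands fold immediately; the identities above return an operand
   without building anything new.  */
#if 0
tree twelve = size_binop (PLUS_EXPR, size_int (4), size_int (8)); /* sizetype 12 */
tree four   = size_binop (PLUS_EXPR, size_int (0), size_int (4)); /* the second
						     operand, returned as-is */
#endif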
2057 /* Given two values, either both of sizetype or both of bitsizetype,
2058 compute the difference between the two values. Return the value
2059 in signed type corresponding to the type of the operands. */
2061 tree
2062 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2064 tree type = TREE_TYPE (arg0);
2065 tree ctype;
2067 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2068 TREE_TYPE (arg1)));
2070 /* If the type is already signed, just do the simple thing. */
2071 if (!TYPE_UNSIGNED (type))
2072 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2074 if (type == sizetype)
2075 ctype = ssizetype;
2076 else if (type == bitsizetype)
2077 ctype = sbitsizetype;
2078 else
2079 ctype = signed_type_for (type);
2081 /* If either operand is not a constant, do the conversions to the signed
2082 type and subtract. The hardware will do the right thing with any
2083 overflow in the subtraction. */
2084 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2085 return size_binop_loc (loc, MINUS_EXPR,
2086 fold_convert_loc (loc, ctype, arg0),
2087 fold_convert_loc (loc, ctype, arg1));
2089 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2090 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2091 overflow) and negate (which can't either). Special-case a result
2092 of zero while we're here. */
2093 if (tree_int_cst_equal (arg0, arg1))
2094 return build_int_cst (ctype, 0);
2095 else if (tree_int_cst_lt (arg1, arg0))
2096 return fold_convert_loc (loc, ctype,
2097 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2098 else
2099 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2100 fold_convert_loc (loc, ctype,
2101 size_binop_loc (loc,
2102 MINUS_EXPR,
2103 arg1, arg0)));
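/* Illustrative usage sketch (not part of the original file): because the
   result type is the signed counterpart of the operands' type, the
   difference may be negative even though sizetype itself is unsigned.  */
#if 0
tree d1 = size_diffop (size_int (8), size_int (3)); /* ssizetype 5  */
tree d2 = size_diffop (size_int (3), size_int (8)); /* ssizetype -5 */
#endif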
2106 /* A subroutine of fold_convert_const handling conversions of an
2107 INTEGER_CST to another integer type. */
2109 static tree
2110 fold_convert_const_int_from_int (tree type, const_tree arg1)
2112 /* Given an integer constant, make new constant with new type,
2113 appropriately sign-extended or truncated. Use widest_int
2114 so that any extension is done according to ARG1's type. */
2115 return force_fit_type (type, wi::to_widest (arg1),
2116 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2117 TREE_OVERFLOW (arg1));
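/* Worked example (sketch, not part of the original file): because the
   widest_int extension follows the source type's signedness, the
   signed char value -1 converts to 65535 in unsigned short, while a
   plain truncation just keeps the low-order bits.  */
#if 0
tree sc_m1 = build_int_cst (signed_char_type_node, -1);
tree us    = fold_convert_const_int_from_int (short_unsigned_type_node, sc_m1);
/* US is an INTEGER_CST with value 65535.  */
#endif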
2120 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2121 to an integer type. */
2123 static tree
2124 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2126 bool overflow = false;
2127 tree t;
2129 /* The following code implements the floating point to integer
2130 conversion rules required by the Java Language Specification,
2131 that IEEE NaNs are mapped to zero and values that overflow
2132 the target precision saturate, i.e. values greater than
2133 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2134 are mapped to INT_MIN. These semantics are allowed by the
2135 C and C++ standards that simply state that the behavior of
2136 FP-to-integer conversion is unspecified upon overflow. */
2138 wide_int val;
2139 REAL_VALUE_TYPE r;
2140 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2142 switch (code)
2144 case FIX_TRUNC_EXPR:
2145 real_trunc (&r, VOIDmode, &x);
2146 break;
2148 default:
2149 gcc_unreachable ();
2152 /* If R is NaN, return zero and show we have an overflow. */
2153 if (REAL_VALUE_ISNAN (r))
2155 overflow = true;
2156 val = wi::zero (TYPE_PRECISION (type));
2159 /* See if R is less than the lower bound or greater than the
2160 upper bound. */
2162 if (! overflow)
2164 tree lt = TYPE_MIN_VALUE (type);
2165 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2166 if (real_less (&r, &l))
2168 overflow = true;
2169 val = wi::to_wide (lt);
2173 if (! overflow)
2175 tree ut = TYPE_MAX_VALUE (type);
2176 if (ut)
2178 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2179 if (real_less (&u, &r))
2181 overflow = true;
2182 val = wi::to_wide (ut);
2187 if (! overflow)
2188 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2190 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2191 return t;
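/* Worked example (sketch, not part of the original file): out-of-range
   values saturate and the overflow bit is set; NaN maps to zero.  */
#if 0
REAL_VALUE_TYPE r;
real_from_string (&r, "300.5");
tree f = build_real (double_type_node, r);
tree c = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
					   signed_char_type_node, f);
/* C is 127 (saturated to the maximum of signed char) and
   TREE_OVERFLOW (c) is set.  */
#endif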
2194 /* A subroutine of fold_convert_const handling conversions of a
2195 FIXED_CST to an integer type. */
2197 static tree
2198 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2200 tree t;
2201 double_int temp, temp_trunc;
2202 scalar_mode mode;
2204 /* Right shift FIXED_CST to temp by fbit. */
2205 temp = TREE_FIXED_CST (arg1).data;
2206 mode = TREE_FIXED_CST (arg1).mode;
2207 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2209 temp = temp.rshift (GET_MODE_FBIT (mode),
2210 HOST_BITS_PER_DOUBLE_INT,
2211 SIGNED_FIXED_POINT_MODE_P (mode));
2213 /* Left shift temp to temp_trunc by fbit. */
2214 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2215 HOST_BITS_PER_DOUBLE_INT,
2216 SIGNED_FIXED_POINT_MODE_P (mode));
2218 else
2220 temp = double_int_zero;
2221 temp_trunc = double_int_zero;
2224 /* If FIXED_CST is negative, we need to round the value toward 0:
2225 if any of the fractional bits are nonzero, add 1 to TEMP. */
2226 if (SIGNED_FIXED_POINT_MODE_P (mode)
2227 && temp_trunc.is_negative ()
2228 && TREE_FIXED_CST (arg1).data != temp_trunc)
2229 temp += double_int_one;
2231 /* Given a fixed-point constant, make new constant with new type,
2232 appropriately sign-extended or truncated. */
2233 t = force_fit_type (type, temp, -1,
2234 (temp.is_negative ()
2235 && (TYPE_UNSIGNED (type)
2236 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2237 | TREE_OVERFLOW (arg1));
2239 return t;
2242 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2243 to another floating point type. */
2245 static tree
2246 fold_convert_const_real_from_real (tree type, const_tree arg1)
2248 REAL_VALUE_TYPE value;
2249 tree t;
2251 /* If the underlying modes are the same, simply treat it as
2252 copy and rebuild with TREE_REAL_CST information and the
2253 given type. */
2254 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2256 t = build_real (type, TREE_REAL_CST (arg1));
2257 return t;
2260 /* Don't perform the operation if flag_signaling_nans is on
2261 and the operand is a signaling NaN. */
2262 if (HONOR_SNANS (arg1)
2263 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2264 return NULL_TREE;
2266 /* With flag_rounding_math we should respect the current rounding mode
2267 unless the conversion is exact. */
2268 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2269 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2270 return NULL_TREE;
2272 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2273 t = build_real (type, value);
2275 /* If converting an infinity or NAN to a representation that doesn't
2276 have one, set the overflow bit so that we can produce some kind of
2277 error message at the appropriate point if necessary. It's not the
2278 most user-friendly message, but it's better than nothing. */
2279 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2280 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2281 TREE_OVERFLOW (t) = 1;
2282 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2283 && !MODE_HAS_NANS (TYPE_MODE (type)))
2284 TREE_OVERFLOW (t) = 1;
2285 /* Regular overflow, conversion produced an infinity in a mode that
2286 can't represent them. */
2287 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2288 && REAL_VALUE_ISINF (value)
2289 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2290 TREE_OVERFLOW (t) = 1;
2291 else
2292 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2293 return t;
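/* Illustrative sketch (not part of the original file; D stands for a
   placeholder REAL_CST of type double): narrowing 0.1 from double to
   float is inexact, so under -frounding-math the fold is refused; a
   signaling NaN operand is refused the same way under
   -fsignaling-nans.  */
#if 0
tree narrowed = fold_convert_const_real_from_real (float_type_node, d);
if (narrowed == NULL_TREE)
  /* Leave the conversion for the runtime to perform.  */;
#endif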
2296 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2297 to a floating point type. */
2299 static tree
2300 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2302 REAL_VALUE_TYPE value;
2303 tree t;
2305 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2306 &TREE_FIXED_CST (arg1));
2307 t = build_real (type, value);
2309 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2310 return t;
2313 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2314 to another fixed-point type. */
2316 static tree
2317 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2319 FIXED_VALUE_TYPE value;
2320 tree t;
2321 bool overflow_p;
2323 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2324 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2325 t = build_fixed (type, value);
2327 /* Propagate overflow flags. */
2328 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 return t;
2333 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2334 to a fixed-point type. */
2336 static tree
2337 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2339 FIXED_VALUE_TYPE value;
2340 tree t;
2341 bool overflow_p;
2342 double_int di;
2344 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2346 di.low = TREE_INT_CST_ELT (arg1, 0);
2347 if (TREE_INT_CST_NUNITS (arg1) == 1)
2348 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2349 else
2350 di.high = TREE_INT_CST_ELT (arg1, 1);
2352 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2353 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2354 TYPE_SATURATING (type));
2355 t = build_fixed (type, value);
2357 /* Propagate overflow flags. */
2358 if (overflow_p | TREE_OVERFLOW (arg1))
2359 TREE_OVERFLOW (t) = 1;
2360 return t;
2363 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2364 to a fixed-point type. */
2366 static tree
2367 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2369 FIXED_VALUE_TYPE value;
2370 tree t;
2371 bool overflow_p;
2373 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2374 &TREE_REAL_CST (arg1),
2375 TYPE_SATURATING (type));
2376 t = build_fixed (type, value);
2378 /* Propagate overflow flags. */
2379 if (overflow_p | TREE_OVERFLOW (arg1))
2380 TREE_OVERFLOW (t) = 1;
2381 return t;
2384 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2385 type TYPE. If no simplification can be done return NULL_TREE. */
2387 static tree
2388 fold_convert_const (enum tree_code code, tree type, tree arg1)
2390 tree arg_type = TREE_TYPE (arg1);
2391 if (arg_type == type)
2392 return arg1;
2394 /* We can't widen types, since the runtime value could overflow the
2395 original type before being extended to the new type. */
2396 if (POLY_INT_CST_P (arg1)
2397 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2398 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2399 return build_poly_int_cst (type,
2400 poly_wide_int::from (poly_int_cst_value (arg1),
2401 TYPE_PRECISION (type),
2402 TYPE_SIGN (arg_type)));
2404 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2405 || TREE_CODE (type) == OFFSET_TYPE)
2407 if (TREE_CODE (arg1) == INTEGER_CST)
2408 return fold_convert_const_int_from_int (type, arg1);
2409 else if (TREE_CODE (arg1) == REAL_CST)
2410 return fold_convert_const_int_from_real (code, type, arg1);
2411 else if (TREE_CODE (arg1) == FIXED_CST)
2412 return fold_convert_const_int_from_fixed (type, arg1);
2414 else if (SCALAR_FLOAT_TYPE_P (type))
2416 if (TREE_CODE (arg1) == INTEGER_CST)
2418 tree res = build_real_from_int_cst (type, arg1);
2419 /* Avoid the folding if flag_rounding_math is on and the
2420 conversion is not exact. */
2421 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2423 bool fail = false;
2424 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2425 TYPE_PRECISION (TREE_TYPE (arg1)));
2426 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2427 return NULL_TREE;
2429 return res;
2431 else if (TREE_CODE (arg1) == REAL_CST)
2432 return fold_convert_const_real_from_real (type, arg1);
2433 else if (TREE_CODE (arg1) == FIXED_CST)
2434 return fold_convert_const_real_from_fixed (type, arg1);
2436 else if (FIXED_POINT_TYPE_P (type))
2438 if (TREE_CODE (arg1) == FIXED_CST)
2439 return fold_convert_const_fixed_from_fixed (type, arg1);
2440 else if (TREE_CODE (arg1) == INTEGER_CST)
2441 return fold_convert_const_fixed_from_int (type, arg1);
2442 else if (TREE_CODE (arg1) == REAL_CST)
2443 return fold_convert_const_fixed_from_real (type, arg1);
2445 else if (VECTOR_TYPE_P (type))
2447 if (TREE_CODE (arg1) == VECTOR_CST
2448 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2450 tree elttype = TREE_TYPE (type);
2451 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2452 /* We can't handle steps directly when extending, since the
2453 values need to wrap at the original precision first. */
2454 bool step_ok_p
2455 = (INTEGRAL_TYPE_P (elttype)
2456 && INTEGRAL_TYPE_P (arg1_elttype)
2457 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2458 tree_vector_builder v;
2459 if (!v.new_unary_operation (type, arg1, step_ok_p))
2460 return NULL_TREE;
2461 unsigned int len = v.encoded_nelts ();
2462 for (unsigned int i = 0; i < len; ++i)
2464 tree elt = VECTOR_CST_ELT (arg1, i);
2465 tree cvt = fold_convert_const (code, elttype, elt);
2466 if (cvt == NULL_TREE)
2467 return NULL_TREE;
2468 v.quick_push (cvt);
2470 return v.build ();
2473 return NULL_TREE;
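/* Illustrative usage sketch (not part of the original file): the
   dispatcher keys on the target type first and then on the constant's
   tree code, so a single entry point covers the integer, real, fixed
   and elementwise vector cases.  */
#if 0
tree fone = fold_convert_const (FLOAT_EXPR, double_type_node,
				integer_one_node);        /* REAL_CST 1.0 */
tree same = fold_convert_const (NOP_EXPR, integer_type_node,
				build_int_cst (integer_type_node, 7));
/* SAME is the input constant itself: identical types short-circuit.  */
#endif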
2476 /* Construct a vector of zero elements of vector type TYPE. */
2478 static tree
2479 build_zero_vector (tree type)
2481 tree t;
2483 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2484 return build_vector_from_val (type, t);
2487 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2489 bool
2490 fold_convertible_p (const_tree type, const_tree arg)
2492 const_tree orig = TREE_TYPE (arg);
2494 if (type == orig)
2495 return true;
2497 if (TREE_CODE (arg) == ERROR_MARK
2498 || TREE_CODE (type) == ERROR_MARK
2499 || TREE_CODE (orig) == ERROR_MARK)
2500 return false;
2502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2503 return true;
2505 switch (TREE_CODE (type))
2507 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2508 case POINTER_TYPE: case REFERENCE_TYPE:
2509 case OFFSET_TYPE:
2510 return (INTEGRAL_TYPE_P (orig)
2511 || (POINTER_TYPE_P (orig)
2512 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2513 || TREE_CODE (orig) == OFFSET_TYPE);
2515 case REAL_TYPE:
2516 case FIXED_POINT_TYPE:
2517 case VOID_TYPE:
2518 return TREE_CODE (type) == TREE_CODE (orig);
2520 case VECTOR_TYPE:
2521 return (VECTOR_TYPE_P (orig)
2522 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2523 TYPE_VECTOR_SUBPARTS (orig))
2524 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2526 default:
2527 return false;
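/* Illustrative usage sketch (not part of the original file; INT_EXPR is
   a placeholder int-typed tree): the predicate matches what
   fold_convert_loc below can do with a plain NOP_EXPR.  */
#if 0
bool ok  = fold_convertible_p (long_integer_type_node, int_expr); /* true */
bool bad = fold_convertible_p (double_type_node, int_expr);       /* false:
					needs FLOAT_EXPR, not NOP_EXPR */
#endif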
2531 /* Convert expression ARG to type TYPE. Used by the middle-end for
2532 simple conversions in preference to calling the front-end's convert. */
2534 tree
2535 fold_convert_loc (location_t loc, tree type, tree arg)
2537 tree orig = TREE_TYPE (arg);
2538 tree tem;
2540 if (type == orig)
2541 return arg;
2543 if (TREE_CODE (arg) == ERROR_MARK
2544 || TREE_CODE (type) == ERROR_MARK
2545 || TREE_CODE (orig) == ERROR_MARK)
2546 return error_mark_node;
2548 switch (TREE_CODE (type))
2550 case POINTER_TYPE:
2551 case REFERENCE_TYPE:
2552 /* Handle conversions between pointers to different address spaces. */
2553 if (POINTER_TYPE_P (orig)
2554 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2555 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2556 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2557 /* fall through */
2559 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2560 case OFFSET_TYPE:
2561 if (TREE_CODE (arg) == INTEGER_CST)
2563 tem = fold_convert_const (NOP_EXPR, type, arg);
2564 if (tem != NULL_TREE)
2565 return tem;
2567 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2568 || TREE_CODE (orig) == OFFSET_TYPE)
2569 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2570 if (TREE_CODE (orig) == COMPLEX_TYPE)
2571 return fold_convert_loc (loc, type,
2572 fold_build1_loc (loc, REALPART_EXPR,
2573 TREE_TYPE (orig), arg));
2574 gcc_assert (VECTOR_TYPE_P (orig)
2575 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2576 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2578 case REAL_TYPE:
2579 if (TREE_CODE (arg) == INTEGER_CST)
2581 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2582 if (tem != NULL_TREE)
2583 return tem;
2585 else if (TREE_CODE (arg) == REAL_CST)
2587 tem = fold_convert_const (NOP_EXPR, type, arg);
2588 if (tem != NULL_TREE)
2589 return tem;
2591 else if (TREE_CODE (arg) == FIXED_CST)
2593 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2594 if (tem != NULL_TREE)
2595 return tem;
2598 switch (TREE_CODE (orig))
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2605 case REAL_TYPE:
2606 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2608 case FIXED_POINT_TYPE:
2609 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2611 case COMPLEX_TYPE:
2612 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2613 return fold_convert_loc (loc, type, tem);
2615 default:
2616 gcc_unreachable ();
2619 case FIXED_POINT_TYPE:
2620 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2621 || TREE_CODE (arg) == REAL_CST)
2623 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2624 if (tem != NULL_TREE)
2625 goto fold_convert_exit;
2628 switch (TREE_CODE (orig))
2630 case FIXED_POINT_TYPE:
2631 case INTEGER_TYPE:
2632 case ENUMERAL_TYPE:
2633 case BOOLEAN_TYPE:
2634 case REAL_TYPE:
2635 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2637 case COMPLEX_TYPE:
2638 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2639 return fold_convert_loc (loc, type, tem);
2641 default:
2642 gcc_unreachable ();
2645 case COMPLEX_TYPE:
2646 switch (TREE_CODE (orig))
2648 case INTEGER_TYPE:
2649 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2650 case POINTER_TYPE: case REFERENCE_TYPE:
2651 case REAL_TYPE:
2652 case FIXED_POINT_TYPE:
2653 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2654 fold_convert_loc (loc, TREE_TYPE (type), arg),
2655 fold_convert_loc (loc, TREE_TYPE (type),
2656 integer_zero_node));
2657 case COMPLEX_TYPE:
2659 tree rpart, ipart;
2661 if (TREE_CODE (arg) == COMPLEX_EXPR)
2663 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2664 TREE_OPERAND (arg, 0));
2665 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2666 TREE_OPERAND (arg, 1));
2667 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2670 arg = save_expr (arg);
2671 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2673 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2674 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2675 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2678 default:
2679 gcc_unreachable ();
2682 case VECTOR_TYPE:
2683 if (integer_zerop (arg))
2684 return build_zero_vector (type);
2685 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2686 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2687 || VECTOR_TYPE_P (orig));
2688 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2690 case VOID_TYPE:
2691 tem = fold_ignored_result (arg);
2692 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2694 default:
2695 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2696 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2697 gcc_unreachable ();
2699 fold_convert_exit:
2700 tem = protected_set_expr_location_unshare (tem, loc);
2701 return tem;
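/* Illustrative usage sketch (not part of the original file): fold_convert
   is the UNKNOWN_LOCATION wrapper.  Note the complex cases above:
   scalar -> complex pairs the value with a zero imaginary part, and
   complex -> scalar keeps only the real part.  */
#if 0
tree d  = fold_convert (double_type_node, integer_one_node); /* REAL_CST 1.0 */
tree cd = fold_convert (complex_double_type_node,
			integer_one_node);                   /* 1.0 + 0.0i */
#endif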
2704 /* Return false if expr can be assumed not to be an lvalue, true
2705 otherwise. */
2707 static bool
2708 maybe_lvalue_p (const_tree x)
2710 /* We only need to wrap lvalue tree codes. */
2711 switch (TREE_CODE (x))
2713 case VAR_DECL:
2714 case PARM_DECL:
2715 case RESULT_DECL:
2716 case LABEL_DECL:
2717 case FUNCTION_DECL:
2718 case SSA_NAME:
2719 case COMPOUND_LITERAL_EXPR:
2721 case COMPONENT_REF:
2722 case MEM_REF:
2723 case INDIRECT_REF:
2724 case ARRAY_REF:
2725 case ARRAY_RANGE_REF:
2726 case BIT_FIELD_REF:
2727 case OBJ_TYPE_REF:
2729 case REALPART_EXPR:
2730 case IMAGPART_EXPR:
2731 case PREINCREMENT_EXPR:
2732 case PREDECREMENT_EXPR:
2733 case SAVE_EXPR:
2734 case TRY_CATCH_EXPR:
2735 case WITH_CLEANUP_EXPR:
2736 case COMPOUND_EXPR:
2737 case MODIFY_EXPR:
2738 case TARGET_EXPR:
2739 case COND_EXPR:
2740 case BIND_EXPR:
2741 case VIEW_CONVERT_EXPR:
2742 break;
2744 default:
2745 /* Assume the worst for front-end tree codes. */
2746 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2747 break;
2748 return false;
2751 return true;
2754 /* Return an expr equal to X but certainly not valid as an lvalue. */
2756 tree
2757 non_lvalue_loc (location_t loc, tree x)
2759 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2760 us. */
2761 if (in_gimple_form)
2762 return x;
2764 if (! maybe_lvalue_p (x))
2765 return x;
2766 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2769 /* Given a tree comparison code, return the code that is the logical inverse.
2770 It is generally not safe to do this for floating-point comparisons, except
2771 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2772 ERROR_MARK in this case. */
2774 enum tree_code
2775 invert_tree_comparison (enum tree_code code, bool honor_nans)
2777 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2778 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2779 return ERROR_MARK;
2781 switch (code)
2783 case EQ_EXPR:
2784 return NE_EXPR;
2785 case NE_EXPR:
2786 return EQ_EXPR;
2787 case GT_EXPR:
2788 return honor_nans ? UNLE_EXPR : LE_EXPR;
2789 case GE_EXPR:
2790 return honor_nans ? UNLT_EXPR : LT_EXPR;
2791 case LT_EXPR:
2792 return honor_nans ? UNGE_EXPR : GE_EXPR;
2793 case LE_EXPR:
2794 return honor_nans ? UNGT_EXPR : GT_EXPR;
2795 case LTGT_EXPR:
2796 return UNEQ_EXPR;
2797 case UNEQ_EXPR:
2798 return LTGT_EXPR;
2799 case UNGT_EXPR:
2800 return LE_EXPR;
2801 case UNGE_EXPR:
2802 return LT_EXPR;
2803 case UNLT_EXPR:
2804 return GE_EXPR;
2805 case UNLE_EXPR:
2806 return GT_EXPR;
2807 case ORDERED_EXPR:
2808 return UNORDERED_EXPR;
2809 case UNORDERED_EXPR:
2810 return ORDERED_EXPR;
2811 default:
2812 gcc_unreachable ();
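/* Illustrative sketch (not part of the original file): with NaNs honored
   the inverse of an ordered comparison must accept unordered operands,
   which is why LT inverts to UNGE rather than GE.  When trapping math
   is also enabled the unordered form would lose a required exception,
   so ERROR_MARK is returned instead.  */
#if 0
invert_tree_comparison (LT_EXPR, /*honor_nans=*/false); /* GE_EXPR   */
invert_tree_comparison (LT_EXPR, /*honor_nans=*/true);  /* UNGE_EXPR, or
					ERROR_MARK under -ftrapping-math */
#endif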
2816 /* Similar, but return the comparison that results if the operands are
2817 swapped. This is safe for floating-point. */
2819 enum tree_code
2820 swap_tree_comparison (enum tree_code code)
2822 switch (code)
2824 case EQ_EXPR:
2825 case NE_EXPR:
2826 case ORDERED_EXPR:
2827 case UNORDERED_EXPR:
2828 case LTGT_EXPR:
2829 case UNEQ_EXPR:
2830 return code;
2831 case GT_EXPR:
2832 return LT_EXPR;
2833 case GE_EXPR:
2834 return LE_EXPR;
2835 case LT_EXPR:
2836 return GT_EXPR;
2837 case LE_EXPR:
2838 return GE_EXPR;
2839 case UNGT_EXPR:
2840 return UNLT_EXPR;
2841 case UNGE_EXPR:
2842 return UNLE_EXPR;
2843 case UNLT_EXPR:
2844 return UNGT_EXPR;
2845 case UNLE_EXPR:
2846 return UNGE_EXPR;
2847 default:
2848 gcc_unreachable ();
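/* Illustrative sketch (not part of the original file): swapping operands
   never changes how unordered operands are treated, so this is safe
   even for IEEE values: x <= y and y >= x agree on NaNs.  */
#if 0
swap_tree_comparison (LE_EXPR);   /* GE_EXPR   */
swap_tree_comparison (UNGT_EXPR); /* UNLT_EXPR */
#endif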
2853 /* Convert a comparison tree code from an enum tree_code representation
2854 into a compcode bit-based encoding. This function is the inverse of
2855 compcode_to_comparison. */
2857 static enum comparison_code
2858 comparison_to_compcode (enum tree_code code)
2860 switch (code)
2862 case LT_EXPR:
2863 return COMPCODE_LT;
2864 case EQ_EXPR:
2865 return COMPCODE_EQ;
2866 case LE_EXPR:
2867 return COMPCODE_LE;
2868 case GT_EXPR:
2869 return COMPCODE_GT;
2870 case NE_EXPR:
2871 return COMPCODE_NE;
2872 case GE_EXPR:
2873 return COMPCODE_GE;
2874 case ORDERED_EXPR:
2875 return COMPCODE_ORD;
2876 case UNORDERED_EXPR:
2877 return COMPCODE_UNORD;
2878 case UNLT_EXPR:
2879 return COMPCODE_UNLT;
2880 case UNEQ_EXPR:
2881 return COMPCODE_UNEQ;
2882 case UNLE_EXPR:
2883 return COMPCODE_UNLE;
2884 case UNGT_EXPR:
2885 return COMPCODE_UNGT;
2886 case LTGT_EXPR:
2887 return COMPCODE_LTGT;
2888 case UNGE_EXPR:
2889 return COMPCODE_UNGE;
2890 default:
2891 gcc_unreachable ();
2895 /* Convert a compcode bit-based encoding of a comparison operator back
2896 to GCC's enum tree_code representation. This function is the
2897 inverse of comparison_to_compcode. */
2899 static enum tree_code
2900 compcode_to_comparison (enum comparison_code code)
2902 switch (code)
2904 case COMPCODE_LT:
2905 return LT_EXPR;
2906 case COMPCODE_EQ:
2907 return EQ_EXPR;
2908 case COMPCODE_LE:
2909 return LE_EXPR;
2910 case COMPCODE_GT:
2911 return GT_EXPR;
2912 case COMPCODE_NE:
2913 return NE_EXPR;
2914 case COMPCODE_GE:
2915 return GE_EXPR;
2916 case COMPCODE_ORD:
2917 return ORDERED_EXPR;
2918 case COMPCODE_UNORD:
2919 return UNORDERED_EXPR;
2920 case COMPCODE_UNLT:
2921 return UNLT_EXPR;
2922 case COMPCODE_UNEQ:
2923 return UNEQ_EXPR;
2924 case COMPCODE_UNLE:
2925 return UNLE_EXPR;
2926 case COMPCODE_UNGT:
2927 return UNGT_EXPR;
2928 case COMPCODE_LTGT:
2929 return LTGT_EXPR;
2930 case COMPCODE_UNGE:
2931 return UNGE_EXPR;
2932 default:
2933 gcc_unreachable ();
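/* Worked example (sketch, not part of the original file): the bit-based
   encoding turns logical combination of comparisons into plain bit
   operations, which is what combine_comparisons below relies on.  */
#if 0
static_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE, "LT or EQ is LE");
static_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ, "LE and GE is EQ");
static_assert ((COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT, "LT or GT is LTGT");
#endif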
2937 /* Return true if COND1 tests the opposite condition of COND2. */
2939 bool
2940 inverse_conditions_p (const_tree cond1, const_tree cond2)
2942 return (COMPARISON_CLASS_P (cond1)
2943 && COMPARISON_CLASS_P (cond2)
2944 && (invert_tree_comparison
2945 (TREE_CODE (cond1),
2946 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2947 && operand_equal_p (TREE_OPERAND (cond1, 0),
2948 TREE_OPERAND (cond2, 0), 0)
2949 && operand_equal_p (TREE_OPERAND (cond1, 1),
2950 TREE_OPERAND (cond2, 1), 0));
2953 /* Return a tree for the comparison which is the combination of
2954 doing the AND or OR (depending on CODE) of the two operations LCODE
2955 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2956 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2957 if this makes the transformation invalid. */
2959 tree
2960 combine_comparisons (location_t loc,
2961 enum tree_code code, enum tree_code lcode,
2962 enum tree_code rcode, tree truth_type,
2963 tree ll_arg, tree lr_arg)
2965 bool honor_nans = HONOR_NANS (ll_arg);
2966 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2967 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2968 int compcode;
2970 switch (code)
2972 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2973 compcode = lcompcode & rcompcode;
2974 break;
2976 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2977 compcode = lcompcode | rcompcode;
2978 break;
2980 default:
2981 return NULL_TREE;
2984 if (!honor_nans)
2986 /* Eliminate unordered comparisons, as well as LTGT and ORD
2987 which are not used unless the mode has NaNs. */
2988 compcode &= ~COMPCODE_UNORD;
2989 if (compcode == COMPCODE_LTGT)
2990 compcode = COMPCODE_NE;
2991 else if (compcode == COMPCODE_ORD)
2992 compcode = COMPCODE_TRUE;
2994 else if (flag_trapping_math)
2996 /* Check that the original operation and the optimized ones will trap
2997 under the same condition. */
2998 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2999 && (lcompcode != COMPCODE_EQ)
3000 && (lcompcode != COMPCODE_ORD);
3001 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3002 && (rcompcode != COMPCODE_EQ)
3003 && (rcompcode != COMPCODE_ORD);
3004 bool trap = (compcode & COMPCODE_UNORD) == 0
3005 && (compcode != COMPCODE_EQ)
3006 && (compcode != COMPCODE_ORD);
3008 /* In a short-circuited boolean expression the LHS might be
3009 such that the RHS, if evaluated, will never trap. For
3010 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3011 if neither x nor y is NaN. (This is a mixed blessing: for
3012 example, the expression above will never trap, hence
3013 optimizing it to x < y would be invalid). */
3014 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3015 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3016 rtrap = false;
3018 /* If the comparison was short-circuited, and only the RHS
3019 trapped, we may now generate a spurious trap. */
3020 if (rtrap && !ltrap
3021 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3022 return NULL_TREE;
3024 /* If we changed the conditions that cause a trap, we lose. */
3025 if ((ltrap || rtrap) != trap)
3026 return NULL_TREE;
3029 if (compcode == COMPCODE_TRUE)
3030 return constant_boolean_node (true, truth_type);
3031 else if (compcode == COMPCODE_FALSE)
3032 return constant_boolean_node (false, truth_type);
3033 else
3035 enum tree_code tcode;
3037 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3038 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
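/* Worked example (sketch, not part of the original file; X and Y are
   placeholder operands): ORing x < y with x == y goes through the
   compcode encoding as COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE, so the
   combined comparison folds to x <= y.  When the rewrite would change
   which operands trap, NULL_TREE is returned instead, as described
   above.  */
#if 0
tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
			      LT_EXPR, EQ_EXPR, boolean_type_node, x, y);
/* T is the folded (x <= y).  */
#endif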
3042 /* Return nonzero if two operands (typically of the same tree node)
3043 are necessarily equal. FLAGS modifies behavior as follows:
3045 If OEP_ONLY_CONST is set, only return nonzero for constants.
3046 This function tests whether the operands are indistinguishable;
3047 it does not test whether they are equal using C's == operation.
3048 The distinction is important for IEEE floating point, because
3049 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3050 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3052 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3053 even though it may hold multiple values during a function.
3054 This is because a GCC tree node guarantees that nothing else is
3055 executed between the evaluation of its "operands" (which may often
3056 be evaluated in arbitrary order). Hence if the operands themselves
3057 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3058 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3059 unset means assuming isochronic (or instantaneous) tree equivalence.
3060 Unless comparing arbitrary expression trees, such as from different
3061 statements, this flag can usually be left unset.
3063 If OEP_PURE_SAME is set, then pure functions with identical arguments
3064 are considered the same. It is used when the caller has other ways
3065 to ensure that global memory is unchanged in between.
3067 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3068 not values of expressions.
3070 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3071 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3073 If OEP_BITWISE is set, then require the values to be bitwise identical
3074 rather than simply numerically equal. Do not take advantage of things
3075 like math-related flags or undefined behavior; only return true for
3076 values that are provably bitwise identical in all circumstances.
3078 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3079 any operand with side effects. This is unnecessarily conservative in the
3080 case we know that arg0 and arg1 are in disjoint code paths (such as in
3081 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3082 addresses with TREE_CONSTANT flag set so we know that &var == &var
3083 even if var is volatile. */
3085 bool
3086 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3087 unsigned int flags)
3089 bool r;
3090 if (verify_hash_value (arg0, arg1, flags, &r))
3091 return r;
3093 STRIP_ANY_LOCATION_WRAPPER (arg0);
3094 STRIP_ANY_LOCATION_WRAPPER (arg1);
3096 /* If either is ERROR_MARK, they aren't equal. */
3097 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3098 || TREE_TYPE (arg0) == error_mark_node
3099 || TREE_TYPE (arg1) == error_mark_node)
3100 return false;
3102 /* Similarly, if either does not have a type (like a template id),
3103 they aren't equal. */
3104 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3105 return false;
3107 /* Bitwise identity makes no sense if the values have different layouts. */
3108 if ((flags & OEP_BITWISE)
3109 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3110 return false;
3112 /* We cannot consider pointers to different address space equal. */
3113 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3114 && POINTER_TYPE_P (TREE_TYPE (arg1))
3115 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3116 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3117 return false;
3119 /* Check equality of integer constants before bailing out due to
3120 precision differences. */
3121 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3123 /* Address of INTEGER_CST is not defined; check that we did not forget
3124 to drop the OEP_ADDRESS_OF flags. */
3125 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3126 return tree_int_cst_equal (arg0, arg1);
3129 if (!(flags & OEP_ADDRESS_OF))
3131 /* If the two types don't have the same signedness, we can't consider
3132 them equal. We must check this before the STRIP_NOPS calls
3133 because they may change the signedness of the arguments. As pointers
3134 strictly don't have a signedness, require either two pointers or
3135 two non-pointers as well. */
3136 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3137 || POINTER_TYPE_P (TREE_TYPE (arg0))
3138 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3139 return false;
3141 /* If the two types don't have the same precision, it is not safe
3142 to strip NOPs. */
3143 if (element_precision (TREE_TYPE (arg0))
3144 != element_precision (TREE_TYPE (arg1)))
3145 return false;
3147 STRIP_NOPS (arg0);
3148 STRIP_NOPS (arg1);
3150 #if 0
3151 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3152 sanity check once the issue is solved. */
3153 else
3154 /* Addresses of conversions and SSA_NAMEs (and many other things)
3155 are not defined. Check that we did not forget to drop the
3156 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3157 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3158 && TREE_CODE (arg0) != SSA_NAME);
3159 #endif
3161 /* In case both args are comparisons but with different comparison
3162 code, try to swap the comparison operands of one arg to produce
3163 a match and compare that variant. */
3164 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3165 && COMPARISON_CLASS_P (arg0)
3166 && COMPARISON_CLASS_P (arg1))
3168 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3170 if (TREE_CODE (arg0) == swap_code)
3171 return operand_equal_p (TREE_OPERAND (arg0, 0),
3172 TREE_OPERAND (arg1, 1), flags)
3173 && operand_equal_p (TREE_OPERAND (arg0, 1),
3174 TREE_OPERAND (arg1, 0), flags);
3177 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3179 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3180 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3182 else if (flags & OEP_ADDRESS_OF)
3184 /* If we are interested in comparing addresses ignore
3185 MEM_REF wrappings of the base that can appear just for
3186 TBAA reasons. */
3187 if (TREE_CODE (arg0) == MEM_REF
3188 && DECL_P (arg1)
3189 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3190 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3191 && integer_zerop (TREE_OPERAND (arg0, 1)))
3192 return true;
3193 else if (TREE_CODE (arg1) == MEM_REF
3194 && DECL_P (arg0)
3195 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3196 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3197 && integer_zerop (TREE_OPERAND (arg1, 1)))
3198 return true;
3199 return false;
3201 else
3202 return false;
3205 /* When not checking addresses, this is needed for conversions and for
3206 COMPONENT_REF. Might as well play it safe and always test this. */
3207 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3208 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3209 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3210 && !(flags & OEP_ADDRESS_OF)))
3211 return false;
3213 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3214 We don't care about side effects in that case because the SAVE_EXPR
3215 takes care of that for us. In all other cases, two expressions are
3216 equal if they have no side effects. If we have two identical
3217 expressions with side effects that should be treated the same due
3218 to the only side effects being identical SAVE_EXPR's, that will
3219 be detected in the recursive calls below.
3220 If we are taking an invariant address of two identical objects
3221 they are necessarily equal as well. */
3222 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3223 && (TREE_CODE (arg0) == SAVE_EXPR
3224 || (flags & OEP_MATCH_SIDE_EFFECTS)
3225 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3226 return true;
3228 /* Next handle constant cases, those for which we can return 1 even
3229 if ONLY_CONST is set. */
3230 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3231 switch (TREE_CODE (arg0))
3233 case INTEGER_CST:
3234 return tree_int_cst_equal (arg0, arg1);
3236 case FIXED_CST:
3237 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3238 TREE_FIXED_CST (arg1));
3240 case REAL_CST:
3241 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3242 return true;
3244 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3246 /* If we do not distinguish between signed and unsigned zero,
3247 consider them equal. */
3248 if (real_zerop (arg0) && real_zerop (arg1))
3249 return true;
3251 return false;
3253 case VECTOR_CST:
3255 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3256 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3257 return false;
3259 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3260 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3261 return false;
3263 unsigned int count = vector_cst_encoded_nelts (arg0);
3264 for (unsigned int i = 0; i < count; ++i)
3265 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3266 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3267 return false;
3268 return true;
3271 case COMPLEX_CST:
3272 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3273 flags)
3274 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3275 flags));
3277 case STRING_CST:
3278 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3279 && ! memcmp (TREE_STRING_POINTER (arg0),
3280 TREE_STRING_POINTER (arg1),
3281 TREE_STRING_LENGTH (arg0)));
3283 case ADDR_EXPR:
3284 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3285 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3286 flags | OEP_ADDRESS_OF
3287 | OEP_MATCH_SIDE_EFFECTS);
3288 case CONSTRUCTOR:
3289 /* In GIMPLE empty constructors are allowed in initializers of
3290 aggregates. */
3291 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3292 default:
3293 break;
3296 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3297 two instances of undefined behavior will give identical results. */
3298 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3299 return false;
3301 /* Define macros to test an operand from arg0 and arg1 for equality and a
3302 variant that allows null and views null as being different from any
3303 non-null value. In the latter case, if either is null, then both
3304 must be; otherwise, do the normal comparison. */
3305 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3306 TREE_OPERAND (arg1, N), flags)
3308 #define OP_SAME_WITH_NULL(N) \
3309 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3310 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3312 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3314 case tcc_unary:
3315 /* Two conversions are equal only if signedness and modes match. */
3316 switch (TREE_CODE (arg0))
3318 CASE_CONVERT:
3319 case FIX_TRUNC_EXPR:
3320 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3321 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3322 return false;
3323 break;
3324 default:
3325 break;
3328 return OP_SAME (0);
3331 case tcc_comparison:
3332 case tcc_binary:
3333 if (OP_SAME (0) && OP_SAME (1))
3334 return true;
3336 /* For commutative ops, allow the other order. */
3337 return (commutative_tree_code (TREE_CODE (arg0))
3338 && operand_equal_p (TREE_OPERAND (arg0, 0),
3339 TREE_OPERAND (arg1, 1), flags)
3340 && operand_equal_p (TREE_OPERAND (arg0, 1),
3341 TREE_OPERAND (arg1, 0), flags));
3343 case tcc_reference:
3344 /* If either of the pointer (or reference) expressions we are
3345 dereferencing contain a side effect, these cannot be equal,
3346 but their addresses can be. */
3347 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3348 && (TREE_SIDE_EFFECTS (arg0)
3349 || TREE_SIDE_EFFECTS (arg1)))
3350 return false;
3352 switch (TREE_CODE (arg0))
3354 case INDIRECT_REF:
3355 if (!(flags & OEP_ADDRESS_OF))
3357 if (TYPE_ALIGN (TREE_TYPE (arg0))
3358 != TYPE_ALIGN (TREE_TYPE (arg1)))
3359 return false;
3360 /* Verify that the access types are compatible. */
3361 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3362 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3363 return false;
3365 flags &= ~OEP_ADDRESS_OF;
3366 return OP_SAME (0);
3368 case IMAGPART_EXPR:
3369 /* Require the same offset. */
3370 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3371 TYPE_SIZE (TREE_TYPE (arg1)),
3372 flags & ~OEP_ADDRESS_OF))
3373 return false;
3375 /* Fallthru. */
3376 case REALPART_EXPR:
3377 case VIEW_CONVERT_EXPR:
3378 return OP_SAME (0);
3380 case TARGET_MEM_REF:
3381 case MEM_REF:
3382 if (!(flags & OEP_ADDRESS_OF))
3384 /* Require equal access sizes */
3385 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3386 && (!TYPE_SIZE (TREE_TYPE (arg0))
3387 || !TYPE_SIZE (TREE_TYPE (arg1))
3388 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3389 TYPE_SIZE (TREE_TYPE (arg1)),
3390 flags)))
3391 return false;
3392 /* Verify that access happens in similar types. */
3393 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3394 return false;
3395 /* Verify that accesses are TBAA compatible. */
3396 if (!alias_ptr_types_compatible_p
3397 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3398 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3399 || (MR_DEPENDENCE_CLIQUE (arg0)
3400 != MR_DEPENDENCE_CLIQUE (arg1))
3401 || (MR_DEPENDENCE_BASE (arg0)
3402 != MR_DEPENDENCE_BASE (arg1)))
3403 return false;
3404 /* Verify that alignment is compatible. */
3405 if (TYPE_ALIGN (TREE_TYPE (arg0))
3406 != TYPE_ALIGN (TREE_TYPE (arg1)))
3407 return false;
3409 flags &= ~OEP_ADDRESS_OF;
3410 return (OP_SAME (0) && OP_SAME (1)
3411 /* TARGET_MEM_REF require equal extra operands. */
3412 && (TREE_CODE (arg0) != TARGET_MEM_REF
3413 || (OP_SAME_WITH_NULL (2)
3414 && OP_SAME_WITH_NULL (3)
3415 && OP_SAME_WITH_NULL (4))));
3417 case ARRAY_REF:
3418 case ARRAY_RANGE_REF:
3419 if (!OP_SAME (0))
3420 return false;
3421 flags &= ~OEP_ADDRESS_OF;
3422 /* Compare the array index by value first if it is constant, as we
3423 may have different types but the same value here. */
3424 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3425 TREE_OPERAND (arg1, 1))
3426 || OP_SAME (1))
3427 && OP_SAME_WITH_NULL (2)
3428 && OP_SAME_WITH_NULL (3)
3429 /* Compare low bound and element size as with OEP_ADDRESS_OF
3430 we have to account for the offset of the ref. */
3431 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3432 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3433 || (operand_equal_p (array_ref_low_bound
3434 (CONST_CAST_TREE (arg0)),
3435 array_ref_low_bound
3436 (CONST_CAST_TREE (arg1)), flags)
3437 && operand_equal_p (array_ref_element_size
3438 (CONST_CAST_TREE (arg0)),
3439 array_ref_element_size
3440 (CONST_CAST_TREE (arg1)),
3441 flags))));
3443 case COMPONENT_REF:
3444 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3445 may be NULL when we're called to compare MEM_EXPRs. */
3446 if (!OP_SAME_WITH_NULL (0))
3447 return false;
3449 bool compare_address = flags & OEP_ADDRESS_OF;
3451 /* Most of the time we only need to compare FIELD_DECLs for equality.
3452 However, when determining addresses, look into the actual offsets;
3453 these may match for unions and unshared record types. */
3454 flags &= ~OEP_ADDRESS_OF;
3455 if (!OP_SAME (1))
3457 if (compare_address
3458 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3460 tree field0 = TREE_OPERAND (arg0, 1);
3461 tree field1 = TREE_OPERAND (arg1, 1);
3463 /* Non-FIELD_DECL operands can appear in C++ templates. */
3464 if (TREE_CODE (field0) != FIELD_DECL
3465 || TREE_CODE (field1) != FIELD_DECL
3466 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3467 DECL_FIELD_OFFSET (field1), flags)
3468 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3469 DECL_FIELD_BIT_OFFSET (field1),
3470 flags))
3471 return false;
3473 else
3474 return false;
3477 return OP_SAME_WITH_NULL (2);
3479 case BIT_FIELD_REF:
3480 if (!OP_SAME (0))
3481 return false;
3482 flags &= ~OEP_ADDRESS_OF;
3483 return OP_SAME (1) && OP_SAME (2);
3485 default:
3486 return false;
3489 case tcc_expression:
3490 switch (TREE_CODE (arg0))
3492 case ADDR_EXPR:
3493 /* Be sure we pass the right ADDRESS_OF flag. */
3494 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3495 return operand_equal_p (TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0),
3497 flags | OEP_ADDRESS_OF);
3499 case TRUTH_NOT_EXPR:
3500 return OP_SAME (0);
3502 case TRUTH_ANDIF_EXPR:
3503 case TRUTH_ORIF_EXPR:
3504 return OP_SAME (0) && OP_SAME (1);
3506 case WIDEN_MULT_PLUS_EXPR:
3507 case WIDEN_MULT_MINUS_EXPR:
3508 if (!OP_SAME (2))
3509 return false;
3510 /* The multiplication operands are commutative. */
3511 /* FALLTHRU */
3513 case TRUTH_AND_EXPR:
3514 case TRUTH_OR_EXPR:
3515 case TRUTH_XOR_EXPR:
3516 if (OP_SAME (0) && OP_SAME (1))
3517 return true;
3519 /* Otherwise take into account this is a commutative operation. */
3520 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3521 TREE_OPERAND (arg1, 1), flags)
3522 && operand_equal_p (TREE_OPERAND (arg0, 1),
3523 TREE_OPERAND (arg1, 0), flags));
3525 case COND_EXPR:
3526 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3527 return false;
3528 flags &= ~OEP_ADDRESS_OF;
3529 return OP_SAME (0);
3531 case BIT_INSERT_EXPR:
3532 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3533 of op1. Need to check that it is the same for both arguments. */
3534 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3535 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3536 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3537 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3538 return false;
3539 /* FALLTHRU */
3541 case VEC_COND_EXPR:
3542 case DOT_PROD_EXPR:
3543 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3545 case MODIFY_EXPR:
3546 case INIT_EXPR:
3547 case COMPOUND_EXPR:
3548 case PREDECREMENT_EXPR:
3549 case PREINCREMENT_EXPR:
3550 case POSTDECREMENT_EXPR:
3551 case POSTINCREMENT_EXPR:
3552 if (flags & OEP_LEXICOGRAPHIC)
3553 return OP_SAME (0) && OP_SAME (1);
3554 return false;
3556 case CLEANUP_POINT_EXPR:
3557 case EXPR_STMT:
3558 case SAVE_EXPR:
3559 if (flags & OEP_LEXICOGRAPHIC)
3560 return OP_SAME (0);
3561 return false;
3563 case OBJ_TYPE_REF:
3564 /* Virtual table reference. */
3565 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3566 OBJ_TYPE_REF_EXPR (arg1), flags))
3567 return false;
3568 flags &= ~OEP_ADDRESS_OF;
3569 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3570 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3571 return false;
3572 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3573 OBJ_TYPE_REF_OBJECT (arg1), flags))
3574 return false;
3575 if (virtual_method_call_p (arg0))
3577 if (!virtual_method_call_p (arg1))
3578 return false;
3579 return types_same_for_odr (obj_type_ref_class (arg0),
3580 obj_type_ref_class (arg1));
3582 return false;
3584 default:
3585 return false;
3588 case tcc_vl_exp:
3589 switch (TREE_CODE (arg0))
3591 case CALL_EXPR:
3592 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3593 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3594 /* If the CALL_EXPRs are not both internal or both normal function
3595 calls, then they are not equal. */
3596 return false;
3597 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3599 /* If the CALL_EXPRs call different internal functions, then they
3600 are not equal. */
3601 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3602 return false;
3604 else
3606 /* If the CALL_EXPRs call different functions, then they are not
3607 equal. */
3608 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3609 flags))
3610 return false;
3613 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3615 unsigned int cef = call_expr_flags (arg0);
3616 if (flags & OEP_PURE_SAME)
3617 cef &= ECF_CONST | ECF_PURE;
3618 else
3619 cef &= ECF_CONST;
3620 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3621 return false;
3624 /* Now see if all the arguments are the same. */
3626 const_call_expr_arg_iterator iter0, iter1;
3627 const_tree a0, a1;
3628 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3629 a1 = first_const_call_expr_arg (arg1, &iter1);
3630 a0 && a1;
3631 a0 = next_const_call_expr_arg (&iter0),
3632 a1 = next_const_call_expr_arg (&iter1))
3633 if (! operand_equal_p (a0, a1, flags))
3634 return false;
3636 /* If we get here and both argument lists are exhausted
3637 then the CALL_EXPRs are equal. */
3638 return ! (a0 || a1);
3640 default:
3641 return false;
3644 case tcc_declaration:
3645 /* Consider __builtin_sqrt equal to sqrt. */
3646 if (TREE_CODE (arg0) == FUNCTION_DECL)
3647 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3648 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3649 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3650 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3652 if (DECL_P (arg0)
3653 && (flags & OEP_DECL_NAME)
3654 && (flags & OEP_LEXICOGRAPHIC))
3656 /* Consider decls with the same name equal. The caller needs
3657 to make sure they refer to the same entity (such as a function
3658 formal parameter). */
3659 tree a0name = DECL_NAME (arg0);
3660 tree a1name = DECL_NAME (arg1);
3661 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3662 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3663 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3665 return false;
3667 case tcc_exceptional:
3668 if (TREE_CODE (arg0) == CONSTRUCTOR)
3670 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3671 return false;
3673 /* In GIMPLE constructors are used only to build vectors from
3674 elements. Individual elements in the constructor must be
3675 indexed in increasing order and form an initial sequence.
3677 We make no effort to compare constructors in generic.
3678 (see sem_variable::equals in ipa-icf which can do so for
3679 constants). */
3680 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3681 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3682 return false;
3684 /* Be sure that the vectors constructed have the same representation.
3685 We only tested that element precisions and modes match.
3686 Vectors may be BLKmode, so also check that the number of
3687 parts matches. */
3688 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3689 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3690 return false;
3692 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3693 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3694 unsigned int len = vec_safe_length (v0);
3696 if (len != vec_safe_length (v1))
3697 return false;
3699 for (unsigned int i = 0; i < len; i++)
3701 constructor_elt *c0 = &(*v0)[i];
3702 constructor_elt *c1 = &(*v1)[i];
3704 if (!operand_equal_p (c0->value, c1->value, flags)
3705 /* In GIMPLE the indexes can be either NULL or matching i.
3706 Double check this so we won't get false
3707 positives for GENERIC. */
3708 || (c0->index
3709 && (TREE_CODE (c0->index) != INTEGER_CST
3710 || compare_tree_int (c0->index, i)))
3711 || (c1->index
3712 && (TREE_CODE (c1->index) != INTEGER_CST
3713 || compare_tree_int (c1->index, i))))
3714 return false;
3716 return true;
3718 else if (TREE_CODE (arg0) == STATEMENT_LIST
3719 && (flags & OEP_LEXICOGRAPHIC))
3721 /* Compare the STATEMENT_LISTs. */
3722 tree_stmt_iterator tsi1, tsi2;
3723 tree body1 = CONST_CAST_TREE (arg0);
3724 tree body2 = CONST_CAST_TREE (arg1);
3725 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3726 tsi_next (&tsi1), tsi_next (&tsi2))
3728 /* The lists don't have the same number of statements. */
3729 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3730 return false;
3731 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3732 return true;
3733 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3734 flags & (OEP_LEXICOGRAPHIC
3735 | OEP_NO_HASH_CHECK)))
3736 return false;
3739 return false;
3741 case tcc_statement:
3742 switch (TREE_CODE (arg0))
3744 case RETURN_EXPR:
3745 if (flags & OEP_LEXICOGRAPHIC)
3746 return OP_SAME_WITH_NULL (0);
3747 return false;
3748 case DEBUG_BEGIN_STMT:
3749 if (flags & OEP_LEXICOGRAPHIC)
3750 return true;
3751 return false;
3752 default:
3753 return false;
3756 default:
3757 return false;
3760 #undef OP_SAME
3761 #undef OP_SAME_WITH_NULL
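/* Illustrative usage sketch (not part of the original file; A and B are
   placeholder operands): the FLAGS bits compose.  For instance, with
   OEP_ADDRESS_OF the mode check above is skipped, since two accesses
   with different modes can still name the same object.  */
#if 0
bool values = operand_equal_p (a, b, 0);
bool consts = operand_equal_p (a, b, OEP_ONLY_CONST); /* constants only */
bool addrs  = operand_equal_p (a, b, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
#endif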
3764 /* Generate a hash value for an expression. This can be used iteratively
3765 by passing a previous result as the HSTATE argument. */
3767 void
3768 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3769 unsigned int flags)
3771 int i;
3772 enum tree_code code;
3773 enum tree_code_class tclass;
3775 if (t == NULL_TREE || t == error_mark_node)
3777 hstate.merge_hash (0);
3778 return;
3781 STRIP_ANY_LOCATION_WRAPPER (t);
3783 if (!(flags & OEP_ADDRESS_OF))
3784 STRIP_NOPS (t);
3786 code = TREE_CODE (t);
3788 switch (code)
3790 /* Alas, constants aren't shared, so we can't rely on pointer
3791 identity. */
3792 case VOID_CST:
3793 hstate.merge_hash (0);
3794 return;
3795 case INTEGER_CST:
3796 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3797 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3798 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3799 return;
3800 case REAL_CST:
3802 unsigned int val2;
3803 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3804 val2 = rvc_zero;
3805 else
3806 val2 = real_hash (TREE_REAL_CST_PTR (t));
3807 hstate.merge_hash (val2);
3808 return;
3810 case FIXED_CST:
3812 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3813 hstate.merge_hash (val2);
3814 return;
3816 case STRING_CST:
3817 hstate.add ((const void *) TREE_STRING_POINTER (t),
3818 TREE_STRING_LENGTH (t));
3819 return;
3820 case COMPLEX_CST:
3821 hash_operand (TREE_REALPART (t), hstate, flags);
3822 hash_operand (TREE_IMAGPART (t), hstate, flags);
3823 return;
3824 case VECTOR_CST:
3826 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3827 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3828 unsigned int count = vector_cst_encoded_nelts (t);
3829 for (unsigned int i = 0; i < count; ++i)
3830 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3831 return;
3833 case SSA_NAME:
3834 /* We can just compare by pointer. */
3835 hstate.add_hwi (SSA_NAME_VERSION (t));
3836 return;
3837 case PLACEHOLDER_EXPR:
3838 /* The node itself doesn't matter. */
3839 return;
3840 case BLOCK:
3841 case OMP_CLAUSE:
3842 /* Ignore. */
3843 return;
3844 case TREE_LIST:
3845 /* A list of expressions, for a CALL_EXPR or as the elements of a
3846 VECTOR_CST. */
3847 for (; t; t = TREE_CHAIN (t))
3848 hash_operand (TREE_VALUE (t), hstate, flags);
3849 return;
3850 case CONSTRUCTOR:
3852 unsigned HOST_WIDE_INT idx;
3853 tree field, value;
3854 flags &= ~OEP_ADDRESS_OF;
3855 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3858 /* In GIMPLE the indexes can be either NULL or matching i. */
3859 if (field == NULL_TREE)
3860 field = bitsize_int (idx);
3861 hash_operand (field, hstate, flags);
3862 hash_operand (value, hstate, flags);
3864 return;
3866 case STATEMENT_LIST:
3868 tree_stmt_iterator i;
3869 for (i = tsi_start (CONST_CAST_TREE (t));
3870 !tsi_end_p (i); tsi_next (&i))
3871 hash_operand (tsi_stmt (i), hstate, flags);
3872 return;
3874 case TREE_VEC:
3875 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3876 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3877 return;
3878 case IDENTIFIER_NODE:
3879 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3880 return;
3881 case FUNCTION_DECL:
3882 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3883 Otherwise nodes that compare equal according to operand_equal_p might
3884 get different hash codes. However, don't do this for machine specific
3885 or front end builtins, since the function code is overloaded in those
3886 cases. */
3887 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3888 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3890 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3891 code = TREE_CODE (t);
3893 /* FALL THROUGH */
3894 default:
3895 if (POLY_INT_CST_P (t))
3897 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3898 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3899 return;
3901 tclass = TREE_CODE_CLASS (code);
3903 if (tclass == tcc_declaration)
3905 /* DECLs have a unique ID. */
3906 hstate.add_hwi (DECL_UID (t));
3908 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3910 /* For comparisons that can be swapped, use the lower
3911 tree code. */
3912 enum tree_code ccode = swap_tree_comparison (code);
3913 if (code < ccode)
3914 ccode = code;
3915 hstate.add_object (ccode);
3916 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3917 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3919 else if (CONVERT_EXPR_CODE_P (code))
3921 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3922 operand_equal_p. */
3923 enum tree_code ccode = NOP_EXPR;
3924 hstate.add_object (ccode);
3926 /* Don't hash the type, as that can lead to having nodes which
3927 compare equal according to operand_equal_p, but which
3928 have different hash codes. Make sure to include signedness
3929 in the hash computation. */
3930 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3931 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3933 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3934 else if (code == MEM_REF
3935 && (flags & OEP_ADDRESS_OF) != 0
3936 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3937 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3938 && integer_zerop (TREE_OPERAND (t, 1)))
3939 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3940 hstate, flags);
3941 /* Don't ICE on FE specific trees, or their arguments etc.
3942 during operand_equal_p hash verification. */
3943 else if (!IS_EXPR_CODE_CLASS (tclass))
3944 gcc_assert (flags & OEP_HASH_CHECK);
3945 else
3947 unsigned int sflags = flags;
3949 hstate.add_object (code);
3951 switch (code)
3953 case ADDR_EXPR:
3954 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3955 flags |= OEP_ADDRESS_OF;
3956 sflags = flags;
3957 break;
3959 case INDIRECT_REF:
3960 case MEM_REF:
3961 case TARGET_MEM_REF:
3962 flags &= ~OEP_ADDRESS_OF;
3963 sflags = flags;
3964 break;
3966 case COMPONENT_REF:
3967 if (sflags & OEP_ADDRESS_OF)
3969 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3970 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3971 hstate, flags & ~OEP_ADDRESS_OF);
3972 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3973 hstate, flags & ~OEP_ADDRESS_OF);
3974 return;
3976 break;
3977 case ARRAY_REF:
3978 case ARRAY_RANGE_REF:
3979 case BIT_FIELD_REF:
3980 sflags &= ~OEP_ADDRESS_OF;
3981 break;
3983 case COND_EXPR:
3984 flags &= ~OEP_ADDRESS_OF;
3985 break;
3987 case WIDEN_MULT_PLUS_EXPR:
3988 case WIDEN_MULT_MINUS_EXPR:
3990 /* The multiplication operands are commutative. */
3991 inchash::hash one, two;
3992 hash_operand (TREE_OPERAND (t, 0), one, flags);
3993 hash_operand (TREE_OPERAND (t, 1), two, flags);
3994 hstate.add_commutative (one, two);
3995 hash_operand (TREE_OPERAND (t, 2), two, flags);
3996 return;
3999 case CALL_EXPR:
4000 if (CALL_EXPR_FN (t) == NULL_TREE)
4001 hstate.add_int (CALL_EXPR_IFN (t));
4002 break;
4004 case TARGET_EXPR:
4005 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4006 Usually different TARGET_EXPRs should just use
4007 different temporaries in their slots. */
4008 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4009 return;
4011 case OBJ_TYPE_REF:
4012 /* Virtual table reference. */
4013 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4014 flags &= ~OEP_ADDRESS_OF;
4015 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4016 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4017 if (!virtual_method_call_p (t))
4018 return;
4019 if (tree c = obj_type_ref_class (t))
4021 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4022 /* We compute mangled names only when free_lang_data is run.
4023 In that case we can hash precisely. */
4024 if (TREE_CODE (c) == TYPE_DECL
4025 && DECL_ASSEMBLER_NAME_SET_P (c))
4026 hstate.add_object
4027 (IDENTIFIER_HASH_VALUE
4028 (DECL_ASSEMBLER_NAME (c)));
4030 return;
4031 default:
4032 break;
4035 /* Don't hash the type, as that can lead to having nodes which
4036 compare equal according to operand_equal_p, but which
4037 have different hash codes. */
4038 if (code == NON_LVALUE_EXPR)
4040 /* Make sure to include signedness in the hash computation. */
4041 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4042 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4045 else if (commutative_tree_code (code))
4047 /* It's a commutative expression. We want to hash it the same
4048 however it appears. We do this by first hashing both operands
4049 and then rehashing based on the order of their independent
4050 hashes. */
4051 inchash::hash one, two;
4052 hash_operand (TREE_OPERAND (t, 0), one, flags);
4053 hash_operand (TREE_OPERAND (t, 1), two, flags);
4054 hstate.add_commutative (one, two);
4056 else
4057 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4058 hash_operand (TREE_OPERAND (t, i), hstate,
4059 i == 0 ? flags : sflags);
4061 return;
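/* For example, the commutative and swapped-comparison handling above
   ensures that hash (a + b) == hash (b + a) and that a < b hashes the
   same as b > a: operand hashes are combined order-independently, and
   a comparison is first canonicalized to the lower of the two swapped
   tree codes. */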
4065 bool
4066 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4067 unsigned int flags, bool *ret)
4069 /* When checking and unless comparing DECL names, verify that if
4070 the outermost operand_equal_p call returns non-zero then ARG0
4071 and ARG1 have the same hash value. */
4072 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4074 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4076 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4078 inchash::hash hstate0 (0), hstate1 (0);
4079 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4080 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4081 hashval_t h0 = hstate0.end ();
4082 hashval_t h1 = hstate1.end ();
4083 gcc_assert (h0 == h1);
4085 *ret = true;
4087 else
4088 *ret = false;
4090 return true;
4093 return false;
4097 static operand_compare default_compare_instance;
4099 /* Convenience wrapper around the operand_compare class because usually we do
4100 not need to play with the valueizer. */
4102 bool
4103 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4105 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4108 namespace inchash
4111 /* Generate a hash value for an expression. This can be used iteratively
4112 by passing a previous result as the HSTATE argument.
4114 This function is intended to produce the same hash for expressions which
4115 would compare equal using operand_equal_p. */
4116 void
4117 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4119 default_compare_instance.hash_operand (t, hstate, flags);
4124 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4125 with a different signedness or a narrower precision. */
4127 static bool
4128 operand_equal_for_comparison_p (tree arg0, tree arg1)
4130 if (operand_equal_p (arg0, arg1, 0))
4131 return true;
4133 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4134 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4135 return false;
4137 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4138 and see if the inner values are the same. This removes any
4139 signedness comparison, which doesn't matter here. */
4140 tree op0 = arg0;
4141 tree op1 = arg1;
4142 STRIP_NOPS (op0);
4143 STRIP_NOPS (op1);
4144 if (operand_equal_p (op0, op1, 0))
4145 return true;
4147 /* Discard a single widening conversion from ARG1 and see if the inner
4148 value is the same as ARG0. */
4149 if (CONVERT_EXPR_P (arg1)
4150 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4151 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4152 < TYPE_PRECISION (TREE_TYPE (arg1))
4153 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4154 return true;
4156 return false;
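/* For example, given short s, ARG0 = s and ARG1 = (int) s compare
   equal here: the conversion on ARG1 is a single widening conversion
   from a narrower integral type, and stripping it exposes the same
   operand. */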
4159 /* See if ARG is an expression that is either a comparison or is performing
4160 arithmetic on comparisons. The comparisons must only be comparing
4161 two different values, which will be stored in *CVAL1 and *CVAL2; if
4162 they are nonzero it means that some operands have already been found.
4163 No variables may be used anywhere else in the expression except in the
4164 comparisons.
4166 If this is true, return true. Otherwise, return false. */
4168 static bool
4169 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4171 enum tree_code code = TREE_CODE (arg);
4172 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4174 /* We can handle some of the tcc_expression cases here. */
4175 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4176 tclass = tcc_unary;
4177 else if (tclass == tcc_expression
4178 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4179 || code == COMPOUND_EXPR))
4180 tclass = tcc_binary;
4182 switch (tclass)
4184 case tcc_unary:
4185 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4187 case tcc_binary:
4188 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4189 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4191 case tcc_constant:
4192 return true;
4194 case tcc_expression:
4195 if (code == COND_EXPR)
4196 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4197 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4198 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4199 return false;
4201 case tcc_comparison:
4202 /* First see if we can handle the first operand, then the second. For
4203 the second operand, we know *CVAL1 can't be zero. It must be that
4204 one side of the comparison is each of the values; test for the
4205 case where this isn't true by failing if the two operands
4206 are the same. */
4208 if (operand_equal_p (TREE_OPERAND (arg, 0),
4209 TREE_OPERAND (arg, 1), 0))
4210 return false;
4212 if (*cval1 == 0)
4213 *cval1 = TREE_OPERAND (arg, 0);
4214 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4216 else if (*cval2 == 0)
4217 *cval2 = TREE_OPERAND (arg, 0);
4218 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4220 else
4221 return false;
4223 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4225 else if (*cval2 == 0)
4226 *cval2 = TREE_OPERAND (arg, 1);
4227 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4229 else
4230 return false;
4232 return true;
4234 default:
4235 return false;
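/* For example, (a < b) || (a == b) satisfies this predicate with
   *CVAL1 = a and *CVAL2 = b, whereas (a < b) || (c == d) does not,
   since it compares more than two distinct values. */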
4239 /* ARG is a tree that is known to contain just arithmetic operations and
4240 comparisons. Evaluate the operations in the tree substituting NEW0 for
4241 any occurrence of OLD0 as an operand of a comparison and likewise for
4242 NEW1 and OLD1. */
4244 static tree
4245 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4246 tree old1, tree new1)
4248 tree type = TREE_TYPE (arg);
4249 enum tree_code code = TREE_CODE (arg);
4250 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4252 /* We can handle some of the tcc_expression cases here. */
4253 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4254 tclass = tcc_unary;
4255 else if (tclass == tcc_expression
4256 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4257 tclass = tcc_binary;
4259 switch (tclass)
4261 case tcc_unary:
4262 return fold_build1_loc (loc, code, type,
4263 eval_subst (loc, TREE_OPERAND (arg, 0),
4264 old0, new0, old1, new1));
4266 case tcc_binary:
4267 return fold_build2_loc (loc, code, type,
4268 eval_subst (loc, TREE_OPERAND (arg, 0),
4269 old0, new0, old1, new1),
4270 eval_subst (loc, TREE_OPERAND (arg, 1),
4271 old0, new0, old1, new1));
4273 case tcc_expression:
4274 switch (code)
4276 case SAVE_EXPR:
4277 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4278 old1, new1);
4280 case COMPOUND_EXPR:
4281 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4282 old1, new1);
4284 case COND_EXPR:
4285 return fold_build3_loc (loc, code, type,
4286 eval_subst (loc, TREE_OPERAND (arg, 0),
4287 old0, new0, old1, new1),
4288 eval_subst (loc, TREE_OPERAND (arg, 1),
4289 old0, new0, old1, new1),
4290 eval_subst (loc, TREE_OPERAND (arg, 2),
4291 old0, new0, old1, new1));
4292 default:
4293 break;
4295 /* Fall through - ??? */
4297 case tcc_comparison:
4299 tree arg0 = TREE_OPERAND (arg, 0);
4300 tree arg1 = TREE_OPERAND (arg, 1);
4302 /* We need to check both for exact equality and tree equality. The
4303 former will be true if the operand has a side-effect. In that
4304 case, we know the operand occurred exactly once. */
4306 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4307 arg0 = new0;
4308 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4309 arg0 = new1;
4311 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4312 arg1 = new0;
4313 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4314 arg1 = new1;
4316 return fold_build2_loc (loc, code, type, arg0, arg1);
4319 default:
4320 return arg;
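/* For example, substituting OLD0 = a -> NEW0 = x and OLD1 = b ->
   NEW1 = y in (a == b) && (a < c) yields (x == y) && (x < c): only
   comparison operands are replaced, and c matches neither OLD0 nor
   OLD1. */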
4324 /* Return a tree for the case when the result of an expression is RESULT
4325 converted to TYPE and OMITTED was previously an operand of the expression
4326 but is now not needed (e.g., we folded OMITTED * 0).
4328 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4329 the conversion of RESULT to TYPE. */
4331 tree
4332 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4334 tree t = fold_convert_loc (loc, type, result);
4336 /* If the resulting operand is an empty statement, just return the omitted
4337 statement casted to void. */
4338 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4339 return build1_loc (loc, NOP_EXPR, void_type_node,
4340 fold_ignored_result (omitted));
4342 if (TREE_SIDE_EFFECTS (omitted))
4343 return build2_loc (loc, COMPOUND_EXPR, type,
4344 fold_ignored_result (omitted), t);
4346 return non_lvalue_loc (loc, t);
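/* For example, when folding f () * 0 the RESULT is 0 and OMITTED is
   the call f (); since the call has side effects, the folded form is
   the COMPOUND_EXPR (f (), 0) rather than plain 0. */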
4349 /* Return a tree for the case when the result of an expression is RESULT
4350 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4351 of the expression but are now not needed.
4353 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4354 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4355 evaluated before OMITTED2. Otherwise, if neither has side effects,
4356 just do the conversion of RESULT to TYPE. */
4358 tree
4359 omit_two_operands_loc (location_t loc, tree type, tree result,
4360 tree omitted1, tree omitted2)
4362 tree t = fold_convert_loc (loc, type, result);
4364 if (TREE_SIDE_EFFECTS (omitted2))
4365 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4366 if (TREE_SIDE_EFFECTS (omitted1))
4367 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4369 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4373 /* Return a simplified tree node for the truth-negation of ARG. This
4374 never alters ARG itself. We assume that ARG is an operation that
4375 returns a truth value (0 or 1).
4377 FIXME: one would think we would fold the result, but it causes
4378 problems with the dominator optimizer. */
4380 static tree
4381 fold_truth_not_expr (location_t loc, tree arg)
4383 tree type = TREE_TYPE (arg);
4384 enum tree_code code = TREE_CODE (arg);
4385 location_t loc1, loc2;
4387 /* If this is a comparison, we can simply invert it, except for
4388 floating-point non-equality comparisons, in which case we just
4389 enclose a TRUTH_NOT_EXPR around what we have. */
4391 if (TREE_CODE_CLASS (code) == tcc_comparison)
4393 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4394 if (FLOAT_TYPE_P (op_type)
4395 && flag_trapping_math
4396 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4397 && code != NE_EXPR && code != EQ_EXPR)
4398 return NULL_TREE;
4400 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4401 if (code == ERROR_MARK)
4402 return NULL_TREE;
4404 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4405 TREE_OPERAND (arg, 1));
4406 copy_warning (ret, arg);
4407 return ret;
4410 switch (code)
4412 case INTEGER_CST:
4413 return constant_boolean_node (integer_zerop (arg), type);
4415 case TRUTH_AND_EXPR:
4416 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4417 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4418 return build2_loc (loc, TRUTH_OR_EXPR, type,
4419 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4420 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4422 case TRUTH_OR_EXPR:
4423 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4424 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4425 return build2_loc (loc, TRUTH_AND_EXPR, type,
4426 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4427 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4429 case TRUTH_XOR_EXPR:
4430 /* Here we can invert either operand. We invert the first operand
4431 unless the second operand is a TRUTH_NOT_EXPR in which case our
4432 result is the XOR of the first operand with the inside of the
4433 negation of the second operand. */
4435 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4436 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4437 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4438 else
4439 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4440 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4441 TREE_OPERAND (arg, 1));
4443 case TRUTH_ANDIF_EXPR:
4444 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4445 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4446 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4447 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4448 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4450 case TRUTH_ORIF_EXPR:
4451 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4452 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4453 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4454 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4455 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4457 case TRUTH_NOT_EXPR:
4458 return TREE_OPERAND (arg, 0);
4460 case COND_EXPR:
4462 tree arg1 = TREE_OPERAND (arg, 1);
4463 tree arg2 = TREE_OPERAND (arg, 2);
4465 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4466 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4468 /* A COND_EXPR may have a throw as one operand, which
4469 then has void type. Just leave void operands
4470 as they are. */
4471 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4472 VOID_TYPE_P (TREE_TYPE (arg1))
4473 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4474 VOID_TYPE_P (TREE_TYPE (arg2))
4475 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4478 case COMPOUND_EXPR:
4479 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4480 return build2_loc (loc, COMPOUND_EXPR, type,
4481 TREE_OPERAND (arg, 0),
4482 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4484 case NON_LVALUE_EXPR:
4485 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4486 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4488 CASE_CONVERT:
4489 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4490 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4492 /* fall through */
4494 case FLOAT_EXPR:
4495 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4496 return build1_loc (loc, TREE_CODE (arg), type,
4497 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4499 case BIT_AND_EXPR:
4500 if (!integer_onep (TREE_OPERAND (arg, 1)))
4501 return NULL_TREE;
4502 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4504 case SAVE_EXPR:
4505 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4507 case CLEANUP_POINT_EXPR:
4508 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4509 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4510 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4512 default:
4513 return NULL_TREE;
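/* For example, a <= b is inverted to a > b for integral operands, and
   a && b becomes !a || !b by De Morgan; for an ordered floating-point
   comparison such as a < b under -ftrapping-math we return NULL_TREE,
   and the caller falls back to an explicit TRUTH_NOT_EXPR. */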
4517 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4518 assume that ARG is an operation that returns a truth value (0 or 1
4519 for scalars, 0 or -1 for vectors). Return the folded expression if
4520 folding is successful. Otherwise, return NULL_TREE. */
4522 static tree
4523 fold_invert_truthvalue (location_t loc, tree arg)
4525 tree type = TREE_TYPE (arg);
4526 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4527 ? BIT_NOT_EXPR
4528 : TRUTH_NOT_EXPR,
4529 type, arg);
4532 /* Return a simplified tree node for the truth-negation of ARG. This
4533 never alters ARG itself. We assume that ARG is an operation that
4534 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4536 tree
4537 invert_truthvalue_loc (location_t loc, tree arg)
4539 if (TREE_CODE (arg) == ERROR_MARK)
4540 return arg;
4542 tree type = TREE_TYPE (arg);
4543 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4544 ? BIT_NOT_EXPR
4545 : TRUTH_NOT_EXPR,
4546 type, arg);
4549 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4550 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4551 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4552 is the original memory reference used to preserve the alias set of
4553 the access. */
4555 static tree
4556 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4557 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4558 int unsignedp, int reversep)
4560 tree result, bftype;
4562 /* Attempt not to lose the access path if possible. */
4563 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4565 tree ninner = TREE_OPERAND (orig_inner, 0);
4566 machine_mode nmode;
4567 poly_int64 nbitsize, nbitpos;
4568 tree noffset;
4569 int nunsignedp, nreversep, nvolatilep = 0;
4570 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4571 &noffset, &nmode, &nunsignedp,
4572 &nreversep, &nvolatilep);
4573 if (base == inner
4574 && noffset == NULL_TREE
4575 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4576 && !reversep
4577 && !nreversep
4578 && !nvolatilep)
4580 inner = ninner;
4581 bitpos -= nbitpos;
4585 alias_set_type iset = get_alias_set (orig_inner);
4586 if (iset == 0 && get_alias_set (inner) != iset)
4587 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4588 build_fold_addr_expr (inner),
4589 build_int_cst (ptr_type_node, 0));
4591 if (known_eq (bitpos, 0) && !reversep)
4593 tree size = TYPE_SIZE (TREE_TYPE (inner));
4594 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4595 || POINTER_TYPE_P (TREE_TYPE (inner)))
4596 && tree_fits_shwi_p (size)
4597 && tree_to_shwi (size) == bitsize)
4598 return fold_convert_loc (loc, type, inner);
4601 bftype = type;
4602 if (TYPE_PRECISION (bftype) != bitsize
4603 || TYPE_UNSIGNED (bftype) == !unsignedp)
4604 bftype = build_nonstandard_integer_type (bitsize, 0);
4606 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4607 bitsize_int (bitsize), bitsize_int (bitpos));
4608 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4610 if (bftype != type)
4611 result = fold_convert_loc (loc, type, result);
4613 return result;
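/* For example, a reference to all 32 bits of a 32-bit int INNER at
   bit position 0 degenerates to a plain conversion of INNER to TYPE;
   otherwise the result is a BIT_FIELD_REF, if necessary of a
   specially built integer type of exactly BITSIZE bits. */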
4616 /* Optimize a bit-field compare.
4618 There are two cases: First is a compare against a constant and the
4619 second is a comparison of two items where the fields are at the same
4620 bit position relative to the start of a chunk (byte, halfword, word)
4621 large enough to contain it. In these cases we can avoid the shift
4622 implicit in bitfield extractions.
4624 For constants, we emit a compare of the shifted constant with the
4625 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4626 compared. For two fields at the same position, we do the ANDs with the
4627 similar mask and compare the result of the ANDs.
4629 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4630 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4631 are the left and right operands of the comparison, respectively.
4633 If the optimization described above can be done, we return the resulting
4634 tree. Otherwise we return zero. */
4636 static tree
4637 optimize_bit_field_compare (location_t loc, enum tree_code code,
4638 tree compare_type, tree lhs, tree rhs)
4640 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4641 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4642 tree type = TREE_TYPE (lhs);
4643 tree unsigned_type;
4644 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4645 machine_mode lmode, rmode;
4646 scalar_int_mode nmode;
4647 int lunsignedp, runsignedp;
4648 int lreversep, rreversep;
4649 int lvolatilep = 0, rvolatilep = 0;
4650 tree linner, rinner = NULL_TREE;
4651 tree mask;
4652 tree offset;
4654 /* Get all the information about the extractions being done. If the bit size
4655 is the same as the size of the underlying object, we aren't doing an
4656 extraction at all and so can do nothing. We also don't want to
4657 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4658 then will no longer be able to replace it. */
4659 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4660 &lunsignedp, &lreversep, &lvolatilep);
4661 if (linner == lhs
4662 || !known_size_p (plbitsize)
4663 || !plbitsize.is_constant (&lbitsize)
4664 || !plbitpos.is_constant (&lbitpos)
4665 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4666 || offset != 0
4667 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4668 || lvolatilep)
4669 return 0;
4671 if (const_p)
4672 rreversep = lreversep;
4673 else
4675 /* If this is not a constant, we can only do something if bit positions,
4676 sizes, signedness and storage order are the same. */
4677 rinner
4678 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4679 &runsignedp, &rreversep, &rvolatilep);
4681 if (rinner == rhs
4682 || maybe_ne (lbitpos, rbitpos)
4683 || maybe_ne (lbitsize, rbitsize)
4684 || lunsignedp != runsignedp
4685 || lreversep != rreversep
4686 || offset != 0
4687 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4688 || rvolatilep)
4689 return 0;
4692 /* Honor the C++ memory model and mimic what RTL expansion does. */
4693 poly_uint64 bitstart = 0;
4694 poly_uint64 bitend = 0;
4695 if (TREE_CODE (lhs) == COMPONENT_REF)
4697 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4698 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4699 return 0;
4702 /* See if we can find a mode to refer to this field. We should be able to,
4703 but fail if we can't. */
4704 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4705 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4706 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4707 TYPE_ALIGN (TREE_TYPE (rinner))),
4708 BITS_PER_WORD, false, &nmode))
4709 return 0;
4711 /* Set signed and unsigned types of the precision of this mode for the
4712 shifts below. */
4713 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4715 /* Compute the bit position and size for the new reference and our offset
4716 within it. If the new reference is the same size as the original, we
4717 won't optimize anything, so return zero. */
4718 nbitsize = GET_MODE_BITSIZE (nmode);
4719 nbitpos = lbitpos & ~ (nbitsize - 1);
4720 lbitpos -= nbitpos;
4721 if (nbitsize == lbitsize)
4722 return 0;
4724 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4725 lbitpos = nbitsize - lbitsize - lbitpos;
4727 /* Make the mask to be used against the extracted field. */
4728 mask = build_int_cst_type (unsigned_type, -1);
4729 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4730 mask = const_binop (RSHIFT_EXPR, mask,
4731 size_int (nbitsize - lbitsize - lbitpos));
4733 if (! const_p)
4735 if (nbitpos < 0)
4736 return 0;
4738 /* If not comparing with constant, just rework the comparison
4739 and return. */
4740 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4741 nbitsize, nbitpos, 1, lreversep);
4742 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4743 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4744 nbitsize, nbitpos, 1, rreversep);
4745 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4746 return fold_build2_loc (loc, code, compare_type, t1, t2);
4749 /* Otherwise, we are handling the constant case. See if the constant is too
4750 big for the field. Warn and return a tree for 0 (false) if so. We do
4751 this not only for its own sake, but to avoid having to test for this
4752 error case below. If we didn't, we might generate wrong code.
4754 For unsigned fields, the constant shifted right by the field length should
4755 be all zero. For signed fields, the high-order bits should agree with
4756 the sign bit. */
4758 if (lunsignedp)
4760 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4762 warning (0, "comparison is always %d due to width of bit-field",
4763 code == NE_EXPR);
4764 return constant_boolean_node (code == NE_EXPR, compare_type);
4767 else
4769 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4770 if (tem != 0 && tem != -1)
4772 warning (0, "comparison is always %d due to width of bit-field",
4773 code == NE_EXPR);
4774 return constant_boolean_node (code == NE_EXPR, compare_type);
4778 if (nbitpos < 0)
4779 return 0;
4781 /* Single-bit compares should always be against zero. */
4782 if (lbitsize == 1 && ! integer_zerop (rhs))
4784 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4785 rhs = build_int_cst (type, 0);
4788 /* Make a new bitfield reference, shift the constant over the
4789 appropriate number of bits and mask it with the computed mask
4790 (in case this was a signed field). If we changed it, make a new one. */
4791 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4792 nbitsize, nbitpos, 1, lreversep);
4794 rhs = const_binop (BIT_AND_EXPR,
4795 const_binop (LSHIFT_EXPR,
4796 fold_convert_loc (loc, unsigned_type, rhs),
4797 size_int (lbitpos)),
4798 mask);
4800 lhs = build2_loc (loc, code, compare_type,
4801 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4802 return lhs;
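/* For example, given struct S { unsigned f : 3; } s, the test
   s.f == 9 can never be true: 9 >> 3 is nonzero, so we warn that the
   comparison is always 0 and fold to a constant.  A representable
   constant such as s.f == 2 instead becomes a mask-and-compare on the
   word containing the field, avoiding the extraction shift. */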
4805 /* Subroutine for fold_truth_andor_1: decode a field reference.
4807 If EXP is a comparison reference, we return the innermost reference.
4809 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4810 set to the starting bit number.
4812 If the innermost field can be completely contained in a mode-sized
4813 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4815 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4816 otherwise it is not changed.
4818 *PUNSIGNEDP is set to the signedness of the field.
4820 *PREVERSEP is set to the storage order of the field.
4822 *PMASK is set to the mask used. This is either contained in a
4823 BIT_AND_EXPR or derived from the width of the field.
4825 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4827 Return 0 if this is not a component reference or is one that we can't
4828 do anything with. */
4830 static tree
4831 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4832 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4833 int *punsignedp, int *preversep, int *pvolatilep,
4834 tree *pmask, tree *pand_mask)
4836 tree exp = *exp_;
4837 tree outer_type = 0;
4838 tree and_mask = 0;
4839 tree mask, inner, offset;
4840 tree unsigned_type;
4841 unsigned int precision;
4843 /* All the optimizations using this function assume integer fields.
4844 There are problems with FP fields since the type_for_size call
4845 below can fail for, e.g., XFmode. */
4846 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4847 return NULL_TREE;
4849 /* We are interested in the bare arrangement of bits, so strip everything
4850 that doesn't affect the machine mode. However, record the type of the
4851 outermost expression if it may matter below. */
4852 if (CONVERT_EXPR_P (exp)
4853 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4854 outer_type = TREE_TYPE (exp);
4855 STRIP_NOPS (exp);
4857 if (TREE_CODE (exp) == BIT_AND_EXPR)
4859 and_mask = TREE_OPERAND (exp, 1);
4860 exp = TREE_OPERAND (exp, 0);
4861 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4862 if (TREE_CODE (and_mask) != INTEGER_CST)
4863 return NULL_TREE;
4866 poly_int64 poly_bitsize, poly_bitpos;
4867 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4868 pmode, punsignedp, preversep, pvolatilep);
4869 if ((inner == exp && and_mask == 0)
4870 || !poly_bitsize.is_constant (pbitsize)
4871 || !poly_bitpos.is_constant (pbitpos)
4872 || *pbitsize < 0
4873 || offset != 0
4874 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4875 /* Reject out-of-bound accesses (PR79731). */
4876 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4877 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4878 *pbitpos + *pbitsize) < 0))
4879 return NULL_TREE;
4881 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4882 if (unsigned_type == NULL_TREE)
4883 return NULL_TREE;
4885 *exp_ = exp;
4887 /* If the number of bits in the reference is the same as the bitsize of
4888 the outer type, then the outer type gives the signedness. Otherwise
4889 (in case of a small bitfield) the signedness is unchanged. */
4890 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4891 *punsignedp = TYPE_UNSIGNED (outer_type);
4893 /* Compute the mask to access the bitfield. */
4894 precision = TYPE_PRECISION (unsigned_type);
4896 mask = build_int_cst_type (unsigned_type, -1);
4898 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4899 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4901 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4902 if (and_mask != 0)
4903 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4904 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4906 *pmask = mask;
4907 *pand_mask = and_mask;
4908 return inner;
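/* The mask construction above is the usual shift pair: e.g. if
   *PBITSIZE is 5 and the chosen unsigned type is 8 bits wide, then
   (0xff << 3) >> 3, computed in that unsigned type, leaves 0x1f, the
   low-order 5 bits; any BIT_AND_EXPR mask found on EXP is then ANDed
   into the result. */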
4911 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4912 bit positions and MASK is SIGNED. */
4914 static bool
4915 all_ones_mask_p (const_tree mask, unsigned int size)
4917 tree type = TREE_TYPE (mask);
4918 unsigned int precision = TYPE_PRECISION (type);
4920 /* If this function returns true when the type of the mask is
4921 UNSIGNED, then there will be errors. In particular see
4922 gcc.c-torture/execute/990326-1.c. There does not appear to be
4923 any documentation paper trail as to why this is so. But the pre
4924 wide-int worked with that restriction and it has been preserved
4925 here. */
4926 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4927 return false;
4929 return wi::mask (size, false, precision) == wi::to_wide (mask);
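/* For example, a mask of 0xff in a signed 32-bit type with SIZE == 8
   satisfies this predicate; the same value in an unsigned type is
   rejected, per the restriction described above. */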
4932 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4933 represents the sign bit of EXP's type. If EXP represents a sign
4934 or zero extension, also test VAL against the unextended type.
4935 The return value is the (sub)expression whose sign bit is VAL,
4936 or NULL_TREE otherwise. */
4938 tree
4939 sign_bit_p (tree exp, const_tree val)
4941 int width;
4942 tree t;
4944 /* Tree EXP must have an integral type. */
4945 t = TREE_TYPE (exp);
4946 if (! INTEGRAL_TYPE_P (t))
4947 return NULL_TREE;
4949 /* Tree VAL must be an integer constant. */
4950 if (TREE_CODE (val) != INTEGER_CST
4951 || TREE_OVERFLOW (val))
4952 return NULL_TREE;
4954 width = TYPE_PRECISION (t);
4955 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4956 return exp;
4958 /* Handle extension from a narrower type. */
4959 if (TREE_CODE (exp) == NOP_EXPR
4960 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4961 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4963 return NULL_TREE;
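/* For example, for an int EXP and VAL == INT_MIN (only bit 31 set)
   we return EXP itself; for EXP == (int) c with signed char c and
   VAL == 0x80, the sign bit is recognized against the unextended
   8-bit type and the inner operand c is returned. */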
4966 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4967 operand is simple enough to be evaluated unconditionally. */
4969 static bool
4970 simple_operand_p (const_tree exp)
4972 /* Strip any conversions that don't change the machine mode. */
4973 STRIP_NOPS (exp);
4975 return (CONSTANT_CLASS_P (exp)
4976 || TREE_CODE (exp) == SSA_NAME
4977 || (DECL_P (exp)
4978 && ! TREE_ADDRESSABLE (exp)
4979 && ! TREE_THIS_VOLATILE (exp)
4980 && ! DECL_NONLOCAL (exp)
4981 /* Don't regard global variables as simple. They may be
4982 allocated in ways unknown to the compiler (shared memory,
4983 #pragma weak, etc). */
4984 && ! TREE_PUBLIC (exp)
4985 && ! DECL_EXTERNAL (exp)
4986 /* Weakrefs are not safe to be read, since they can be NULL.
4987 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4988 have DECL_WEAK flag set. */
4989 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4990 /* Loading a static variable is unduly expensive, but global
4991 registers aren't expensive. */
4992 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4995 /* Determine if an operand is simple enough to be evaluated unconditionally.
4996 In addition to simple_operand_p, we assume that comparisons, conversions,
4997 and logic-not operations are simple, if their operands are simple, too. */
4999 bool
5000 simple_condition_p (tree exp)
5002 enum tree_code code;
5004 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5005 return false;
5007 while (CONVERT_EXPR_P (exp))
5008 exp = TREE_OPERAND (exp, 0);
5010 code = TREE_CODE (exp);
5012 if (TREE_CODE_CLASS (code) == tcc_comparison)
5013 return (simple_operand_p (TREE_OPERAND (exp, 0))
5014 && simple_operand_p (TREE_OPERAND (exp, 1)));
5016 if (code == TRUTH_NOT_EXPR)
5017 return simple_condition_p (TREE_OPERAND (exp, 0));
5019 return simple_operand_p (exp);
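/* For example, with local int a and b whose addresses are not taken,
   the condition a == b is simple and may be evaluated
   unconditionally, while g == 0 for a global g is not: the global may
   be modified or allocated in ways the compiler cannot see. */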
5023 /* The following functions are subroutines to fold_range_test and allow it to
5024 try to change a logical combination of comparisons into a range test.
5026 For example, both
5027 X == 2 || X == 3 || X == 4 || X == 5
5028 and
5029 X >= 2 && X <= 5
5030 are converted to
5031 (unsigned) (X - 2) <= 3
5033 We describe each set of comparisons as being either inside or outside
5034 a range, using a variable named like IN_P, and then describe the
5035 range with a lower and upper bound. If one of the bounds is omitted,
5036 it represents either the highest or lowest value of the type.
5038 In the comments below, we represent a range by two numbers in brackets
5039 preceded by a "+" to designate being inside that range, or a "-" to
5040 designate being outside that range, so the condition can be inverted by
5041 flipping the prefix. An omitted bound is represented by a "-". For
5042 example, "- [-, 10]" means being outside the range starting at the lowest
5043 possible value and ending at 10, in other words, being greater than 10.
5044 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5045 always false.
5047 We set up things so that the missing bounds are handled in a consistent
5048 manner so neither a missing bound nor "true" and "false" need to be
5049 handled using a special case. */
5051 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5052 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5053 and UPPER1_P are nonzero if the respective argument is an upper bound
5054 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5055 must be specified for a comparison. ARG1 will be converted to ARG0's
5056 type if both are specified. */
5058 static tree
5059 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5060 tree arg1, int upper1_p)
5062 tree tem;
5063 int result;
5064 int sgn0, sgn1;
5066 /* If neither arg represents infinity, do the normal operation.
5067 Else, if not a comparison, return infinity. Else handle the special
5068 comparison rules. Note that most of the cases below won't occur, but
5069 are handled for consistency. */
5071 if (arg0 != 0 && arg1 != 0)
5073 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5074 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5075 STRIP_NOPS (tem);
5076 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5079 if (TREE_CODE_CLASS (code) != tcc_comparison)
5080 return 0;
5082 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5083 for neither. In real maths, we cannot assume open ended ranges are
5084 the same. But, this is computer arithmetic, where numbers are finite.
5085 We can therefore make the transformation of any unbounded range with
5086 the value Z, Z being greater than any representable number. This permits
5087 us to treat unbounded ranges as equal. */
5088 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5089 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5090 switch (code)
5092 case EQ_EXPR:
5093 result = sgn0 == sgn1;
5094 break;
5095 case NE_EXPR:
5096 result = sgn0 != sgn1;
5097 break;
5098 case LT_EXPR:
5099 result = sgn0 < sgn1;
5100 break;
5101 case LE_EXPR:
5102 result = sgn0 <= sgn1;
5103 break;
5104 case GT_EXPR:
5105 result = sgn0 > sgn1;
5106 break;
5107 case GE_EXPR:
5108 result = sgn0 >= sgn1;
5109 break;
5110 default:
5111 gcc_unreachable ();
5114 return constant_boolean_node (result, type);
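/* For example, with ARG0 omitted as an upper bound (SGN0 == 1) and
   ARG1 omitted as a lower bound (SGN1 == -1), a GT_EXPR yields true:
   an unbounded upper limit compares greater than an unbounded lower
   limit. */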
5117 /* Helper routine for make_range. Perform one step for it, return
5118 new expression if the loop should continue or NULL_TREE if it should
5119 stop. */
5121 tree
5122 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5123 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5124 bool *strict_overflow_p)
5126 tree arg0_type = TREE_TYPE (arg0);
5127 tree n_low, n_high, low = *p_low, high = *p_high;
5128 int in_p = *p_in_p, n_in_p;
5130 switch (code)
5132 case TRUTH_NOT_EXPR:
5133 /* We can only do something if the range is testing for zero. */
5134 if (low == NULL_TREE || high == NULL_TREE
5135 || ! integer_zerop (low) || ! integer_zerop (high))
5136 return NULL_TREE;
5137 *p_in_p = ! in_p;
5138 return arg0;
5140 case EQ_EXPR: case NE_EXPR:
5141 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5142 /* We can only do something if the range is testing for zero
5143 and if the second operand is an integer constant. Note that
5144 saying something is "in" the range we make is done by
5145 complementing IN_P since it will be set in the initial case of
5146 being not equal to zero; "out" is leaving it alone. */
5147 if (low == NULL_TREE || high == NULL_TREE
5148 || ! integer_zerop (low) || ! integer_zerop (high)
5149 || TREE_CODE (arg1) != INTEGER_CST)
5150 return NULL_TREE;
5152 switch (code)
5154 case NE_EXPR: /* - [c, c] */
5155 low = high = arg1;
5156 break;
5157 case EQ_EXPR: /* + [c, c] */
5158 in_p = ! in_p, low = high = arg1;
5159 break;
5160 case GT_EXPR: /* - [-, c] */
5161 low = 0, high = arg1;
5162 break;
5163 case GE_EXPR: /* + [c, -] */
5164 in_p = ! in_p, low = arg1, high = 0;
5165 break;
5166 case LT_EXPR: /* - [c, -] */
5167 low = arg1, high = 0;
5168 break;
5169 case LE_EXPR: /* + [-, c] */
5170 in_p = ! in_p, low = 0, high = arg1;
5171 break;
5172 default:
5173 gcc_unreachable ();
5176 /* If this is an unsigned comparison, we also know that EXP is
5177 greater than or equal to zero. We base the range tests we make
5178 on that fact, so we record it here so we can parse existing
5179 range tests. We test arg0_type since often the return type
5180 of, e.g. EQ_EXPR, is boolean. */
5181 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5183 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5184 in_p, low, high, 1,
5185 build_int_cst (arg0_type, 0),
5186 NULL_TREE))
5187 return NULL_TREE;
5189 in_p = n_in_p, low = n_low, high = n_high;
5191 /* If the high bound is missing, but we have a nonzero low
5192 bound, reverse the range so it goes from zero to the low bound
5193 minus 1. */
5194 if (high == 0 && low && ! integer_zerop (low))
5196 in_p = ! in_p;
5197 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5198 build_int_cst (TREE_TYPE (low), 1), 0);
5199 low = build_int_cst (arg0_type, 0);
5203 *p_low = low;
5204 *p_high = high;
5205 *p_in_p = in_p;
5206 return arg0;
5208 case NEGATE_EXPR:
5209 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5210 low and high are non-NULL, then normalize will DTRT. */
5211 if (!TYPE_UNSIGNED (arg0_type)
5212 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5214 if (low == NULL_TREE)
5215 low = TYPE_MIN_VALUE (arg0_type);
5216 if (high == NULL_TREE)
5217 high = TYPE_MAX_VALUE (arg0_type);
5220 /* (-x) IN [a,b] -> x in [-b, -a] */
5221 n_low = range_binop (MINUS_EXPR, exp_type,
5222 build_int_cst (exp_type, 0),
5223 0, high, 1);
5224 n_high = range_binop (MINUS_EXPR, exp_type,
5225 build_int_cst (exp_type, 0),
5226 0, low, 0);
5227 if (n_high != 0 && TREE_OVERFLOW (n_high))
5228 return NULL_TREE;
5229 goto normalize;
5231 case BIT_NOT_EXPR:
5232 /* ~ X -> -X - 1 */
5233 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5234 build_int_cst (exp_type, 1));
5236 case PLUS_EXPR:
5237 case MINUS_EXPR:
5238 if (TREE_CODE (arg1) != INTEGER_CST)
5239 return NULL_TREE;
5241 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5242 move a constant to the other side. */
5243 if (!TYPE_UNSIGNED (arg0_type)
5244 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5245 return NULL_TREE;
5247 /* If EXP is signed, any overflow in the computation is undefined,
5248 so we don't worry about it so long as our computations on
5249 the bounds don't overflow. For unsigned, overflow is defined
5250 and this is exactly the right thing. */
5251 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5252 arg0_type, low, 0, arg1, 0);
5253 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5254 arg0_type, high, 1, arg1, 0);
5255 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5256 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5257 return NULL_TREE;
5259 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5260 *strict_overflow_p = true;
5262 normalize:
5263 /* Check for an unsigned range which has wrapped around the maximum
5264 value thus making n_high < n_low, and normalize it. */
5265 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5267 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5268 build_int_cst (TREE_TYPE (n_high), 1), 0);
5269 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5270 build_int_cst (TREE_TYPE (n_low), 1), 0);
5272 /* If the range is of the form +/- [ x+1, x ], we won't
5273 be able to normalize it. But then, it represents the
5274 whole range or the empty set, so make it
5275 +/- [ -, - ]. */
5276 if (tree_int_cst_equal (n_low, low)
5277 && tree_int_cst_equal (n_high, high))
5278 low = high = 0;
5279 else
5280 in_p = ! in_p;
5282 else
5283 low = n_low, high = n_high;
5285 *p_low = low;
5286 *p_high = high;
5287 *p_in_p = in_p;
5288 return arg0;
5290 CASE_CONVERT:
5291 case NON_LVALUE_EXPR:
5292 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5293 return NULL_TREE;
5295 if (! INTEGRAL_TYPE_P (arg0_type)
5296 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5297 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5298 return NULL_TREE;
5300 n_low = low, n_high = high;
5302 if (n_low != 0)
5303 n_low = fold_convert_loc (loc, arg0_type, n_low);
5305 if (n_high != 0)
5306 n_high = fold_convert_loc (loc, arg0_type, n_high);
5308 /* If we're converting arg0 from an unsigned type, to exp,
5309 a signed type, we will be doing the comparison as unsigned.
5310 The tests above have already verified that LOW and HIGH
5311 are both positive.
5313 So we have to ensure that we will handle large unsigned
5314 values the same way that the current signed bounds treat
5315 negative values. */
5317 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5319 tree high_positive;
5320 tree equiv_type;
5321 /* For fixed-point modes, we need to pass the saturating flag
5322 as the 2nd parameter. */
5323 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5324 equiv_type
5325 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5326 TYPE_SATURATING (arg0_type));
5327 else
5328 equiv_type
5329 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5331 /* A range without an upper bound is, naturally, unbounded.
5332 Since convert would have cropped a very large value, use
5333 the max value for the destination type. */
5334 high_positive
5335 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5336 : TYPE_MAX_VALUE (arg0_type);
5338 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5339 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5340 fold_convert_loc (loc, arg0_type,
5341 high_positive),
5342 build_int_cst (arg0_type, 1));
5344 /* If the low bound is specified, "and" the range with the
5345 range for which the original unsigned value will be
5346 positive. */
5347 if (low != 0)
5349 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5350 1, fold_convert_loc (loc, arg0_type,
5351 integer_zero_node),
5352 high_positive))
5353 return NULL_TREE;
5355 in_p = (n_in_p == in_p);
5357 else
5359 /* Otherwise, "or" the range with the range of the input
5360 that will be interpreted as negative. */
5361 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5362 1, fold_convert_loc (loc, arg0_type,
5363 integer_zero_node),
5364 high_positive))
5365 return NULL_TREE;
5367 in_p = (in_p != n_in_p);
5371 /* Otherwise, if we are converting arg0 from signed type, to exp,
5372 an unsigned type, we will do the comparison as signed. If
5373 high is non-NULL, we punt above if it doesn't fit in the signed
5374 type, so if we get through here, +[-, high] or +[low, high] are
5375 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5376 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5377 high is not, the +[low, -] range is equivalent to union of
5378 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5379 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5380 low being 0, which should be treated as [-, -]. */
5381 else if (TYPE_UNSIGNED (exp_type)
5382 && !TYPE_UNSIGNED (arg0_type)
5383 && low
5384 && !high)
5386 if (integer_zerop (low))
5387 n_low = NULL_TREE;
5388 else
5390 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5391 n_low, build_int_cst (arg0_type, -1));
5392 n_low = build_zero_cst (arg0_type);
5393 in_p = !in_p;
5397 *p_low = n_low;
5398 *p_high = n_high;
5399 *p_in_p = in_p;
5400 return arg0;
5402 default:
5403 return NULL_TREE;
5407 /* Given EXP, a logical expression, set the range it is testing into
5408 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5409 actually being tested. *PLOW and *PHIGH will be made of the same
5410 type as the returned expression. If EXP is not a comparison, we
5411 will most likely not be returning a useful value and range. Set
5412 *STRICT_OVERFLOW_P to true if the return value is only valid
5413 because signed overflow is undefined; otherwise, do not change
5414 *STRICT_OVERFLOW_P. */
5416 tree
5417 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5418 bool *strict_overflow_p)
5420 enum tree_code code;
5421 tree arg0, arg1 = NULL_TREE;
5422 tree exp_type, nexp;
5423 int in_p;
5424 tree low, high;
5425 location_t loc = EXPR_LOCATION (exp);
5427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5428 and see if we can refine the range. Some of the cases below may not
5429 happen, but it doesn't seem worth worrying about this. We "continue"
5430 the outer loop when we've changed something; otherwise we "break"
5431 the switch, which will "break" the while. */
5433 in_p = 0;
5434 low = high = build_int_cst (TREE_TYPE (exp), 0);
5436 while (1)
5438 code = TREE_CODE (exp);
5439 exp_type = TREE_TYPE (exp);
5440 arg0 = NULL_TREE;
5442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5444 if (TREE_OPERAND_LENGTH (exp) > 0)
5445 arg0 = TREE_OPERAND (exp, 0);
5446 if (TREE_CODE_CLASS (code) == tcc_binary
5447 || TREE_CODE_CLASS (code) == tcc_comparison
5448 || (TREE_CODE_CLASS (code) == tcc_expression
5449 && TREE_OPERAND_LENGTH (exp) > 1))
5450 arg1 = TREE_OPERAND (exp, 1);
5452 if (arg0 == NULL_TREE)
5453 break;
5455 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5456 &high, &in_p, strict_overflow_p);
5457 if (nexp == NULL_TREE)
5458 break;
5459 exp = nexp;
5462 /* If EXP is a constant, we can evaluate whether this is true or false. */
5463 if (TREE_CODE (exp) == INTEGER_CST)
5465 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5466 exp, 0, low, 0))
5467 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5468 exp, 1, high, 1)));
5469 low = high = 0;
5470 exp = 0;
5473 *pin_p = in_p, *plow = low, *phigh = high;
5474 return exp;
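/* For example, for EXP = X + 1 > 10 with signed X the loop first
   records the range - [-, 10] for X + 1, then the PLUS_EXPR step
   moves the constant across (setting *STRICT_OVERFLOW_P, as signed
   overflow is undefined), leaving - [-, 9] for X itself, i.e. X > 9. */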
5477 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5478 a bitwise check i.e. when
5479 LOW == 0xXX...X00...0
5480 HIGH == 0xXX...X11...1
5481 Return corresponding mask in MASK and stem in VALUE. */
5483 static bool
5484 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5485 tree *value)
5487 if (TREE_CODE (low) != INTEGER_CST
5488 || TREE_CODE (high) != INTEGER_CST)
5489 return false;
5491 unsigned prec = TYPE_PRECISION (type);
5492 wide_int lo = wi::to_wide (low, prec);
5493 wide_int hi = wi::to_wide (high, prec);
5495 wide_int end_mask = lo ^ hi;
5496 if ((end_mask & (end_mask + 1)) != 0
5497 || (lo & end_mask) != 0)
5498 return false;
5500 wide_int stem_mask = ~end_mask;
5501 wide_int stem = lo & stem_mask;
5502 if (stem != (hi & stem_mask))
5503 return false;
5505 *mask = wide_int_to_tree (type, stem_mask);
5506 *value = wide_int_to_tree (type, stem);
5508 return true;
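/* For example, LOW == 0x20 and HIGH == 0x3f give END_MASK == 0x1f, a
   contiguous run of low-order ones, and a common stem of 0x20, so the
   check X in [0x20, 0x3f] becomes (X & ~0x1f) == 0x20. */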
5511 /* Helper routine for build_range_check and match.pd. Return the type to
5512 perform the check or NULL if it shouldn't be optimized. */
5514 tree
5515 range_check_type (tree etype)
5517 /* First make sure that arithmetic in this type is valid, then make sure
5518 that it wraps around. */
5519 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5520 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5522 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5524 tree utype, minv, maxv;
5526 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5527 for the type in question, as we rely on this here. */
5528 utype = unsigned_type_for (etype);
5529 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5530 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5531 build_int_cst (TREE_TYPE (maxv), 1), 1);
5532 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5534 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5535 minv, 1, maxv, 1)))
5536 etype = utype;
5537 else
5538 return NULL_TREE;
5540 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5541 etype = unsigned_type_for (etype);
5542 return etype;
5545 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5546 type, TYPE, return an expression to test if EXP is in (or out of, depending
5547 on IN_P) the range. Return 0 if the test couldn't be created. */
5549 tree
5550 build_range_check (location_t loc, tree type, tree exp, int in_p,
5551 tree low, tree high)
5553 tree etype = TREE_TYPE (exp), mask, value;
5555 /* Disable this optimization for function pointer expressions
5556 on targets that require function pointer canonicalization. */
5557 if (targetm.have_canonicalize_funcptr_for_compare ()
5558 && POINTER_TYPE_P (etype)
5559 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5560 return NULL_TREE;
5562 if (! in_p)
5564 value = build_range_check (loc, type, exp, 1, low, high);
5565 if (value != 0)
5566 return invert_truthvalue_loc (loc, value);
5568 return 0;
5571 if (low == 0 && high == 0)
5572 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5574 if (low == 0)
5575 return fold_build2_loc (loc, LE_EXPR, type, exp,
5576 fold_convert_loc (loc, etype, high));
5578 if (high == 0)
5579 return fold_build2_loc (loc, GE_EXPR, type, exp,
5580 fold_convert_loc (loc, etype, low));
5582 if (operand_equal_p (low, high, 0))
5583 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5584 fold_convert_loc (loc, etype, low));
5586 if (TREE_CODE (exp) == BIT_AND_EXPR
5587 && maskable_range_p (low, high, etype, &mask, &value))
5588 return fold_build2_loc (loc, EQ_EXPR, type,
5589 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5590 exp, mask),
5591 value);
5593 if (integer_zerop (low))
5595 if (! TYPE_UNSIGNED (etype))
5597 etype = unsigned_type_for (etype);
5598 high = fold_convert_loc (loc, etype, high);
5599 exp = fold_convert_loc (loc, etype, exp);
5601 return build_range_check (loc, type, exp, 1, 0, high);
5604 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5605 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5607 int prec = TYPE_PRECISION (etype);
5609 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5611 if (TYPE_UNSIGNED (etype))
5613 tree signed_etype = signed_type_for (etype);
5614 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5615 etype
5616 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5617 else
5618 etype = signed_etype;
5619 exp = fold_convert_loc (loc, etype, exp);
5621 return fold_build2_loc (loc, GT_EXPR, type, exp,
5622 build_int_cst (etype, 0));
5626 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5627 This requires wrap-around arithmetic for the type of the expression. */
5628 etype = range_check_type (etype);
5629 if (etype == NULL_TREE)
5630 return NULL_TREE;
5632 high = fold_convert_loc (loc, etype, high);
5633 low = fold_convert_loc (loc, etype, low);
5634 exp = fold_convert_loc (loc, etype, exp);
5636 value = const_binop (MINUS_EXPR, high, low);
5638 if (value != 0 && !TREE_OVERFLOW (value))
5639 return build_range_check (loc, type,
5640 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5641 1, build_int_cst (etype, 0), value);
5643 return 0;
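/* Illustrative sketch (added commentary, not from GCC): the final
   transformation above in plain C.  Biasing by LOW turns a
   double-ended signed range test into one unsigned comparison.  The
   helper name 'in_range' is invented; kept out of the build with
   #if 0.  */
#if 0
#include <assert.h>

static int
in_range (int x, int low, int high)
{
  /* x >= low && x <= high, via wrap-around unsigned arithmetic.  */
  return (unsigned) x - (unsigned) low <= (unsigned) high - (unsigned) low;
}

int
main (void)
{
  assert (in_range (5, 1, 10));
  assert (!in_range (0, 1, 10));
  assert (!in_range (11, 1, 10));
  assert (in_range (-3, -5, -1));
  return 0;
}
#endif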
5646 /* Return the predecessor of VAL in its type, handling the infinite case. */
5648 static tree
5649 range_predecessor (tree val)
5651 tree type = TREE_TYPE (val);
5653 if (INTEGRAL_TYPE_P (type)
5654 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5655 return 0;
5656 else
5657 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5658 build_int_cst (TREE_TYPE (val), 1), 0);
5661 /* Return the successor of VAL in its type, handling the infinite case. */
5663 static tree
5664 range_successor (tree val)
5666 tree type = TREE_TYPE (val);
5668 if (INTEGRAL_TYPE_P (type)
5669 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5670 return 0;
5671 else
5672 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5673 build_int_cst (TREE_TYPE (val), 1), 0);
5676 /* Given two ranges, see if we can merge them into one. Return 1 if we
5677 can, 0 if we can't. Set the output range into the specified parameters. */
5679 bool
5680 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5681 tree high0, int in1_p, tree low1, tree high1)
5683 int no_overlap;
5684 int subset;
5685 int temp;
5686 tree tem;
5687 int in_p;
5688 tree low, high;
5689 int lowequal = ((low0 == 0 && low1 == 0)
5690 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5691 low0, 0, low1, 0)));
5692 int highequal = ((high0 == 0 && high1 == 0)
5693 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5694 high0, 1, high1, 1)));
5696 /* Make range 0 be the range that starts first, or ends last if they
5697 start at the same value. Swap them if it isn't. */
5698 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5699 low0, 0, low1, 0))
5700 || (lowequal
5701 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5702 high1, 1, high0, 1))))
5704 temp = in0_p, in0_p = in1_p, in1_p = temp;
5705 tem = low0, low0 = low1, low1 = tem;
5706 tem = high0, high0 = high1, high1 = tem;
5709 /* If the second range is != high1 where high1 is the maximum value of
5710 the type, try first merging with the < high1 range. */
5711 if (low1
5712 && high1
5713 && TREE_CODE (low1) == INTEGER_CST
5714 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5715 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5716 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5717 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5718 && operand_equal_p (low1, high1, 0))
5720 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5721 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5722 !in1_p, NULL_TREE, range_predecessor (low1)))
5723 return true;
5724 /* Similarly, for the second range != low1 where low1 is the minimum
5725 value of the type, try first merging with the > low1 range. */
5726 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5727 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5728 !in1_p, range_successor (low1), NULL_TREE))
5729 return true;
5732 /* Now flag two cases, whether the ranges are disjoint or whether the
5733 second range is totally subsumed in the first. Note that the tests
5734 below are simplified by the ones above. */
5735 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5736 high0, 1, low1, 0));
5737 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5738 high1, 1, high0, 1));
5740 /* We now have four cases, depending on whether we are including or
5741 excluding the two ranges. */
5742 if (in0_p && in1_p)
5744 /* If they don't overlap, the result is false. If the second range
5745 is a subset it is the result. Otherwise, the range is from the start
5746 of the second to the end of the first. */
5747 if (no_overlap)
5748 in_p = 0, low = high = 0;
5749 else if (subset)
5750 in_p = 1, low = low1, high = high1;
5751 else
5752 in_p = 1, low = low1, high = high0;
5755 else if (in0_p && ! in1_p)
5757 /* If they don't overlap, the result is the first range. If they are
5758 equal, the result is false. If the second range is a subset of the
5759 first, and the ranges begin at the same place, we go from just after
5760 the end of the second range to the end of the first. If the second
5761 range is not a subset of the first, or if it is a subset and both
5762 ranges end at the same place, the range starts at the start of the
5763 first range and ends just before the second range.
5764 Otherwise, we can't describe this as a single range. */
5765 if (no_overlap)
5766 in_p = 1, low = low0, high = high0;
5767 else if (lowequal && highequal)
5768 in_p = 0, low = high = 0;
5769 else if (subset && lowequal)
5771 low = range_successor (high1);
5772 high = high0;
5773 in_p = 1;
5774 if (low == 0)
5776 /* We are in the weird situation where high0 > high1 but
5777 high1 has no successor. Punt. */
5778 return 0;
5781 else if (! subset || highequal)
5783 low = low0;
5784 high = range_predecessor (low1);
5785 in_p = 1;
5786 if (high == 0)
5788 /* low0 < low1 but low1 has no predecessor. Punt. */
5789 return 0;
5792 else
5793 return 0;
5796 else if (! in0_p && in1_p)
5798 /* If they don't overlap, the result is the second range. If the second
5799 is a subset of the first, the result is false. Otherwise,
5800 the range starts just after the first range and ends at the
5801 end of the second. */
5802 if (no_overlap)
5803 in_p = 1, low = low1, high = high1;
5804 else if (subset || highequal)
5805 in_p = 0, low = high = 0;
5806 else
5808 low = range_successor (high0);
5809 high = high1;
5810 in_p = 1;
5811 if (low == 0)
5813 /* high1 > high0 but high0 has no successor. Punt. */
5814 return 0;
5819 else
5821 /* The case where we are excluding both ranges. Here the complex case
5822 is if they don't overlap. In that case, the only time we have a
5823 range is if they are adjacent. If the second is a subset of the
5824 first, the result is the first. Otherwise, the range to exclude
5825 starts at the beginning of the first range and ends at the end of the
5826 second. */
5827 if (no_overlap)
5829 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5830 range_successor (high0),
5831 1, low1, 0)))
5832 in_p = 0, low = low0, high = high1;
5833 else
5835 /* Canonicalize - [min, x] into - [-, x]. */
5836 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5837 switch (TREE_CODE (TREE_TYPE (low0)))
5839 case ENUMERAL_TYPE:
5840 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5841 GET_MODE_BITSIZE
5842 (TYPE_MODE (TREE_TYPE (low0)))))
5843 break;
5844 /* FALLTHROUGH */
5845 case INTEGER_TYPE:
5846 if (tree_int_cst_equal (low0,
5847 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5848 low0 = 0;
5849 break;
5850 case POINTER_TYPE:
5851 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5852 && integer_zerop (low0))
5853 low0 = 0;
5854 break;
5855 default:
5856 break;
5859 /* Canonicalize - [x, max] into - [x, -]. */
5860 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5861 switch (TREE_CODE (TREE_TYPE (high1)))
5863 case ENUMERAL_TYPE:
5864 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5865 GET_MODE_BITSIZE
5866 (TYPE_MODE (TREE_TYPE (high1)))))
5867 break;
5868 /* FALLTHROUGH */
5869 case INTEGER_TYPE:
5870 if (tree_int_cst_equal (high1,
5871 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5872 high1 = 0;
5873 break;
5874 case POINTER_TYPE:
5875 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5876 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5877 high1, 1,
5878 build_int_cst (TREE_TYPE (high1), 1),
5879 1)))
5880 high1 = 0;
5881 break;
5882 default:
5883 break;
5886 /* The ranges might also be adjacent between the maximum and
5887 minimum values of the given type. For
5888 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5889 return + [x + 1, y - 1]. */
5890 if (low0 == 0 && high1 == 0)
5892 low = range_successor (high0);
5893 high = range_predecessor (low1);
5894 if (low == 0 || high == 0)
5895 return 0;
5897 in_p = 1;
5899 else
5900 return 0;
5903 else if (subset)
5904 in_p = 0, low = low0, high = high0;
5905 else
5906 in_p = 0, low = low0, high = high1;
5909 *pin_p = in_p, *plow = low, *phigh = high;
5910 return 1;
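/* Illustrative sketch (added commentary, not from GCC): two of the
   merges performed above, verified by brute force over a small
   domain.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -50; x <= 50; x++)
    {
      /* in && in, overlapping: + [1, 10] and + [5, 20] merge to
	 + [5, 10].  */
      assert (((x >= 1 && x <= 10) && (x >= 5 && x <= 20))
	      == (x >= 5 && x <= 10));
      /* out && out, disjoint but covering both ends: - [-, 3] and
	 - [8, -] merge to + [4, 7].  */
      assert ((!(x <= 3) && !(x >= 8)) == (x >= 4 && x <= 7));
    }
  return 0;
}
#endif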
5914 /* Subroutine of fold, looking inside expressions of the form
5915 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5916 are the three operands of the COND_EXPR. This function is
5917 being used also to optimize A op B ? C : A, by reversing the
5918 comparison first.
5920 Return a folded expression whose code is not a COND_EXPR
5921 anymore, or NULL_TREE if no folding opportunity is found. */
5923 static tree
5924 fold_cond_expr_with_comparison (location_t loc, tree type,
5925 enum tree_code comp_code,
5926 tree arg00, tree arg01, tree arg1, tree arg2)
5928 tree arg1_type = TREE_TYPE (arg1);
5929 tree tem;
5931 STRIP_NOPS (arg1);
5932 STRIP_NOPS (arg2);
5934 /* If we have A op 0 ? A : -A, consider applying the following
5935 transformations:
5937 A == 0? A : -A same as -A
5938 A != 0? A : -A same as A
5939 A >= 0? A : -A same as abs (A)
5940 A > 0? A : -A same as abs (A)
5941 A <= 0? A : -A same as -abs (A)
5942 A < 0? A : -A same as -abs (A)
5944 None of these transformations work for modes with signed
5945 zeros. If A is +/-0, the first two transformations will
5946 change the sign of the result (from +0 to -0, or vice
5947 versa). The last four will fix the sign of the result,
5948 even though the original expressions could be positive or
5949 negative, depending on the sign of A.
5951 Note that all these transformations are correct if A is
5952 NaN, since the two alternatives (A and -A) are also NaNs. */
5953 if (!HONOR_SIGNED_ZEROS (type)
5954 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5955 ? real_zerop (arg01)
5956 : integer_zerop (arg01))
5957 && ((TREE_CODE (arg2) == NEGATE_EXPR
5958 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5959 /* In the case that A is of the form X-Y, '-A' (arg2) may
5960 have already been folded to Y-X; check for that. */
5961 || (TREE_CODE (arg1) == MINUS_EXPR
5962 && TREE_CODE (arg2) == MINUS_EXPR
5963 && operand_equal_p (TREE_OPERAND (arg1, 0),
5964 TREE_OPERAND (arg2, 1), 0)
5965 && operand_equal_p (TREE_OPERAND (arg1, 1),
5966 TREE_OPERAND (arg2, 0), 0))))
5967 switch (comp_code)
5969 case EQ_EXPR:
5970 case UNEQ_EXPR:
5971 tem = fold_convert_loc (loc, arg1_type, arg1);
5972 return fold_convert_loc (loc, type, negate_expr (tem));
5973 case NE_EXPR:
5974 case LTGT_EXPR:
5975 return fold_convert_loc (loc, type, arg1);
5976 case UNGE_EXPR:
5977 case UNGT_EXPR:
5978 if (flag_trapping_math)
5979 break;
5980 /* Fall through. */
5981 case GE_EXPR:
5982 case GT_EXPR:
5983 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5984 break;
5985 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5986 return fold_convert_loc (loc, type, tem);
5987 case UNLE_EXPR:
5988 case UNLT_EXPR:
5989 if (flag_trapping_math)
5990 break;
5991 /* FALLTHRU */
5992 case LE_EXPR:
5993 case LT_EXPR:
5994 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5995 break;
5996 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5997 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5999 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6000 is not: it invokes UB both in abs and in negating its result.
6001 So, use ABSU_EXPR instead. */
6002 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6003 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6004 tem = negate_expr (tem);
6005 return fold_convert_loc (loc, type, tem);
6007 else
6009 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6010 return negate_expr (fold_convert_loc (loc, type, tem));
6012 default:
6013 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6014 break;
6017 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6018 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6019 both transformations are correct when A is NaN: A != 0
6020 is then true, and A == 0 is false. */
6022 if (!HONOR_SIGNED_ZEROS (type)
6023 && integer_zerop (arg01) && integer_zerop (arg2))
6025 if (comp_code == NE_EXPR)
6026 return fold_convert_loc (loc, type, arg1);
6027 else if (comp_code == EQ_EXPR)
6028 return build_zero_cst (type);
6031 /* Try some transformations of A op B ? A : B.
6033 A == B? A : B same as B
6034 A != B? A : B same as A
6035 A >= B? A : B same as max (A, B)
6036 A > B? A : B same as max (B, A)
6037 A <= B? A : B same as min (A, B)
6038 A < B? A : B same as min (B, A)
6040 As above, these transformations don't work in the presence
6041 of signed zeros. For example, if A and B are zeros of
6042 opposite sign, the first two transformations will change
6043 the sign of the result. In the last four, the original
6044 expressions give different results for (A=+0, B=-0) and
6045 (A=-0, B=+0), but the transformed expressions do not.
6047 The first two transformations are correct if either A or B
6048 is a NaN. In the first transformation, the condition will
6049 be false, and B will indeed be chosen. In the case of the
6050 second transformation, the condition A != B will be true,
6051 and A will be chosen.
6053 The conversions to max() and min() are not correct if B is
6054 a number and A is not. The conditions in the original
6055 expressions will be false, so all four give B. The min()
6056 and max() versions would give a NaN instead. */
6057 if (!HONOR_SIGNED_ZEROS (type)
6058 && operand_equal_for_comparison_p (arg01, arg2)
6059 /* Avoid these transformations if the COND_EXPR may be used
6060 as an lvalue in the C++ front-end. PR c++/19199. */
6061 && (in_gimple_form
6062 || VECTOR_TYPE_P (type)
6063 || (! lang_GNU_CXX ()
6064 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6065 || ! maybe_lvalue_p (arg1)
6066 || ! maybe_lvalue_p (arg2)))
6068 tree comp_op0 = arg00;
6069 tree comp_op1 = arg01;
6070 tree comp_type = TREE_TYPE (comp_op0);
6072 switch (comp_code)
6074 case EQ_EXPR:
6075 return fold_convert_loc (loc, type, arg2);
6076 case NE_EXPR:
6077 return fold_convert_loc (loc, type, arg1);
6078 case LE_EXPR:
6079 case LT_EXPR:
6080 case UNLE_EXPR:
6081 case UNLT_EXPR:
6082 /* In C++ a ?: expression can be an lvalue, so put the
6083 operand which will be used if they are equal first
6084 so that we can convert this back to the
6085 corresponding COND_EXPR. */
6086 if (!HONOR_NANS (arg1))
6088 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6089 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6090 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6091 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6092 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6093 comp_op1, comp_op0);
6094 return fold_convert_loc (loc, type, tem);
6096 break;
6097 case GE_EXPR:
6098 case GT_EXPR:
6099 case UNGE_EXPR:
6100 case UNGT_EXPR:
6101 if (!HONOR_NANS (arg1))
6103 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6104 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6105 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6106 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6107 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6108 comp_op1, comp_op0);
6109 return fold_convert_loc (loc, type, tem);
6111 break;
6112 case UNEQ_EXPR:
6113 if (!HONOR_NANS (arg1))
6114 return fold_convert_loc (loc, type, arg2);
6115 break;
6116 case LTGT_EXPR:
6117 if (!HONOR_NANS (arg1))
6118 return fold_convert_loc (loc, type, arg1);
6119 break;
6120 default:
6121 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6122 break;
6126 return NULL_TREE;
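/* Illustrative sketch (added commentary, not from GCC): the integer
   instances of the A op 0 ? A : -A and A op B ? A : B tables above,
   checked by brute force.  The signed-zero and NaN caveats discussed
   above do not apply to int.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  for (int a = -20; a <= 20; a++)
    {
      assert ((a >= 0 ? a : -a) == abs (a));	/* A >= 0 ? A : -A  */
      assert ((a <= 0 ? a : -a) == -abs (a));	/* A <= 0 ? A : -A  */
      for (int b = -20; b <= 20; b++)
	{
	  assert ((a <= b ? a : b) == (a < b ? a : b));	/* MIN_EXPR  */
	  assert ((a >= b ? a : b) == (a > b ? a : b));	/* MAX_EXPR  */
	}
    }
  return 0;
}
#endif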
6131 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6132 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6133 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6134 false) >= 2)
6135 #endif
6137 /* EXP is some logical combination of boolean tests. See if we can
6138 merge it into some range test. Return the new tree if so. */
6140 static tree
6141 fold_range_test (location_t loc, enum tree_code code, tree type,
6142 tree op0, tree op1)
6144 int or_op = (code == TRUTH_ORIF_EXPR
6145 || code == TRUTH_OR_EXPR);
6146 int in0_p, in1_p, in_p;
6147 tree low0, low1, low, high0, high1, high;
6148 bool strict_overflow_p = false;
6149 tree tem, lhs, rhs;
6150 const char * const warnmsg = G_("assuming signed overflow does not occur "
6151 "when simplifying range test");
6153 if (!INTEGRAL_TYPE_P (type))
6154 return 0;
6156 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6157 /* If op0 is known true or false and this is a short-circuiting
6158 operation we must not merge with op1 since that makes side-effects
6159 unconditional. So special-case this. */
6160 if (!lhs
6161 && ((code == TRUTH_ORIF_EXPR && in0_p)
6162 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6163 return op0;
6164 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6166 /* If this is an OR operation, invert both sides; we will invert
6167 again at the end. */
6168 if (or_op)
6169 in0_p = ! in0_p, in1_p = ! in1_p;
6171 /* If both expressions are the same, if we can merge the ranges, and we
6172 can build the range test, return it or it inverted. If one of the
6173 ranges is always true or always false, consider it to be the same
6174 expression as the other. */
6175 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6176 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6177 in1_p, low1, high1)
6178 && (tem = (build_range_check (loc, type,
6179 lhs != 0 ? lhs
6180 : rhs != 0 ? rhs : integer_zero_node,
6181 in_p, low, high))) != 0)
6183 if (strict_overflow_p)
6184 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6185 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6188 /* On machines where branches are expensive, if this is a
6189 short-circuited branch and the underlying object on both sides
6190 is the same, make a non-short-circuit operation. */
6191 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6192 if (param_logical_op_non_short_circuit != -1)
6193 logical_op_non_short_circuit
6194 = param_logical_op_non_short_circuit;
6195 if (logical_op_non_short_circuit
6196 && !sanitize_coverage_p ()
6197 && lhs != 0 && rhs != 0
6198 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6199 && operand_equal_p (lhs, rhs, 0))
6201 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6202 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6203 which cases we can't do this. */
6204 if (simple_operand_p (lhs))
6205 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6206 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6207 type, op0, op1);
6209 else if (!lang_hooks.decls.global_bindings_p ()
6210 && !CONTAINS_PLACEHOLDER_P (lhs))
6212 tree common = save_expr (lhs);
6214 if ((lhs = build_range_check (loc, type, common,
6215 or_op ? ! in0_p : in0_p,
6216 low0, high0)) != 0
6217 && (rhs = build_range_check (loc, type, common,
6218 or_op ? ! in1_p : in1_p,
6219 low1, high1)) != 0)
6221 if (strict_overflow_p)
6222 fold_overflow_warning (warnmsg,
6223 WARN_STRICT_OVERFLOW_COMPARISON);
6224 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6225 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6226 type, lhs, rhs);
6231 return 0;
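/* Illustrative sketch (added commentary, not from GCC): both rewrites
   above applied to a digit test.  The range merge turns two
   comparisons into one; the non-short-circuit form evaluates both
   operands unconditionally, which is safe because 'ch' is a simple
   operand without side effects.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int ch = -300; ch <= 300; ch++)
    {
      int ref = ch >= '0' && ch <= '9';
      /* Merged range check: a single unsigned comparison.  */
      assert (ref == ((unsigned) ch - '0' <= 9u));
      /* TRUTH_AND_EXPR instead of TRUTH_ANDIF_EXPR.  */
      assert (ref == ((ch >= '0') & (ch <= '9')));
    }
  return 0;
}
#endif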
6234 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6235 bit value. Arrange things so the extra bits will be set to zero if and
6236 only if C is sign-extended to its full width. If MASK is nonzero,
6237 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6239 static tree
6240 unextend (tree c, int p, int unsignedp, tree mask)
6242 tree type = TREE_TYPE (c);
6243 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6244 tree temp;
6246 if (p == modesize || unsignedp)
6247 return c;
6249 /* We work by getting just the sign bit into the low-order bit, then
6250 into the high-order bit, then sign-extend. We then XOR that value
6251 with C. */
6252 temp = build_int_cst (TREE_TYPE (c),
6253 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6255 /* We must use a signed type in order to get an arithmetic right shift.
6256 However, we must also avoid introducing accidental overflows, so that
6257 a subsequent call to integer_zerop will work. Hence we must
6258 do the type conversion here. At this point, the constant is either
6259 zero or one, and the conversion to a signed type can never overflow.
6260 We could get an overflow if this conversion is done anywhere else. */
6261 if (TYPE_UNSIGNED (type))
6262 temp = fold_convert (signed_type_for (type), temp);
6264 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6265 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6266 if (mask != 0)
6267 temp = const_binop (BIT_AND_EXPR, temp,
6268 fold_convert (TREE_TYPE (c), mask));
6269 /* If necessary, convert the type back to match the type of C. */
6270 if (TYPE_UNSIGNED (type))
6271 temp = fold_convert (type, temp);
6273 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
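/* Illustrative sketch (added commentary, not from GCC): the
   arithmetic-right-shift idiom the code above relies on.  Shifting in
   a signed type replicates the sign bit, which sign-extends a P-bit
   value to the full mode width.  Assumes GCC's arithmetic right shift
   of negative values and modulo conversion to signed types.  The
   helper name 'sign_extend' is invented; kept out of the build with
   #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int32_t
sign_extend (uint32_t c, int p)
{
  /* Move bit P-1 to the top, then arithmetic-shift back down.  */
  return (int32_t) (c << (32 - p)) >> (32 - p);
}

int
main (void)
{
  assert (sign_extend (0x7f, 8) == 127);
  assert (sign_extend (0x80, 8) == -128);
  assert (sign_extend (0xff, 8) == -1);
  return 0;
}
#endif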
6276 /* For an expression that has the form
6277 (A && B) || ~B
6279 (A || B) && ~B,
6280 we can drop one of the inner expressions and simplify to
6281 A || ~B
6283 A && ~B
6284 LOC is the location of the resulting expression. OP is the inner
6285 logical operation (the left-hand side in the examples above), while
6286 CMPOP is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6287 removing a condition that guards another, as in
6288 (A != NULL && A->...) || A == NULL
6289 which we must not transform. If RHS_ONLY is true, only eliminate the
6290 right-most operand of the inner logical operation. */
6292 static tree
6293 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6294 bool rhs_only)
6296 tree type = TREE_TYPE (cmpop);
6297 enum tree_code code = TREE_CODE (cmpop);
6298 enum tree_code truthop_code = TREE_CODE (op);
6299 tree lhs = TREE_OPERAND (op, 0);
6300 tree rhs = TREE_OPERAND (op, 1);
6301 tree orig_lhs = lhs, orig_rhs = rhs;
6302 enum tree_code rhs_code = TREE_CODE (rhs);
6303 enum tree_code lhs_code = TREE_CODE (lhs);
6304 enum tree_code inv_code;
6306 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6307 return NULL_TREE;
6309 if (TREE_CODE_CLASS (code) != tcc_comparison)
6310 return NULL_TREE;
6312 if (rhs_code == truthop_code)
6314 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6315 if (newrhs != NULL_TREE)
6317 rhs = newrhs;
6318 rhs_code = TREE_CODE (rhs);
6321 if (lhs_code == truthop_code && !rhs_only)
6323 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6324 if (newlhs != NULL_TREE)
6326 lhs = newlhs;
6327 lhs_code = TREE_CODE (lhs);
6331 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6332 if (inv_code == rhs_code
6333 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6334 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6335 return lhs;
6336 if (!rhs_only && inv_code == lhs_code
6337 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6338 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6339 return rhs;
6340 if (rhs != orig_rhs || lhs != orig_lhs)
6341 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6342 lhs, rhs);
6343 return NULL_TREE;
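/* Illustrative sketch (added commentary, not from GCC): the boolean
   identity behind the transformation above, over all assignments.
   Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (((a && b) || !b) == (a || !b));
	assert (((a || b) && !b) == (a && !b));
      }
  return 0;
}
#endif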
6346 /* Find ways of folding logical expressions of LHS and RHS:
6347 Try to merge two comparisons to the same innermost item.
6348 Look for range tests like "ch >= '0' && ch <= '9'".
6349 Look for combinations of simple terms on machines with expensive branches
6350 and evaluate the RHS unconditionally.
6352 For example, if we have p->a == 2 && p->b == 4 and we can make an
6353 object large enough to span both A and B, we can do this with a comparison
6354 against the object ANDed with a mask.
6356 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6357 operations to do this with one comparison.
6359 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6360 function and the one above.
6362 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6363 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6365 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6366 two operands.
6368 We return the simplified tree or 0 if no optimization is possible. */
6370 static tree
6371 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6372 tree lhs, tree rhs)
6374 /* If this is the "or" of two comparisons, we can do something if
6375 the comparisons are NE_EXPR. If this is the "and", we can do something
6376 if the comparisons are EQ_EXPR. I.e.,
6377 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6379 WANTED_CODE is this operation code. For single bit fields, we can
6380 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6381 comparison for one-bit fields. */
6383 enum tree_code wanted_code;
6384 enum tree_code lcode, rcode;
6385 tree ll_arg, lr_arg, rl_arg, rr_arg;
6386 tree ll_inner, lr_inner, rl_inner, rr_inner;
6387 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6388 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6389 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6390 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6391 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6392 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6393 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6394 scalar_int_mode lnmode, rnmode;
6395 tree ll_mask, lr_mask, rl_mask, rr_mask;
6396 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6397 tree l_const, r_const;
6398 tree lntype, rntype, result;
6399 HOST_WIDE_INT first_bit, end_bit;
6400 int volatilep;
6402 /* Start by getting the comparison codes. Fail if anything is volatile.
6403 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6404 it were surrounded with a NE_EXPR. */
6406 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6407 return 0;
6409 lcode = TREE_CODE (lhs);
6410 rcode = TREE_CODE (rhs);
6412 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6414 lhs = build2 (NE_EXPR, truth_type, lhs,
6415 build_int_cst (TREE_TYPE (lhs), 0));
6416 lcode = NE_EXPR;
6419 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6421 rhs = build2 (NE_EXPR, truth_type, rhs,
6422 build_int_cst (TREE_TYPE (rhs), 0));
6423 rcode = NE_EXPR;
6426 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6427 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6428 return 0;
6430 ll_arg = TREE_OPERAND (lhs, 0);
6431 lr_arg = TREE_OPERAND (lhs, 1);
6432 rl_arg = TREE_OPERAND (rhs, 0);
6433 rr_arg = TREE_OPERAND (rhs, 1);
6435 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6436 if (simple_operand_p (ll_arg)
6437 && simple_operand_p (lr_arg))
6439 if (operand_equal_p (ll_arg, rl_arg, 0)
6440 && operand_equal_p (lr_arg, rr_arg, 0))
6442 result = combine_comparisons (loc, code, lcode, rcode,
6443 truth_type, ll_arg, lr_arg);
6444 if (result)
6445 return result;
6447 else if (operand_equal_p (ll_arg, rr_arg, 0)
6448 && operand_equal_p (lr_arg, rl_arg, 0))
6450 result = combine_comparisons (loc, code, lcode,
6451 swap_tree_comparison (rcode),
6452 truth_type, ll_arg, lr_arg);
6453 if (result)
6454 return result;
6458 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6461 /* If the RHS can be evaluated unconditionally and its operands are
6462 simple, it wins to evaluate the RHS unconditionally on machines
6463 with expensive branches. In this case, this isn't a comparison
6464 that can be merged. */
6466 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6467 false) >= 2
6468 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6469 && simple_operand_p (rl_arg)
6470 && simple_operand_p (rr_arg))
6472 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6473 if (code == TRUTH_OR_EXPR
6474 && lcode == NE_EXPR && integer_zerop (lr_arg)
6475 && rcode == NE_EXPR && integer_zerop (rr_arg)
6476 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6477 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6478 return build2_loc (loc, NE_EXPR, truth_type,
6479 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6480 ll_arg, rl_arg),
6481 build_int_cst (TREE_TYPE (ll_arg), 0));
6483 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6484 if (code == TRUTH_AND_EXPR
6485 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6486 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6487 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6488 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6489 return build2_loc (loc, EQ_EXPR, truth_type,
6490 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6491 ll_arg, rl_arg),
6492 build_int_cst (TREE_TYPE (ll_arg), 0));
6495 /* See if the comparisons can be merged. Then get all the parameters for
6496 each side. */
6498 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6499 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6500 return 0;
6502 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6503 volatilep = 0;
6504 ll_inner = decode_field_reference (loc, &ll_arg,
6505 &ll_bitsize, &ll_bitpos, &ll_mode,
6506 &ll_unsignedp, &ll_reversep, &volatilep,
6507 &ll_mask, &ll_and_mask);
6508 lr_inner = decode_field_reference (loc, &lr_arg,
6509 &lr_bitsize, &lr_bitpos, &lr_mode,
6510 &lr_unsignedp, &lr_reversep, &volatilep,
6511 &lr_mask, &lr_and_mask);
6512 rl_inner = decode_field_reference (loc, &rl_arg,
6513 &rl_bitsize, &rl_bitpos, &rl_mode,
6514 &rl_unsignedp, &rl_reversep, &volatilep,
6515 &rl_mask, &rl_and_mask);
6516 rr_inner = decode_field_reference (loc, &rr_arg,
6517 &rr_bitsize, &rr_bitpos, &rr_mode,
6518 &rr_unsignedp, &rr_reversep, &volatilep,
6519 &rr_mask, &rr_and_mask);
6521 /* The inner operation on the lhs of each comparison must be the
6522 same if we are to be able to do anything.
6523 Then see if we have constants. If not, the same must be true for
6524 the rhs's. */
6525 if (volatilep
6526 || ll_reversep != rl_reversep
6527 || ll_inner == 0 || rl_inner == 0
6528 || ! operand_equal_p (ll_inner, rl_inner, 0))
6529 return 0;
6531 if (TREE_CODE (lr_arg) == INTEGER_CST
6532 && TREE_CODE (rr_arg) == INTEGER_CST)
6534 l_const = lr_arg, r_const = rr_arg;
6535 lr_reversep = ll_reversep;
6537 else if (lr_reversep != rr_reversep
6538 || lr_inner == 0 || rr_inner == 0
6539 || ! operand_equal_p (lr_inner, rr_inner, 0))
6540 return 0;
6541 else
6542 l_const = r_const = 0;
6544 /* If either comparison code is not correct for our logical operation,
6545 fail. However, we can convert a one-bit comparison against zero into
6546 the opposite comparison against that bit being set in the field. */
6548 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6549 if (lcode != wanted_code)
6551 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6553 /* Make the left operand unsigned, since we are only interested
6554 in the value of one bit. Otherwise we are doing the wrong
6555 thing below. */
6556 ll_unsignedp = 1;
6557 l_const = ll_mask;
6559 else
6560 return 0;
6563 /* This is analogous to the code for l_const above. */
6564 if (rcode != wanted_code)
6566 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6568 rl_unsignedp = 1;
6569 r_const = rl_mask;
6571 else
6572 return 0;
6575 /* See if we can find a mode that contains both fields being compared on
6576 the left. If we can't, fail. Otherwise, update all constants and masks
6577 to be relative to a field of that size. */
6578 first_bit = MIN (ll_bitpos, rl_bitpos);
6579 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6580 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6581 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6582 volatilep, &lnmode))
6583 return 0;
6585 lnbitsize = GET_MODE_BITSIZE (lnmode);
6586 lnbitpos = first_bit & ~ (lnbitsize - 1);
6587 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6588 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6590 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6592 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6593 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6596 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6597 size_int (xll_bitpos));
6598 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6599 size_int (xrl_bitpos));
6600 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6601 return 0;
6603 if (l_const)
6605 l_const = fold_convert_loc (loc, lntype, l_const);
6606 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6607 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6608 if (l_const == NULL_TREE)
6609 return 0;
6610 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6611 fold_build1_loc (loc, BIT_NOT_EXPR,
6612 lntype, ll_mask))))
6614 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6616 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6619 if (r_const)
6621 r_const = fold_convert_loc (loc, lntype, r_const);
6622 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6623 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6624 if (r_const == NULL_TREE)
6625 return 0;
6626 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6627 fold_build1_loc (loc, BIT_NOT_EXPR,
6628 lntype, rl_mask))))
6630 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6632 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6636 /* If the right sides are not constant, do the same for them. Also,
6637 disallow this optimization if a size, signedness or storage order
6638 mismatch occurs between the left and right sides. */
6639 if (l_const == 0)
6641 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6642 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6643 || ll_reversep != lr_reversep
6644 /* Make sure the two fields on the right
6645 correspond to the left without being swapped. */
6646 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6647 return 0;
6649 first_bit = MIN (lr_bitpos, rr_bitpos);
6650 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6651 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6652 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6653 volatilep, &rnmode))
6654 return 0;
6656 rnbitsize = GET_MODE_BITSIZE (rnmode);
6657 rnbitpos = first_bit & ~ (rnbitsize - 1);
6658 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6659 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6661 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6663 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6664 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6667 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6668 rntype, lr_mask),
6669 size_int (xlr_bitpos));
6670 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6671 rntype, rr_mask),
6672 size_int (xrr_bitpos));
6673 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6674 return 0;
6676 /* Make a mask that corresponds to both fields being compared.
6677 Do this for both items being compared. If the operands are the
6678 same size and the bits being compared are in the same position
6679 then we can do this by masking both and comparing the masked
6680 results. */
6681 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6682 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6683 if (lnbitsize == rnbitsize
6684 && xll_bitpos == xlr_bitpos
6685 && lnbitpos >= 0
6686 && rnbitpos >= 0)
6688 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6689 lntype, lnbitsize, lnbitpos,
6690 ll_unsignedp || rl_unsignedp, ll_reversep);
6691 if (! all_ones_mask_p (ll_mask, lnbitsize))
6692 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6694 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6695 rntype, rnbitsize, rnbitpos,
6696 lr_unsignedp || rr_unsignedp, lr_reversep);
6697 if (! all_ones_mask_p (lr_mask, rnbitsize))
6698 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6700 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6703 /* There is still another way we can do something: If both pairs of
6704 fields being compared are adjacent, we may be able to make a wider
6705 field containing them both.
6707 Note that we still must mask the lhs/rhs expressions. Furthermore,
6708 the mask must be shifted to account for the shift done by
6709 make_bit_field_ref. */
6710 if (((ll_bitsize + ll_bitpos == rl_bitpos
6711 && lr_bitsize + lr_bitpos == rr_bitpos)
6712 || (ll_bitpos == rl_bitpos + rl_bitsize
6713 && lr_bitpos == rr_bitpos + rr_bitsize))
6714 && ll_bitpos >= 0
6715 && rl_bitpos >= 0
6716 && lr_bitpos >= 0
6717 && rr_bitpos >= 0)
6719 tree type;
6721 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6722 ll_bitsize + rl_bitsize,
6723 MIN (ll_bitpos, rl_bitpos),
6724 ll_unsignedp, ll_reversep);
6725 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6726 lr_bitsize + rr_bitsize,
6727 MIN (lr_bitpos, rr_bitpos),
6728 lr_unsignedp, lr_reversep);
6730 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6731 size_int (MIN (xll_bitpos, xrl_bitpos)));
6732 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6733 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6734 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6735 return 0;
6737 /* Convert to the smaller type before masking out unwanted bits. */
6738 type = lntype;
6739 if (lntype != rntype)
6741 if (lnbitsize > rnbitsize)
6743 lhs = fold_convert_loc (loc, rntype, lhs);
6744 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6745 type = rntype;
6747 else if (lnbitsize < rnbitsize)
6749 rhs = fold_convert_loc (loc, lntype, rhs);
6750 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6751 type = lntype;
6755 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6756 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6758 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6759 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6761 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6764 return 0;
6767 /* Handle the case of comparisons with constants. If there is something in
6768 common between the masks, those bits of the constants must be the same.
6769 If not, the condition is always false. Test for this to avoid generating
6770 incorrect code below. */
6771 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6772 if (! integer_zerop (result)
6773 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6774 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6776 if (wanted_code == NE_EXPR)
6778 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6779 return constant_boolean_node (true, truth_type);
6781 else
6783 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6784 return constant_boolean_node (false, truth_type);
6788 if (lnbitpos < 0)
6789 return 0;
6791 /* Construct the expression we will return. First get the component
6792 reference we will make. Unless the mask is all ones the width of
6793 that field, perform the mask operation. Then compare with the
6794 merged constant. */
6795 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6796 lntype, lnbitsize, lnbitpos,
6797 ll_unsignedp || rl_unsignedp, ll_reversep);
6799 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6800 if (! all_ones_mask_p (ll_mask, lnbitsize))
6801 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6803 return build2_loc (loc, wanted_code, truth_type, result,
6804 const_binop (BIT_IOR_EXPR, l_const, r_const));
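/* Illustrative sketch (added commentary, not from GCC): the
   unconditional-RHS rewrites described above, verified by brute
   force.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -8; a <= 8; a++)
    for (int b = -8; b <= 8; b++)
      {
	assert ((a != 0 || b != 0) == ((a | b) != 0));
	assert ((a == 0 && b == 0) == ((a | b) == 0));
      }
  return 0;
}
#endif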
6807 /* T is an integer expression that is being multiplied, divided, or taken a
6808 modulus (CODE says which and what kind of divide or modulus) by a
6809 constant C. See if we can eliminate that operation by folding it with
6810 other operations already in T. WIDE_TYPE, if non-null, is a type that
6811 should be used for the computation if wider than our type.
6813 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6814 (X * 2) + (Y * 4). We must, however, be assured that either the original
6815 expression would not overflow or that overflow is undefined for the type
6816 in the language in question.
6818 If we return a non-null expression, it is an equivalent form of the
6819 original computation, but need not be in the original type.
6821 We set *STRICT_OVERFLOW_P to true if the return value depends on
6822 signed overflow being undefined. Otherwise we do not change
6823 *STRICT_OVERFLOW_P. */
6825 static tree
6826 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6827 bool *strict_overflow_p)
6829 /* To avoid exponential search depth, refuse to allow recursion past
6830 three levels. Beyond that (1) it's highly unlikely that we'll find
6831 something interesting and (2) we've probably processed it before
6832 when we built the inner expression. */
6834 static int depth;
6835 tree ret;
6837 if (depth > 3)
6838 return NULL;
6840 depth++;
6841 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6842 depth--;
6844 return ret;
6847 static tree
6848 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6849 bool *strict_overflow_p)
6851 tree type = TREE_TYPE (t);
6852 enum tree_code tcode = TREE_CODE (t);
6853 tree ctype = (wide_type != 0
6854 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6855 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6856 ? wide_type : type);
6857 tree t1, t2;
6858 int same_p = tcode == code;
6859 tree op0 = NULL_TREE, op1 = NULL_TREE;
6860 bool sub_strict_overflow_p;
6862 /* Don't deal with constants of zero here; they confuse the code below. */
6863 if (integer_zerop (c))
6864 return NULL_TREE;
6866 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6867 op0 = TREE_OPERAND (t, 0);
6869 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6870 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6872 /* Note that we need not handle conditional operations here since fold
6873 already handles those cases. So just do arithmetic here. */
6874 switch (tcode)
6876 case INTEGER_CST:
6877 /* For a constant, we can always simplify if we are a multiply
6878 or (for divide and modulus) if it is a multiple of our constant. */
6879 if (code == MULT_EXPR
6880 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6881 TYPE_SIGN (type)))
6883 tree tem = const_binop (code, fold_convert (ctype, t),
6884 fold_convert (ctype, c));
6885 /* If the multiplication overflowed, we lost information on it.
6886 See PR68142 and PR69845. */
6887 if (TREE_OVERFLOW (tem))
6888 return NULL_TREE;
6889 return tem;
6891 break;
6893 CASE_CONVERT: case NON_LVALUE_EXPR:
6894 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6895 break;
6896 /* If op0 is an expression ... */
6897 if ((COMPARISON_CLASS_P (op0)
6898 || UNARY_CLASS_P (op0)
6899 || BINARY_CLASS_P (op0)
6900 || VL_EXP_CLASS_P (op0)
6901 || EXPRESSION_CLASS_P (op0))
6902 /* ... and has wrapping overflow, and its type is smaller
6903 than ctype, then we cannot pass through as widening. */
6904 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6905 && (TYPE_PRECISION (ctype)
6906 > TYPE_PRECISION (TREE_TYPE (op0))))
6907 /* ... or this is a truncation (t is narrower than op0),
6908 then we cannot pass through this narrowing. */
6909 || (TYPE_PRECISION (type)
6910 < TYPE_PRECISION (TREE_TYPE (op0)))
6911 /* ... or signedness changes for division or modulus,
6912 then we cannot pass through this conversion. */
6913 || (code != MULT_EXPR
6914 && (TYPE_UNSIGNED (ctype)
6915 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6916 /* ... or has undefined overflow while the converted to
6917 type has not, we cannot do the operation in the inner type
6918 as that would introduce undefined overflow. */
6919 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6920 && !TYPE_OVERFLOW_UNDEFINED (type))))
6921 break;
6923 /* Pass the constant down and see if we can make a simplification. If
6924 we can, replace this expression with the inner simplification for
6925 possible later conversion to our or some other type. */
6926 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6927 && TREE_CODE (t2) == INTEGER_CST
6928 && !TREE_OVERFLOW (t2)
6929 && (t1 = extract_muldiv (op0, t2, code,
6930 code == MULT_EXPR ? ctype : NULL_TREE,
6931 strict_overflow_p)) != 0)
6932 return t1;
6933 break;
6935 case ABS_EXPR:
6936 /* If widening the type changes it from signed to unsigned, then we
6937 must avoid building ABS_EXPR itself as unsigned. */
6938 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6940 tree cstype = (*signed_type_for) (ctype);
6941 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6942 != 0)
6944 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6945 return fold_convert (ctype, t1);
6947 break;
6949 /* If the constant is negative, we cannot simplify this. */
6950 if (tree_int_cst_sgn (c) == -1)
6951 break;
6952 /* FALLTHROUGH */
6953 case NEGATE_EXPR:
6954 /* For division and modulus, type can't be unsigned, as e.g.
6955 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6956 For signed types, even with wrapping overflow, this is fine. */
6957 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6958 break;
6959 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6960 != 0)
6961 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6962 break;
6964 case MIN_EXPR: case MAX_EXPR:
6965 /* If widening the type changes the signedness, then we can't perform
6966 this optimization as that changes the result. */
6967 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6968 break;
6970 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6971 sub_strict_overflow_p = false;
6972 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6973 &sub_strict_overflow_p)) != 0
6974 && (t2 = extract_muldiv (op1, c, code, wide_type,
6975 &sub_strict_overflow_p)) != 0)
6977 if (tree_int_cst_sgn (c) < 0)
6978 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6979 if (sub_strict_overflow_p)
6980 *strict_overflow_p = true;
6981 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6982 fold_convert (ctype, t2));
6984 break;
6986 case LSHIFT_EXPR: case RSHIFT_EXPR:
6987 /* If the second operand is constant, this is a multiplication
6988 or floor division, by a power of two, so we can treat it that
6989 way unless the multiplier or divisor overflows. Signed
6990 left-shift overflow is implementation-defined rather than
6991 undefined in C90, so do not convert signed left shift into
6992 multiplication. */
6993 if (TREE_CODE (op1) == INTEGER_CST
6994 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6995 /* const_binop may not detect overflow correctly,
6996 so check for it explicitly here. */
6997 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6998 wi::to_wide (op1))
6999 && (t1 = fold_convert (ctype,
7000 const_binop (LSHIFT_EXPR, size_one_node,
7001 op1))) != 0
7002 && !TREE_OVERFLOW (t1))
7003 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7004 ? MULT_EXPR : FLOOR_DIV_EXPR,
7005 ctype,
7006 fold_convert (ctype, op0),
7007 t1),
7008 c, code, wide_type, strict_overflow_p);
7009 break;
7011 case PLUS_EXPR: case MINUS_EXPR:
7012 /* See if we can eliminate the operation on both sides. If we can, we
7013 can return a new PLUS or MINUS. If we can't, the only remaining
7014 cases where we can do anything are if the second operand is a
7015 constant. */
7016 sub_strict_overflow_p = false;
7017 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7018 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7019 if (t1 != 0 && t2 != 0
7020 && TYPE_OVERFLOW_WRAPS (ctype)
7021 && (code == MULT_EXPR
7022 /* If not multiplication, we can only do this if both operands
7023 are divisible by c. */
7024 || (multiple_of_p (ctype, op0, c)
7025 && multiple_of_p (ctype, op1, c))))
7027 if (sub_strict_overflow_p)
7028 *strict_overflow_p = true;
7029 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7030 fold_convert (ctype, t2));
7033 /* If this was a subtraction, negate OP1 and set it to be an addition.
7034 This simplifies the logic below. */
7035 if (tcode == MINUS_EXPR)
7037 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7038 /* If OP1 was not easily negatable, the constant may be OP0. */
7039 if (TREE_CODE (op0) == INTEGER_CST)
7041 std::swap (op0, op1);
7042 std::swap (t1, t2);
7046 if (TREE_CODE (op1) != INTEGER_CST)
7047 break;
7049 /* If either OP1 or C is negative, this optimization is not safe for
7050 some of the division and remainder types, while for others we need
7051 to change the code. */
7052 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7054 if (code == CEIL_DIV_EXPR)
7055 code = FLOOR_DIV_EXPR;
7056 else if (code == FLOOR_DIV_EXPR)
7057 code = CEIL_DIV_EXPR;
7058 else if (code != MULT_EXPR
7059 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7060 break;
7063 /* If it's a multiply or a division/modulus operation of a multiple
7064 of our constant, do the operation and verify it doesn't overflow. */
7065 if (code == MULT_EXPR
7066 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7067 TYPE_SIGN (type)))
7069 op1 = const_binop (code, fold_convert (ctype, op1),
7070 fold_convert (ctype, c));
7071 /* We allow the constant to overflow with wrapping semantics. */
7072 if (op1 == 0
7073 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7074 break;
7076 else
7077 break;
7079 /* If we have an unsigned type, we cannot widen the operation since it
7080 will change the result if the original computation overflowed. */
7081 if (TYPE_UNSIGNED (ctype) && ctype != type)
7082 break;
7084 /* The last case is if we are a multiply. In that case, we can
7085 apply the distributive law to commute the multiply and addition
7086 if the multiplication of the constants doesn't overflow
7087 and overflow is defined. With undefined overflow
7088 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7089 But fold_plusminus_mult_expr would factor back any power-of-two
7090 value so do not distribute in the first place in this case. */
7091 if (code == MULT_EXPR
7092 && TYPE_OVERFLOW_WRAPS (ctype)
7093 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7094 return fold_build2 (tcode, ctype,
7095 fold_build2 (code, ctype,
7096 fold_convert (ctype, op0),
7097 fold_convert (ctype, c)),
7098 op1);
7100 break;
7102 case MULT_EXPR:
7103 /* We have a special case here if we are doing something like
7104 (C * 8) % 4 since we know that's zero. */
7105 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7106 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7107 /* If the multiplication can overflow we cannot optimize this. */
7108 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7109 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7110 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7111 TYPE_SIGN (type)))
7113 *strict_overflow_p = true;
7114 return omit_one_operand (type, integer_zero_node, op0);
7117 /* ... fall through ... */
7119 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7120 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7121 /* If we can extract our operation from the LHS, do so and return a
7122 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7123 do something only if the second operand is a constant. */
7124 if (same_p
7125 && TYPE_OVERFLOW_WRAPS (ctype)
7126 && (t1 = extract_muldiv (op0, c, code, wide_type,
7127 strict_overflow_p)) != 0)
7128 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7129 fold_convert (ctype, op1));
7130 else if (tcode == MULT_EXPR && code == MULT_EXPR
7131 && TYPE_OVERFLOW_WRAPS (ctype)
7132 && (t1 = extract_muldiv (op1, c, code, wide_type,
7133 strict_overflow_p)) != 0)
7134 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7135 fold_convert (ctype, t1));
7136 else if (TREE_CODE (op1) != INTEGER_CST)
7137 return 0;
7139 /* If these are the same operation types, we can associate them
7140 assuming no overflow. */
7141 if (tcode == code)
7143 bool overflow_p = false;
7144 wi::overflow_type overflow_mul;
7145 signop sign = TYPE_SIGN (ctype);
7146 unsigned prec = TYPE_PRECISION (ctype);
7147 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7148 wi::to_wide (c, prec),
7149 sign, &overflow_mul);
7150 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7151 if (overflow_mul
7152 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7153 overflow_p = true;
7154 if (!overflow_p)
7155 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7156 wide_int_to_tree (ctype, mul));
7159 /* If these operations "cancel" each other, we have the main
7160 optimizations of this pass, which occur when either constant is a
7161 multiple of the other, in which case we replace this with an
7162 operation of either CODE or TCODE.
7164 If we have an unsigned type, we cannot do this since it will change
7165 the result if the original computation overflowed. */
7166 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7167 && !TYPE_OVERFLOW_SANITIZED (ctype)
7168 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7169 || (tcode == MULT_EXPR
7170 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7171 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7172 && code != MULT_EXPR)))
7174 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7175 TYPE_SIGN (type)))
7177 *strict_overflow_p = true;
7178 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7179 fold_convert (ctype,
7180 const_binop (TRUNC_DIV_EXPR,
7181 op1, c)));
7183 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7184 TYPE_SIGN (type)))
7186 *strict_overflow_p = true;
7187 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7188 fold_convert (ctype,
7189 const_binop (TRUNC_DIV_EXPR,
7190 c, op1)));
7193 break;
7195 default:
7196 break;
7199 return 0;
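/* Illustrative sketch (added commentary, not from GCC): the example
   from the comment before extract_muldiv, valid as long as no
   intermediate computation overflows (the division is exact, so
   truncation never loses bits).  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    for (int y = -100; y <= 100; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
#endif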
7202 /* Return a node which has the indicated constant VALUE (either 0 or
7203 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7204 and is of the indicated TYPE. */
7206 tree
7207 constant_boolean_node (bool value, tree type)
7209 if (type == integer_type_node)
7210 return value ? integer_one_node : integer_zero_node;
7211 else if (type == boolean_type_node)
7212 return value ? boolean_true_node : boolean_false_node;
7213 else if (VECTOR_TYPE_P (type))
7214 return build_vector_from_val (type,
7215 build_int_cst (TREE_TYPE (type),
7216 value ? -1 : 0));
7217 else
7218 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7222 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7223 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7224 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7225 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7226 COND is the first argument to CODE; otherwise (as in the example
7227 given here), it is the second argument. TYPE is the type of the
7228 original expression. Return NULL_TREE if no simplification is
7229 possible. */
7231 static tree
7232 fold_binary_op_with_conditional_arg (location_t loc,
7233 enum tree_code code,
7234 tree type, tree op0, tree op1,
7235 tree cond, tree arg, int cond_first_p)
7237 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7238 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7239 tree test, true_value, false_value;
7240 tree lhs = NULL_TREE;
7241 tree rhs = NULL_TREE;
7242 enum tree_code cond_code = COND_EXPR;
7244 /* Do not move possibly trapping operations into the conditional as this
7245 pessimizes code and causes gimplification issues when applied late. */
7246 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7247 ANY_INTEGRAL_TYPE_P (type)
7248 && TYPE_OVERFLOW_TRAPS (type), op1))
7249 return NULL_TREE;
7251 if (TREE_CODE (cond) == COND_EXPR
7252 || TREE_CODE (cond) == VEC_COND_EXPR)
7254 test = TREE_OPERAND (cond, 0);
7255 true_value = TREE_OPERAND (cond, 1);
7256 false_value = TREE_OPERAND (cond, 2);
7257 /* If this operand is an expression that throws, it does not make
7258 sense to try to perform a logical or arithmetic operation
7259 involving it. */
7260 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7261 lhs = true_value;
7262 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7263 rhs = false_value;
7265 else if (!(TREE_CODE (type) != VECTOR_TYPE
7266 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7268 tree testtype = TREE_TYPE (cond);
7269 test = cond;
7270 true_value = constant_boolean_node (true, testtype);
7271 false_value = constant_boolean_node (false, testtype);
7273 else
7274 /* Detect the case of mixing vector and scalar types - bail out. */
7275 return NULL_TREE;
7277 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7278 cond_code = VEC_COND_EXPR;
7280 /* This transformation is only worthwhile if we don't have to wrap ARG
7281 in a SAVE_EXPR and the operation can be simplified without recursing
7282 on at least one of the branches once it's pushed inside the COND_EXPR. */
7283 if (!TREE_CONSTANT (arg)
7284 && (TREE_SIDE_EFFECTS (arg)
7285 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7286 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7287 return NULL_TREE;
7289 arg = fold_convert_loc (loc, arg_type, arg);
7290 if (lhs == 0)
7292 true_value = fold_convert_loc (loc, cond_type, true_value);
7293 if (cond_first_p)
7294 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7295 else
7296 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7298 if (rhs == 0)
7300 false_value = fold_convert_loc (loc, cond_type, false_value);
7301 if (cond_first_p)
7302 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7303 else
7304 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7307 /* Check that we have simplified at least one of the branches. */
7308 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7309 return NULL_TREE;
7311 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7315 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7317 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7318 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7319 if ARG - ZERO_ARG is the same as ARG.
7321 If ARG is NULL, check for any value of type TYPE.
7323 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7324 and finite. The problematic cases are when X is zero, and its mode
7325 has signed zeros. In the case of rounding towards -infinity,
7326 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7327 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7329 bool
7330 fold_real_zero_addition_p (const_tree type, const_tree arg,
7331 const_tree zero_arg, int negate)
7333 if (!real_zerop (zero_arg))
7334 return false;
7336 /* Don't allow the fold with -fsignaling-nans. */
7337 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7338 return false;
7340 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7341 if (!HONOR_SIGNED_ZEROS (type))
7342 return true;
7344 /* There is no case that is safe for all rounding modes. */
7345 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7346 return false;
7348 /* In a vector or complex, we would need to check the sign of all zeros. */
7349 if (TREE_CODE (zero_arg) == VECTOR_CST)
7350 zero_arg = uniform_vector_p (zero_arg);
7351 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7352 return false;
7354 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7355 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7356 negate = !negate;
7358 /* The mode has signed zeros, and we have to honor their sign.
7359 In this situation, there are only two cases we can return true for.
7360 (i) X - 0 is the same as X with default rounding.
7361 (ii) X + 0 is X when X can't possibly be -0.0. */
7362 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7365 /* Subroutine of match.pd that optimizes comparisons of a division by
7366 a nonzero integer constant against an integer constant, i.e.
7367 X/C1 op C2.
7369 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7370 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7372 enum tree_code
7373 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7374 tree *hi, bool *neg_overflow)
7376 tree prod, tmp, type = TREE_TYPE (c1);
7377 signop sign = TYPE_SIGN (type);
7378 wi::overflow_type overflow;
7380 /* We have to do this the hard way to detect unsigned overflow.
7381 prod = int_const_binop (MULT_EXPR, c1, c2); */
7382 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7383 prod = force_fit_type (type, val, -1, overflow);
7384 *neg_overflow = false;
7386 if (sign == UNSIGNED)
7388 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7389 *lo = prod;
7391 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7392 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7393 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7395 else if (tree_int_cst_sgn (c1) >= 0)
7397 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7398 switch (tree_int_cst_sgn (c2))
7400 case -1:
7401 *neg_overflow = true;
7402 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7403 *hi = prod;
7404 break;
7406 case 0:
7407 *lo = fold_negate_const (tmp, type);
7408 *hi = tmp;
7409 break;
7411 case 1:
7412 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7413 *lo = prod;
7414 break;
7416 default:
7417 gcc_unreachable ();
7420 else
7422 /* A negative divisor reverses the relational operators. */
7423 code = swap_tree_comparison (code);
7425 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7426 switch (tree_int_cst_sgn (c2))
7428 case -1:
7429 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7430 *lo = prod;
7431 break;
7433 case 0:
7434 *hi = fold_negate_const (tmp, type);
7435 *lo = tmp;
7436 break;
7438 case 1:
7439 *neg_overflow = true;
7440 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7441 *hi = prod;
7442 break;
7444 default:
7445 gcc_unreachable ();
7449 if (code != EQ_EXPR && code != NE_EXPR)
7450 return code;
7452 if (TREE_OVERFLOW (*lo)
7453 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7454 *lo = NULL_TREE;
7455 if (TREE_OVERFLOW (*hi)
7456 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7457 *hi = NULL_TREE;
7459 return code;
7462 /* Test whether it is preferable to swap two operands, ARG0 and
7463 ARG1, for example because ARG0 is an integer constant and ARG1
7464 isn't. */
7466 bool
7467 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7469 if (CONSTANT_CLASS_P (arg1))
7470 return false;
7471 if (CONSTANT_CLASS_P (arg0))
7472 return true;
7474 STRIP_NOPS (arg0);
7475 STRIP_NOPS (arg1);
7477 if (TREE_CONSTANT (arg1))
7478 return false;
7479 if (TREE_CONSTANT (arg0))
7480 return true;
7482 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7483 for commutative and comparison operators. Ensuring a canonical
7484 form allows the optimizers to find additional redundancies without
7485 having to explicitly check for both orderings. */
7486 if (TREE_CODE (arg0) == SSA_NAME
7487 && TREE_CODE (arg1) == SSA_NAME
7488 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7489 return true;
7491 /* Put SSA_NAMEs last. */
7492 if (TREE_CODE (arg1) == SSA_NAME)
7493 return false;
7494 if (TREE_CODE (arg0) == SSA_NAME)
7495 return true;
7497 /* Put variables last. */
7498 if (DECL_P (arg1))
7499 return false;
7500 if (DECL_P (arg0))
7501 return true;
7503 return false;
7507 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7508 means A >= Y && A != MAX, but in this case we know that
7509 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7511 static tree
7512 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7514 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7516 if (TREE_CODE (bound) == LT_EXPR)
7517 a = TREE_OPERAND (bound, 0);
7518 else if (TREE_CODE (bound) == GT_EXPR)
7519 a = TREE_OPERAND (bound, 1);
7520 else
7521 return NULL_TREE;
7523 typea = TREE_TYPE (a);
7524 if (!INTEGRAL_TYPE_P (typea)
7525 && !POINTER_TYPE_P (typea))
7526 return NULL_TREE;
7528 if (TREE_CODE (ineq) == LT_EXPR)
7530 a1 = TREE_OPERAND (ineq, 1);
7531 y = TREE_OPERAND (ineq, 0);
7533 else if (TREE_CODE (ineq) == GT_EXPR)
7535 a1 = TREE_OPERAND (ineq, 0);
7536 y = TREE_OPERAND (ineq, 1);
7538 else
7539 return NULL_TREE;
7541 if (TREE_TYPE (a1) != typea)
7542 return NULL_TREE;
7544 if (POINTER_TYPE_P (typea))
7546 /* Convert the pointer types into integer before taking the difference. */
7547 tree ta = fold_convert_loc (loc, ssizetype, a);
7548 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7549 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7551 else
7552 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7554 if (!diff || !integer_onep (diff))
7555 return NULL_TREE;
7557 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7560 /* Fold a sum or difference of at least one multiplication.
7561 Returns the folded tree or NULL if no simplification could be made. */
7563 static tree
7564 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7565 tree arg0, tree arg1)
7567 tree arg00, arg01, arg10, arg11;
7568 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7570 /* (A * C) +- (B * C) -> (A+-B) * C.
7571 (A * C) +- A -> A * (C+-1).
7572 We are most concerned about the case where C is a constant,
7573 but other combinations show up during loop reduction. Since
7574 it is not difficult, try all four possibilities. */
7576 if (TREE_CODE (arg0) == MULT_EXPR)
7578 arg00 = TREE_OPERAND (arg0, 0);
7579 arg01 = TREE_OPERAND (arg0, 1);
7581 else if (TREE_CODE (arg0) == INTEGER_CST)
7583 arg00 = build_one_cst (type);
7584 arg01 = arg0;
7586 else
7588 /* We cannot generate constant 1 for fract. */
7589 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7590 return NULL_TREE;
7591 arg00 = arg0;
7592 arg01 = build_one_cst (type);
7594 if (TREE_CODE (arg1) == MULT_EXPR)
7596 arg10 = TREE_OPERAND (arg1, 0);
7597 arg11 = TREE_OPERAND (arg1, 1);
7599 else if (TREE_CODE (arg1) == INTEGER_CST)
7601 arg10 = build_one_cst (type);
7602 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7603 the purpose of this canonicalization. */
7604 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7605 && negate_expr_p (arg1)
7606 && code == PLUS_EXPR)
7608 arg11 = negate_expr (arg1);
7609 code = MINUS_EXPR;
7611 else
7612 arg11 = arg1;
7614 else
7616 /* We cannot generate constant 1 for fract. */
7617 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7618 return NULL_TREE;
7619 arg10 = arg1;
7620 arg11 = build_one_cst (type);
7622 same = NULL_TREE;
7624 /* Prefer factoring a common non-constant. */
7625 if (operand_equal_p (arg00, arg10, 0))
7626 same = arg00, alt0 = arg01, alt1 = arg11;
7627 else if (operand_equal_p (arg01, arg11, 0))
7628 same = arg01, alt0 = arg00, alt1 = arg10;
7629 else if (operand_equal_p (arg00, arg11, 0))
7630 same = arg00, alt0 = arg01, alt1 = arg10;
7631 else if (operand_equal_p (arg01, arg10, 0))
7632 same = arg01, alt0 = arg00, alt1 = arg11;
7634 /* No identical multiplicands; see if we can find a common
7635 power-of-two factor in non-power-of-two multiplies. This
7636 can help in multi-dimensional array access. */
7637 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7639 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7640 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7641 HOST_WIDE_INT tmp;
7642 bool swap = false;
7643 tree maybe_same;
7645 /* Move min of absolute values to int11. */
7646 if (absu_hwi (int01) < absu_hwi (int11))
7648 tmp = int01, int01 = int11, int11 = tmp;
7649 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7650 maybe_same = arg01;
7651 swap = true;
7653 else
7654 maybe_same = arg11;
7656 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7657 if (factor > 1
7658 && pow2p_hwi (factor)
7659 && (int01 & (factor - 1)) == 0
7660 /* The remainder should not be a constant, otherwise we
7661 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7662 increase the number of multiplications needed. */
7663 && TREE_CODE (arg10) != INTEGER_CST)
7665 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7666 build_int_cst (TREE_TYPE (arg00),
7667 int01 / int11));
7668 alt1 = arg10;
7669 same = maybe_same;
7670 if (swap)
7671 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7675 if (!same)
7676 return NULL_TREE;
7678 if (! ANY_INTEGRAL_TYPE_P (type)
7679 || TYPE_OVERFLOW_WRAPS (type)
7680 /* We are neither factoring zero nor minus one. */
7681 || TREE_CODE (same) == INTEGER_CST)
7682 return fold_build2_loc (loc, MULT_EXPR, type,
7683 fold_build2_loc (loc, code, type,
7684 fold_convert_loc (loc, type, alt0),
7685 fold_convert_loc (loc, type, alt1)),
7686 fold_convert_loc (loc, type, same));
7688 /* SAME may be zero and thus the operation CODE may overflow. Likewise
7689 SAME may be minus one and thus the multiplication may overflow. Perform
7690 the sum operation in an unsigned type. */
7691 tree utype = unsigned_type_for (type);
7692 tree tem = fold_build2_loc (loc, code, utype,
7693 fold_convert_loc (loc, utype, alt0),
7694 fold_convert_loc (loc, utype, alt1));
7695 /* If the sum evaluated to a constant that is not -INF, the multiplication
7696 cannot overflow. */
7697 if (TREE_CODE (tem) == INTEGER_CST
7698 && (wi::to_wide (tem)
7699 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7700 return fold_build2_loc (loc, MULT_EXPR, type,
7701 fold_convert (type, tem), same);
7703 /* Do not resort to unsigned multiplication because
7704 we lose the no-overflow property of the expression. */
7705 return NULL_TREE;
7708 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7709 specified by EXPR into the buffer PTR of length LEN bytes.
7710 Return the number of bytes placed in the buffer, or zero
7711 upon failure. */
7713 static int
7714 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7716 tree type = TREE_TYPE (expr);
7717 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7718 int byte, offset, word, words;
7719 unsigned char value;
7721 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7722 return 0;
7723 if (off == -1)
7724 off = 0;
7726 if (ptr == NULL)
7727 /* Dry run. */
7728 return MIN (len, total_bytes - off);
7730 words = total_bytes / UNITS_PER_WORD;
7732 for (byte = 0; byte < total_bytes; byte++)
7734 int bitpos = byte * BITS_PER_UNIT;
7735 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7736 number of bytes. */
7737 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7739 if (total_bytes > UNITS_PER_WORD)
7741 word = byte / UNITS_PER_WORD;
7742 if (WORDS_BIG_ENDIAN)
7743 word = (words - 1) - word;
7744 offset = word * UNITS_PER_WORD;
7745 if (BYTES_BIG_ENDIAN)
7746 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7747 else
7748 offset += byte % UNITS_PER_WORD;
7750 else
7751 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7752 if (offset >= off && offset - off < len)
7753 ptr[offset - off] = value;
7755 return MIN (len, total_bytes - off);
7759 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7760 specified by EXPR into the buffer PTR of length LEN bytes.
7761 Return the number of bytes placed in the buffer, or zero
7762 upon failure. */
7764 static int
7765 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7767 tree type = TREE_TYPE (expr);
7768 scalar_mode mode = SCALAR_TYPE_MODE (type);
7769 int total_bytes = GET_MODE_SIZE (mode);
7770 FIXED_VALUE_TYPE value;
7771 tree i_value, i_type;
7773 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7774 return 0;
7776 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7778 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7779 return 0;
7781 value = TREE_FIXED_CST (expr);
7782 i_value = double_int_to_tree (i_type, value.data);
7784 return native_encode_int (i_value, ptr, len, off);
7788 /* Subroutine of native_encode_expr. Encode the REAL_CST
7789 specified by EXPR into the buffer PTR of length LEN bytes.
7790 Return the number of bytes placed in the buffer, or zero
7791 upon failure. */
7793 static int
7794 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7796 tree type = TREE_TYPE (expr);
7797 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7798 int byte, offset, word, words, bitpos;
7799 unsigned char value;
7801 /* There are always 32 bits in each long, no matter the size of
7802 the host's long. We handle floating point representations with
7803 up to 192 bits. */
7804 long tmp[6];
7806 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7807 return 0;
7808 if (off == -1)
7809 off = 0;
7811 if (ptr == NULL)
7812 /* Dry run. */
7813 return MIN (len, total_bytes - off);
7815 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7817 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7819 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7820 bitpos += BITS_PER_UNIT)
7822 byte = (bitpos / BITS_PER_UNIT) & 3;
7823 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7825 if (UNITS_PER_WORD < 4)
7827 word = byte / UNITS_PER_WORD;
7828 if (WORDS_BIG_ENDIAN)
7829 word = (words - 1) - word;
7830 offset = word * UNITS_PER_WORD;
7831 if (BYTES_BIG_ENDIAN)
7832 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7833 else
7834 offset += byte % UNITS_PER_WORD;
7836 else
7838 offset = byte;
7839 if (BYTES_BIG_ENDIAN)
7841 /* Reverse bytes within each long, or within the entire float
7842 if it's smaller than a long (for HFmode). */
7843 offset = MIN (3, total_bytes - 1) - offset;
7844 gcc_assert (offset >= 0);
7847 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7848 if (offset >= off
7849 && offset - off < len)
7850 ptr[offset - off] = value;
7852 return MIN (len, total_bytes - off);
7855 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7856 specified by EXPR into the buffer PTR of length LEN bytes.
7857 Return the number of bytes placed in the buffer, or zero
7858 upon failure. */
7860 static int
7861 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7863 int rsize, isize;
7864 tree part;
7866 part = TREE_REALPART (expr);
7867 rsize = native_encode_expr (part, ptr, len, off);
7868 if (off == -1 && rsize == 0)
7869 return 0;
7870 part = TREE_IMAGPART (expr);
7871 if (off != -1)
7872 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7873 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7874 len - rsize, off);
7875 if (off == -1 && isize != rsize)
7876 return 0;
7877 return rsize + isize;
7880 /* Like native_encode_vector, but only encode the first COUNT elements.
7881 The other arguments are as for native_encode_vector. */
7883 static int
7884 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7885 int off, unsigned HOST_WIDE_INT count)
7887 tree itype = TREE_TYPE (TREE_TYPE (expr));
7888 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7889 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7891 /* This is the only case in which elements can be smaller than a byte.
7892 Element 0 is always in the lsb of the containing byte. */
7893 unsigned int elt_bits = TYPE_PRECISION (itype);
7894 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7895 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7896 return 0;
7898 if (off == -1)
7899 off = 0;
7901 /* Zero the buffer and then set bits later where necessary. */
7902 int extract_bytes = MIN (len, total_bytes - off);
7903 if (ptr)
7904 memset (ptr, 0, extract_bytes);
7906 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7907 unsigned int first_elt = off * elts_per_byte;
7908 unsigned int extract_elts = extract_bytes * elts_per_byte;
7909 for (unsigned int i = 0; i < extract_elts; ++i)
7911 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7912 if (TREE_CODE (elt) != INTEGER_CST)
7913 return 0;
7915 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7917 unsigned int bit = i * elt_bits;
7918 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7921 return extract_bytes;
7924 int offset = 0;
7925 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7926 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7928 if (off >= size)
7930 off -= size;
7931 continue;
7933 tree elem = VECTOR_CST_ELT (expr, i);
7934 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7935 len - offset, off);
7936 if ((off == -1 && res != size) || res == 0)
7937 return 0;
7938 offset += res;
7939 if (offset >= len)
7940 return (off == -1 && i < count - 1) ? 0 : offset;
7941 if (off != -1)
7942 off = 0;
7944 return offset;
7947 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7948 specified by EXPR into the buffer PTR of length LEN bytes.
7949 Return the number of bytes placed in the buffer, or zero
7950 upon failure. */
7952 static int
7953 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7955 unsigned HOST_WIDE_INT count;
7956 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7957 return 0;
7958 return native_encode_vector_part (expr, ptr, len, off, count);
7962 /* Subroutine of native_encode_expr. Encode the STRING_CST
7963 specified by EXPR into the buffer PTR of length LEN bytes.
7964 Return the number of bytes placed in the buffer, or zero
7965 upon failure. */
7967 static int
7968 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7970 tree type = TREE_TYPE (expr);
7972 /* Wide-char strings are encoded in target byte order, so natively
7973 encoding them is trivial. */
7974 if (BITS_PER_UNIT != CHAR_BIT
7975 || TREE_CODE (type) != ARRAY_TYPE
7976 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7977 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7978 return 0;
7980 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7981 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7982 return 0;
7983 if (off == -1)
7984 off = 0;
7985 len = MIN (total_bytes - off, len);
7986 if (ptr == NULL)
7987 /* Dry run. */;
7988 else
7990 int written = 0;
7991 if (off < TREE_STRING_LENGTH (expr))
7993 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7994 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7996 memset (ptr + written, 0, len - written);
7998 return len;
8002 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8003 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8004 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8005 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8006 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8007 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8008 Return the number of bytes placed in the buffer, or zero upon failure. */
8010 int
8011 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8013 /* We don't support starting at negative offset and -1 is special. */
8014 if (off < -1)
8015 return 0;
8017 switch (TREE_CODE (expr))
8019 case INTEGER_CST:
8020 return native_encode_int (expr, ptr, len, off);
8022 case REAL_CST:
8023 return native_encode_real (expr, ptr, len, off);
8025 case FIXED_CST:
8026 return native_encode_fixed (expr, ptr, len, off);
8028 case COMPLEX_CST:
8029 return native_encode_complex (expr, ptr, len, off);
8031 case VECTOR_CST:
8032 return native_encode_vector (expr, ptr, len, off);
8034 case STRING_CST:
8035 return native_encode_string (expr, ptr, len, off);
8037 default:
8038 return 0;
8042 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8043 and larger than or equal to FIELDSIZE bytes, with underlying mode
8044 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8045 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8047 tree
8048 find_bitfield_repr_type (int fieldsize, int len)
8050 machine_mode mode;
8051 for (int pass = 0; pass < 2; pass++)
8053 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8054 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8055 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8056 && known_eq (GET_MODE_PRECISION (mode),
8057 GET_MODE_BITSIZE (mode))
8058 && known_le (GET_MODE_SIZE (mode), len))
8060 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8061 if (ret && TYPE_MODE (ret) == mode)
8062 return ret;
8066 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8067 if (int_n_enabled_p[i]
8068 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8069 && int_n_trees[i].unsigned_type)
8071 tree ret = int_n_trees[i].unsigned_type;
8072 mode = TYPE_MODE (ret);
8073 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8074 && known_eq (GET_MODE_PRECISION (mode),
8075 GET_MODE_BITSIZE (mode))
8076 && known_le (GET_MODE_SIZE (mode), len))
8077 return ret;
8080 return NULL_TREE;
8083 /* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8084 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8085 to be non-NULL and OFF zero), then in addition to filling the
8086 bytes pointed to by PTR with the value, also clear in MASK any bits
8087 that are known to be initialized, while keeping bits that correspond
8088 to e.g. uninitialized padding or uninitialized fields as they are. */
8090 int
8091 native_encode_initializer (tree init, unsigned char *ptr, int len,
8092 int off, unsigned char *mask)
8094 int r;
8096 /* We don't support starting at negative offset and -1 is special. */
8097 if (off < -1 || init == NULL_TREE)
8098 return 0;
8100 gcc_assert (mask == NULL || (off == 0 && ptr));
8102 STRIP_NOPS (init);
8103 switch (TREE_CODE (init))
8105 case VIEW_CONVERT_EXPR:
8106 case NON_LVALUE_EXPR:
8107 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8108 mask);
8109 default:
8110 r = native_encode_expr (init, ptr, len, off);
8111 if (mask)
8112 memset (mask, 0, r);
8113 return r;
8114 case CONSTRUCTOR:
8115 tree type = TREE_TYPE (init);
8116 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8117 if (total_bytes < 0)
8118 return 0;
8119 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8120 return 0;
8121 int o = off == -1 ? 0 : off;
8122 if (TREE_CODE (type) == ARRAY_TYPE)
8124 tree min_index;
8125 unsigned HOST_WIDE_INT cnt;
8126 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8127 constructor_elt *ce;
8129 if (!TYPE_DOMAIN (type)
8130 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8131 return 0;
8133 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8134 if (fieldsize <= 0)
8135 return 0;
8137 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8138 if (ptr)
8139 memset (ptr, '\0', MIN (total_bytes - off, len));
8141 for (cnt = 0; ; cnt++)
8143 tree val = NULL_TREE, index = NULL_TREE;
8144 HOST_WIDE_INT pos = curpos, count = 0;
8145 bool full = false;
8146 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8148 val = ce->value;
8149 index = ce->index;
8151 else if (mask == NULL
8152 || CONSTRUCTOR_NO_CLEARING (init)
8153 || curpos >= total_bytes)
8154 break;
8155 else
8156 pos = total_bytes;
8158 if (index && TREE_CODE (index) == RANGE_EXPR)
8160 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8161 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8162 return 0;
8163 offset_int wpos
8164 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8165 - wi::to_offset (min_index),
8166 TYPE_PRECISION (sizetype));
8167 wpos *= fieldsize;
8168 if (!wi::fits_shwi_p (wpos))
8169 return 0;
8170 pos = wpos.to_shwi ();
8171 offset_int wcount
8172 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8173 - wi::to_offset (TREE_OPERAND (index, 0)),
8174 TYPE_PRECISION (sizetype));
8175 if (!wi::fits_shwi_p (wcount))
8176 return 0;
8177 count = wcount.to_shwi ();
8179 else if (index)
8181 if (TREE_CODE (index) != INTEGER_CST)
8182 return 0;
8183 offset_int wpos
8184 = wi::sext (wi::to_offset (index)
8185 - wi::to_offset (min_index),
8186 TYPE_PRECISION (sizetype));
8187 wpos *= fieldsize;
8188 if (!wi::fits_shwi_p (wpos))
8189 return 0;
8190 pos = wpos.to_shwi ();
8193 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8195 if (valueinit == -1)
8197 tree zero = build_zero_cst (TREE_TYPE (type));
8198 r = native_encode_initializer (zero, ptr + curpos,
8199 fieldsize, 0,
8200 mask + curpos);
8201 if (TREE_CODE (zero) == CONSTRUCTOR)
8202 ggc_free (zero);
8203 if (!r)
8204 return 0;
8205 valueinit = curpos;
8206 curpos += fieldsize;
8208 while (curpos != pos)
8210 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8211 memcpy (mask + curpos, mask + valueinit, fieldsize);
8212 curpos += fieldsize;
8216 curpos = pos;
8217 if (val)
8220 if (off == -1
8221 || (curpos >= off
8222 && (curpos + fieldsize
8223 <= (HOST_WIDE_INT) off + len)))
8225 if (full)
8227 if (ptr)
8228 memcpy (ptr + (curpos - o), ptr + (pos - o),
8229 fieldsize);
8230 if (mask)
8231 memcpy (mask + curpos, mask + pos, fieldsize);
8233 else if (!native_encode_initializer (val,
8235 ? ptr + curpos - o
8236 : NULL,
8237 fieldsize,
8238 off == -1 ? -1
8239 : 0,
8240 mask
8241 ? mask + curpos
8242 : NULL))
8243 return 0;
8244 else
8246 full = true;
8247 pos = curpos;
8250 else if (curpos + fieldsize > off
8251 && curpos < (HOST_WIDE_INT) off + len)
8253 /* Partial overlap. */
8254 unsigned char *p = NULL;
8255 int no = 0;
8256 int l;
8257 gcc_assert (mask == NULL);
8258 if (curpos >= off)
8260 if (ptr)
8261 p = ptr + curpos - off;
8262 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8263 fieldsize);
8265 else
8267 p = ptr;
8268 no = off - curpos;
8269 l = len;
8271 if (!native_encode_initializer (val, p, l, no, NULL))
8272 return 0;
8274 curpos += fieldsize;
8276 while (count-- != 0);
8278 return MIN (total_bytes - off, len);
8280 else if (TREE_CODE (type) == RECORD_TYPE
8281 || TREE_CODE (type) == UNION_TYPE)
8283 unsigned HOST_WIDE_INT cnt;
8284 constructor_elt *ce;
8285 tree fld_base = TYPE_FIELDS (type);
8286 tree to_free = NULL_TREE;
8288 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8289 if (ptr != NULL)
8290 memset (ptr, '\0', MIN (total_bytes - o, len));
8291 for (cnt = 0; ; cnt++)
8293 tree val = NULL_TREE, field = NULL_TREE;
8294 HOST_WIDE_INT pos = 0, fieldsize;
8295 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8297 if (to_free)
8299 ggc_free (to_free);
8300 to_free = NULL_TREE;
8303 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8305 val = ce->value;
8306 field = ce->index;
8307 if (field == NULL_TREE)
8308 return 0;
8310 pos = int_byte_position (field);
8311 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8312 continue;
8314 else if (mask == NULL
8315 || CONSTRUCTOR_NO_CLEARING (init))
8316 break;
8317 else
8318 pos = total_bytes;
8320 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8322 tree fld;
8323 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8325 if (TREE_CODE (fld) != FIELD_DECL)
8326 continue;
8327 if (fld == field)
8328 break;
8329 if (DECL_PADDING_P (fld))
8330 continue;
8331 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8332 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8333 return 0;
8334 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8335 continue;
8336 break;
8338 if (fld == NULL_TREE)
8340 if (ce == NULL)
8341 break;
8342 return 0;
8344 fld_base = DECL_CHAIN (fld);
8345 if (fld != field)
8347 cnt--;
8348 field = fld;
8349 pos = int_byte_position (field);
8350 val = build_zero_cst (TREE_TYPE (fld));
8351 if (TREE_CODE (val) == CONSTRUCTOR)
8352 to_free = val;
8356 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8357 && TYPE_DOMAIN (TREE_TYPE (field))
8358 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8360 if (mask || off != -1)
8361 return 0;
8362 if (val == NULL_TREE)
8363 continue;
8364 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8365 return 0;
8366 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8367 if (fieldsize < 0
8368 || (int) fieldsize != fieldsize
8369 || (pos + fieldsize) > INT_MAX)
8370 return 0;
8371 if (pos + fieldsize > total_bytes)
8373 if (ptr != NULL && total_bytes < len)
8374 memset (ptr + total_bytes, '\0',
8375 MIN (pos + fieldsize, len) - total_bytes);
8376 total_bytes = pos + fieldsize;
8379 else
8381 if (DECL_SIZE_UNIT (field) == NULL_TREE
8382 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8383 return 0;
8384 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8386 if (fieldsize == 0)
8387 continue;
8389 /* Prepare to deal with integral bit-fields and filter out other
8390 bit-fields that do not start and end on a byte boundary. */
8391 if (DECL_BIT_FIELD (field))
8393 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8394 return 0;
8395 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8396 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8398 bpos %= BITS_PER_UNIT;
8399 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8400 epos = fieldsize % BITS_PER_UNIT;
8401 fieldsize += BITS_PER_UNIT - 1;
8402 fieldsize /= BITS_PER_UNIT;
8404 else if (bpos % BITS_PER_UNIT
8405 || DECL_SIZE (field) == NULL_TREE
8406 || !tree_fits_shwi_p (DECL_SIZE (field))
8407 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8408 return 0;
8411 if (off != -1 && pos + fieldsize <= off)
8412 continue;
8414 if (val == NULL_TREE)
8415 continue;
8417 if (DECL_BIT_FIELD (field)
8418 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8420 /* FIXME: Handle PDP endian. */
8421 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8422 return 0;
8424 if (TREE_CODE (val) != INTEGER_CST)
8425 return 0;
8427 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8428 tree repr_type = NULL_TREE;
8429 HOST_WIDE_INT rpos = 0;
8430 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8432 rpos = int_byte_position (repr);
8433 repr_type = TREE_TYPE (repr);
8435 else
8437 repr_type = find_bitfield_repr_type (fieldsize, len);
8438 if (repr_type == NULL_TREE)
8439 return 0;
8440 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8441 gcc_assert (repr_size > 0 && repr_size <= len);
8442 if (pos + repr_size <= o + len)
8443 rpos = pos;
8444 else
8446 rpos = o + len - repr_size;
8447 gcc_assert (rpos <= pos);
8451 if (rpos > pos)
8452 return 0;
8453 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8454 int diff = (TYPE_PRECISION (repr_type)
8455 - TYPE_PRECISION (TREE_TYPE (field)));
8456 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8457 if (!BYTES_BIG_ENDIAN)
8458 w = wi::lshift (w, bitoff);
8459 else
8460 w = wi::lshift (w, diff - bitoff);
8461 val = wide_int_to_tree (repr_type, w);
8463 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8464 / BITS_PER_UNIT + 1];
8465 int l = native_encode_int (val, buf, sizeof buf, 0);
8466 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8467 return 0;
8469 if (ptr == NULL)
8470 continue;
8472 /* If the bitfield does not start at byte boundary, handle
8473 the partial byte at the start. */
8474 if (bpos
8475 && (off == -1 || (pos >= off && len >= 1)))
8477 if (!BYTES_BIG_ENDIAN)
8479 int msk = (1 << bpos) - 1;
8480 buf[pos - rpos] &= ~msk;
8481 buf[pos - rpos] |= ptr[pos - o] & msk;
8482 if (mask)
8484 if (fieldsize > 1 || epos == 0)
8485 mask[pos] &= msk;
8486 else
8487 mask[pos] &= (msk | ~((1 << epos) - 1));
8490 else
8492 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8493 buf[pos - rpos] &= msk;
8494 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8495 if (mask)
8497 if (fieldsize > 1 || epos == 0)
8498 mask[pos] &= ~msk;
8499 else
8500 mask[pos] &= (~msk
8501 | ((1 << (BITS_PER_UNIT - epos))
8502 - 1));
8506 /* If the bitfield does not end at byte boundary, handle
8507 the partial byte at the end. */
8508 if (epos
8509 && (off == -1
8510 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8512 if (!BYTES_BIG_ENDIAN)
8514 int msk = (1 << epos) - 1;
8515 buf[pos - rpos + fieldsize - 1] &= msk;
8516 buf[pos - rpos + fieldsize - 1]
8517 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8518 if (mask && (fieldsize > 1 || bpos == 0))
8519 mask[pos + fieldsize - 1] &= ~msk;
8521 else
8523 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8524 buf[pos - rpos + fieldsize - 1] &= ~msk;
8525 buf[pos - rpos + fieldsize - 1]
8526 |= ptr[pos + fieldsize - 1 - o] & msk;
8527 if (mask && (fieldsize > 1 || bpos == 0))
8528 mask[pos + fieldsize - 1] &= msk;
8531 if (off == -1
8532 || (pos >= off
8533 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8535 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8536 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8537 memset (mask + pos + (bpos != 0), 0,
8538 fieldsize - (bpos != 0) - (epos != 0));
8540 else
8542 /* Partial overlap. */
8543 HOST_WIDE_INT fsz = fieldsize;
8544 gcc_assert (mask == NULL);
8545 if (pos < off)
8547 fsz -= (off - pos);
8548 pos = off;
8550 if (pos + fsz > (HOST_WIDE_INT) off + len)
8551 fsz = (HOST_WIDE_INT) off + len - pos;
8552 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8554 continue;
8557 if (off == -1
8558 || (pos >= off
8559 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8561 int fldsize = fieldsize;
8562 if (off == -1)
8564 tree fld = DECL_CHAIN (field);
8565 while (fld)
8567 if (TREE_CODE (fld) == FIELD_DECL)
8568 break;
8569 fld = DECL_CHAIN (fld);
8571 if (fld == NULL_TREE)
8572 fldsize = len - pos;
8574 r = native_encode_initializer (val, ptr ? ptr + pos - o
8575 : NULL,
8576 fldsize,
8577 off == -1 ? -1 : 0,
8578 mask ? mask + pos : NULL);
8579 if (!r)
8580 return 0;
8581 if (off == -1
8582 && fldsize != fieldsize
8583 && r > fieldsize
8584 && pos + r > total_bytes)
8585 total_bytes = pos + r;
8587 else
8589 /* Partial overlap. */
8590 unsigned char *p = NULL;
8591 int no = 0;
8592 int l;
8593 gcc_assert (mask == NULL);
8594 if (pos >= off)
8596 if (ptr)
8597 p = ptr + pos - off;
8598 l = MIN ((HOST_WIDE_INT) off + len - pos,
8599 fieldsize);
8601 else
8603 p = ptr;
8604 no = off - pos;
8605 l = len;
8607 if (!native_encode_initializer (val, p, l, no, NULL))
8608 return 0;
8611 return MIN (total_bytes - off, len);
8613 return 0;
8618 /* Subroutine of native_interpret_expr. Interpret the contents of
8619 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8620 If the buffer cannot be interpreted, return NULL_TREE. */
8622 static tree
8623 native_interpret_int (tree type, const unsigned char *ptr, int len)
8625 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8627 if (total_bytes > len
8628 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8629 return NULL_TREE;
8631 wide_int result = wi::from_buffer (ptr, total_bytes);
8633 return wide_int_to_tree (type, result);
8637 /* Subroutine of native_interpret_expr. Interpret the contents of
8638 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8639 If the buffer cannot be interpreted, return NULL_TREE. */
8641 static tree
8642 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8644 scalar_mode mode = SCALAR_TYPE_MODE (type);
8645 int total_bytes = GET_MODE_SIZE (mode);
8646 double_int result;
8647 FIXED_VALUE_TYPE fixed_value;
8649 if (total_bytes > len
8650 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8651 return NULL_TREE;
8653 result = double_int::from_buffer (ptr, total_bytes);
8654 fixed_value = fixed_from_double_int (result, mode);
8656 return build_fixed (type, fixed_value);
8660 /* Subroutine of native_interpret_expr. Interpret the contents of
8661 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8662 If the buffer cannot be interpreted, return NULL_TREE. */
8664 tree
8665 native_interpret_real (tree type, const unsigned char *ptr, int len)
8667 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8668 int total_bytes = GET_MODE_SIZE (mode);
8669 unsigned char value;
8670 /* There are always 32 bits in each long, no matter the size of
8671 the host's long. We handle floating point representations with
8672 up to 192 bits. */
8673 REAL_VALUE_TYPE r;
8674 long tmp[6];
8676 if (total_bytes > len || total_bytes > 24)
8677 return NULL_TREE;
8678 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8680 memset (tmp, 0, sizeof (tmp));
8681 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8682 bitpos += BITS_PER_UNIT)
8684 /* Both OFFSET and BYTE index within a long;
8685 bitpos indexes the whole float. */
8686 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8687 if (UNITS_PER_WORD < 4)
8689 int word = byte / UNITS_PER_WORD;
8690 if (WORDS_BIG_ENDIAN)
8691 word = (words - 1) - word;
8692 offset = word * UNITS_PER_WORD;
8693 if (BYTES_BIG_ENDIAN)
8694 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8695 else
8696 offset += byte % UNITS_PER_WORD;
8698 else
8700 offset = byte;
8701 if (BYTES_BIG_ENDIAN)
8703 /* Reverse bytes within each long, or within the entire float
8704 if it's smaller than a long (for HFmode). */
8705 offset = MIN (3, total_bytes - 1) - offset;
8706 gcc_assert (offset >= 0);
8709 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8711 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8714 real_from_target (&r, tmp, mode);
8715 return build_real (type, r);
8719 /* Subroutine of native_interpret_expr. Interpret the contents of
8720 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8721 If the buffer cannot be interpreted, return NULL_TREE. */
8723 static tree
8724 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8726 tree etype, rpart, ipart;
8727 int size;
8729 etype = TREE_TYPE (type);
8730 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8731 if (size * 2 > len)
8732 return NULL_TREE;
8733 rpart = native_interpret_expr (etype, ptr, size);
8734 if (!rpart)
8735 return NULL_TREE;
8736 ipart = native_interpret_expr (etype, ptr+size, size);
8737 if (!ipart)
8738 return NULL_TREE;
8739 return build_complex (type, rpart, ipart);
8742 /* Read a vector of type TYPE from the target memory image given by BYTES,
8743 which contains LEN bytes. The vector is known to be encodable using
8744 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8746 Return the vector on success, otherwise return null. */
8748 static tree
8749 native_interpret_vector_part (tree type, const unsigned char *bytes,
8750 unsigned int len, unsigned int npatterns,
8751 unsigned int nelts_per_pattern)
8753 tree elt_type = TREE_TYPE (type);
8754 if (VECTOR_BOOLEAN_TYPE_P (type)
8755 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8757 /* This is the only case in which elements can be smaller than a byte.
8758 Element 0 is always in the lsb of the containing byte. */
8759 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8760 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8761 return NULL_TREE;
8763 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8764 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8766 unsigned int bit_index = i * elt_bits;
8767 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8768 unsigned int lsb = bit_index % BITS_PER_UNIT;
8769 builder.quick_push (bytes[byte_index] & (1 << lsb)
8770 ? build_all_ones_cst (elt_type)
8771 : build_zero_cst (elt_type));
8773 return builder.build ();
8776 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8777 if (elt_bytes * npatterns * nelts_per_pattern > len)
8778 return NULL_TREE;
8780 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8781 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8783 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8784 if (!elt)
8785 return NULL_TREE;
8786 builder.quick_push (elt);
8787 bytes += elt_bytes;
8789 return builder.build ();
8792 /* Subroutine of native_interpret_expr. Interpret the contents of
8793 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8794 If the buffer cannot be interpreted, return NULL_TREE. */
8796 static tree
8797 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8799 tree etype;
8800 unsigned int size;
8801 unsigned HOST_WIDE_INT count;
8803 etype = TREE_TYPE (type);
8804 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8805 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8806 || size * count > len)
8807 return NULL_TREE;
8809 return native_interpret_vector_part (type, ptr, len, count, 1);
8813 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8814 the buffer PTR of length LEN as a constant of type TYPE. For
8815 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8816 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8817 return NULL_TREE. */
8819 tree
8820 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8822 switch (TREE_CODE (type))
8824 case INTEGER_TYPE:
8825 case ENUMERAL_TYPE:
8826 case BOOLEAN_TYPE:
8827 case POINTER_TYPE:
8828 case REFERENCE_TYPE:
8829 case OFFSET_TYPE:
8830 return native_interpret_int (type, ptr, len);
8832 case REAL_TYPE:
8833 if (tree ret = native_interpret_real (type, ptr, len))
8835 /* For floating point values in composite modes, punt if this
8836 folding doesn't preserve bit representation. As the mode doesn't
8837 have fixed precision while GCC pretends it does, there could be
8838 valid values that GCC can't really represent accurately.
8839 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8840 bit combinations which GCC doesn't preserve. */
8841 unsigned char buf[24 * 2];
8842 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8843 int total_bytes = GET_MODE_SIZE (mode);
8844 memcpy (buf + 24, ptr, total_bytes);
8845 clear_type_padding_in_mask (type, buf + 24);
8846 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8847 || memcmp (buf + 24, buf, total_bytes) != 0)
8848 return NULL_TREE;
8849 return ret;
8851 return NULL_TREE;
8853 case FIXED_POINT_TYPE:
8854 return native_interpret_fixed (type, ptr, len);
8856 case COMPLEX_TYPE:
8857 return native_interpret_complex (type, ptr, len);
8859 case VECTOR_TYPE:
8860 return native_interpret_vector (type, ptr, len);
8862 default:
8863 return NULL_TREE;
8867 /* Returns true if we can interpret the contents of a native encoding
8868 as TYPE. */
8870 bool
8871 can_native_interpret_type_p (tree type)
8873 switch (TREE_CODE (type))
8875 case INTEGER_TYPE:
8876 case ENUMERAL_TYPE:
8877 case BOOLEAN_TYPE:
8878 case POINTER_TYPE:
8879 case REFERENCE_TYPE:
8880 case FIXED_POINT_TYPE:
8881 case REAL_TYPE:
8882 case COMPLEX_TYPE:
8883 case VECTOR_TYPE:
8884 case OFFSET_TYPE:
8885 return true;
8886 default:
8887 return false;
8891 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8892 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8894 tree
8895 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8896 int len)
8898 vec<constructor_elt, va_gc> *elts = NULL;
8899 if (TREE_CODE (type) == ARRAY_TYPE)
8901 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8902 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8903 return NULL_TREE;
8905 HOST_WIDE_INT cnt = 0;
8906 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8908 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8909 return NULL_TREE;
8910 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8912 if (eltsz == 0)
8913 cnt = 0;
8914 HOST_WIDE_INT pos = 0;
8915 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8917 tree v = NULL_TREE;
8918 if (pos >= len || pos + eltsz > len)
8919 return NULL_TREE;
8920 if (can_native_interpret_type_p (TREE_TYPE (type)))
8922 v = native_interpret_expr (TREE_TYPE (type),
8923 ptr + off + pos, eltsz);
8924 if (v == NULL_TREE)
8925 return NULL_TREE;
8927 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8928 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8929 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8930 eltsz);
8931 if (v == NULL_TREE)
8932 return NULL_TREE;
8933 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8935 return build_constructor (type, elts);
8937 if (TREE_CODE (type) != RECORD_TYPE)
8938 return NULL_TREE;
8939 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8941 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8942 continue;
8943 tree fld = field;
8944 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8945 int diff = 0;
8946 tree v = NULL_TREE;
8947 if (DECL_BIT_FIELD (field))
8949 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8950 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8952 poly_int64 bitoffset;
8953 poly_uint64 field_offset, fld_offset;
8954 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8955 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8956 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8957 else
8958 bitoffset = 0;
8959 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8960 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8961 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8962 - TYPE_PRECISION (TREE_TYPE (field)));
8963 if (!bitoffset.is_constant (&bitoff)
8964 || bitoff < 0
8965 || bitoff > diff)
8966 return NULL_TREE;
8968 else
8970 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8971 return NULL_TREE;
8972 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8973 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8974 bpos %= BITS_PER_UNIT;
8975 fieldsize += bpos;
8976 fieldsize += BITS_PER_UNIT - 1;
8977 fieldsize /= BITS_PER_UNIT;
8978 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8979 if (repr_type == NULL_TREE)
8980 return NULL_TREE;
8981 sz = int_size_in_bytes (repr_type);
8982 if (sz < 0 || sz > len)
8983 return NULL_TREE;
8984 pos = int_byte_position (field);
8985 if (pos < 0 || pos > len || pos + fieldsize > len)
8986 return NULL_TREE;
8987 HOST_WIDE_INT rpos;
8988 if (pos + sz <= len)
8989 rpos = pos;
8990 else
8992 rpos = len - sz;
8993 gcc_assert (rpos <= pos);
8995 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8996 pos = rpos;
8997 diff = (TYPE_PRECISION (repr_type)
8998 - TYPE_PRECISION (TREE_TYPE (field)));
8999 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9000 if (v == NULL_TREE)
9001 return NULL_TREE;
9002 fld = NULL_TREE;
9006 if (fld)
9008 sz = int_size_in_bytes (TREE_TYPE (fld));
9009 if (sz < 0 || sz > len)
9010 return NULL_TREE;
9011 tree byte_pos = byte_position (fld);
9012 if (!tree_fits_shwi_p (byte_pos))
9013 return NULL_TREE;
9014 pos = tree_to_shwi (byte_pos);
9015 if (pos < 0 || pos > len || pos + sz > len)
9016 return NULL_TREE;
9018 if (fld == NULL_TREE)
9019 /* Already handled above. */;
9020 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9022 v = native_interpret_expr (TREE_TYPE (fld),
9023 ptr + off + pos, sz);
9024 if (v == NULL_TREE)
9025 return NULL_TREE;
9027 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9028 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9029 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9030 if (v == NULL_TREE)
9031 return NULL_TREE;
9032 if (fld != field)
9034 if (TREE_CODE (v) != INTEGER_CST)
9035 return NULL_TREE;
9037 /* FIXME: Figure out how to handle PDP endian bitfields. */
9038 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9039 return NULL_TREE;
9040 if (!BYTES_BIG_ENDIAN)
9041 v = wide_int_to_tree (TREE_TYPE (field),
9042 wi::lrshift (wi::to_wide (v), bitoff));
9043 else
9044 v = wide_int_to_tree (TREE_TYPE (field),
9045 wi::lrshift (wi::to_wide (v),
9046 diff - bitoff));
9048 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9050 return build_constructor (type, elts);
9053 /* Routines for manipulating native_encode_expr encoded data when the encoded
9054 or extracted constant positions and/or sizes aren't byte aligned. */
9056 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9057 bits between adjacent elements. AMNT should be within
9058 [0, BITS_PER_UNIT).
9059 Example, AMNT = 2:
9060 00011111|11100000 << 2 = 01111111|10000000
9061 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9063 void
9064 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9065 unsigned int amnt)
9067 if (amnt == 0)
9068 return;
9070 unsigned char carry_over = 0U;
9071 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9072 unsigned char clear_mask = (~0U) << amnt;
9074 for (unsigned int i = 0; i < sz; i++)
9076 unsigned prev_carry_over = carry_over;
9077 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9079 ptr[i] <<= amnt;
9080 if (i != 0)
9082 ptr[i] &= clear_mask;
9083 ptr[i] |= prev_carry_over;
9088 /* Like shift_bytes_in_array_left but for big-endian.
9089 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9090 bits between adjacent elements. AMNT should be within
9091 [0, BITS_PER_UNIT).
9092 Example, AMNT = 2:
9093 00011111|11100000 >> 2 = 00000111|11111000
9094 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9096 void
9097 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9098 unsigned int amnt)
9100 if (amnt == 0)
9101 return;
9103 unsigned char carry_over = 0U;
9104 unsigned char carry_mask = ~(~0U << amnt);
9106 for (unsigned int i = 0; i < sz; i++)
9108 unsigned prev_carry_over = carry_over;
9109 carry_over = ptr[i] & carry_mask;
9111 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9112 ptr[i] >>= amnt;
9113 ptr[i] |= prev_carry_over;
9117 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9118 directly on the VECTOR_CST encoding, in a way that works for variable-
9119 length vectors. Return the resulting VECTOR_CST on success or null
9120 on failure. */
9122 static tree
9123 fold_view_convert_vector_encoding (tree type, tree expr)
9125 tree expr_type = TREE_TYPE (expr);
9126 poly_uint64 type_bits, expr_bits;
9127 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9128 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9129 return NULL_TREE;
9131 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9132 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9133 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9134 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9136 /* We can only preserve the semantics of a stepped pattern if the new
9137 vector element is an integer of the same size. */
9138 if (VECTOR_CST_STEPPED_P (expr)
9139 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9140 return NULL_TREE;
9142 /* The number of bits needed to encode one element from every pattern
9143 of the original vector. */
9144 unsigned int expr_sequence_bits
9145 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9147 /* The number of bits needed to encode one element from every pattern
9148 of the result. */
9149 unsigned int type_sequence_bits
9150 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9152 /* Don't try to read more bytes than are available, which can happen
9153 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9154 The general VIEW_CONVERT handling can cope with that case, so there's
9155 no point complicating things here. */
9156 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9157 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9158 BITS_PER_UNIT);
9159 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9160 if (known_gt (buffer_bits, expr_bits))
9161 return NULL_TREE;
9163 /* Get enough bytes of EXPR to form the new encoding. */
9164 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9165 buffer.quick_grow (buffer_bytes);
9166 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9167 buffer_bits / expr_elt_bits)
9168 != (int) buffer_bytes)
9169 return NULL_TREE;
9171 /* Reencode the bytes as TYPE. */
9172 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9173 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9174 type_npatterns, nelts_per_pattern);
9177 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9178 TYPE at compile-time. If we're unable to perform the conversion
9179 return NULL_TREE. */
9181 static tree
9182 fold_view_convert_expr (tree type, tree expr)
9184 /* We support up to 512-bit values (for V8DFmode). */
9185 unsigned char buffer[64];
9186 int len;
9188 /* Check that the host and target are sane. */
9189 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9190 return NULL_TREE;
9192 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9193 if (tree res = fold_view_convert_vector_encoding (type, expr))
9194 return res;
9196 len = native_encode_expr (expr, buffer, sizeof (buffer));
9197 if (len == 0)
9198 return NULL_TREE;
9200 return native_interpret_expr (type, buffer, len);
9203 /* Build an expression for the address of T. Folds away INDIRECT_REF
9204 to avoid confusing the gimplify process. */
9206 tree
9207 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9209 /* The size of the object is not relevant when talking about its address. */
9210 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9211 t = TREE_OPERAND (t, 0);
9213 if (INDIRECT_REF_P (t))
9215 t = TREE_OPERAND (t, 0);
9217 if (TREE_TYPE (t) != ptrtype)
9218 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9220 else if (TREE_CODE (t) == MEM_REF
9221 && integer_zerop (TREE_OPERAND (t, 1)))
9223 t = TREE_OPERAND (t, 0);
9225 if (TREE_TYPE (t) != ptrtype)
9226 t = fold_convert_loc (loc, ptrtype, t);
9228 else if (TREE_CODE (t) == MEM_REF
9229 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9230 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9231 TREE_OPERAND (t, 0),
9232 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9233 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9235 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9237 if (TREE_TYPE (t) != ptrtype)
9238 t = fold_convert_loc (loc, ptrtype, t);
9240 else
9241 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9243 return t;
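/* For example: &*p and &MEM[p, 0] both fold to p (with a cast to PTRTYPE
   if needed), &MEM[(T *) 16, 8] folds to 16 p+ 8, and
   &VIEW_CONVERT_EXPR<T>(x) folds to a converted &x; anything else becomes
   a plain ADDR_EXPR.  */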
9246 /* Build an expression for the address of T. */
9248 tree
9249 build_fold_addr_expr_loc (location_t loc, tree t)
9251 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9253 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9256 /* Fold a unary expression of code CODE and type TYPE with operand
9257 OP0. Return the folded expression if folding is successful.
9258 Otherwise, return NULL_TREE. */
9260 tree
9261 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9263 tree tem;
9264 tree arg0;
9265 enum tree_code_class kind = TREE_CODE_CLASS (code);
9267 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9268 && TREE_CODE_LENGTH (code) == 1);
9270 arg0 = op0;
9271 if (arg0)
9273 if (CONVERT_EXPR_CODE_P (code)
9274 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9276 /* Don't use STRIP_NOPS, because signedness of argument type
9277 matters. */
9278 STRIP_SIGN_NOPS (arg0);
9280 else
9282 /* Strip any conversions that don't change the mode. This
9283 is safe for every expression, except for a comparison
9284 expression because its signedness is derived from its
9285 operands.
9287 Note that this is done as an internal manipulation within
9288 the constant folder, in order to find the simplest
9289 representation of the arguments so that their form can be
9290 studied. In any case, the appropriate type conversions
9291 should be put back in the tree that will get out of the
9292 constant folder. */
9293 STRIP_NOPS (arg0);
9296 if (CONSTANT_CLASS_P (arg0))
9298 tree tem = const_unop (code, type, arg0);
9299 if (tem)
9301 if (TREE_TYPE (tem) != type)
9302 tem = fold_convert_loc (loc, type, tem);
9303 return tem;
9308 tem = generic_simplify (loc, code, type, op0);
9309 if (tem)
9310 return tem;
9312 if (TREE_CODE_CLASS (code) == tcc_unary)
9314 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9315 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9316 fold_build1_loc (loc, code, type,
9317 fold_convert_loc (loc, TREE_TYPE (op0),
9318 TREE_OPERAND (arg0, 1))));
9319 else if (TREE_CODE (arg0) == COND_EXPR)
9321 tree arg01 = TREE_OPERAND (arg0, 1);
9322 tree arg02 = TREE_OPERAND (arg0, 2);
9323 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9324 arg01 = fold_build1_loc (loc, code, type,
9325 fold_convert_loc (loc,
9326 TREE_TYPE (op0), arg01));
9327 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9328 arg02 = fold_build1_loc (loc, code, type,
9329 fold_convert_loc (loc,
9330 TREE_TYPE (op0), arg02));
9331 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9332 arg01, arg02);
9334 /* If this was a conversion, and all we did was to move it
9335 inside the COND_EXPR, bring it back out. But leave it if
9336 it is a conversion from integer to integer and the
9337 result precision is no wider than a word since such a
9338 conversion is cheap and may be optimized away by combine,
9339 while it couldn't if it were outside the COND_EXPR. Then return
9340 so we don't get into an infinite recursion loop taking the
9341 conversion out and then back in. */
9343 if ((CONVERT_EXPR_CODE_P (code)
9344 || code == NON_LVALUE_EXPR)
9345 && TREE_CODE (tem) == COND_EXPR
9346 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9347 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9348 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9349 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9350 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9351 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9352 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9353 && (INTEGRAL_TYPE_P
9354 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9355 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9356 || flag_syntax_only))
9357 tem = build1_loc (loc, code, type,
9358 build3 (COND_EXPR,
9359 TREE_TYPE (TREE_OPERAND
9360 (TREE_OPERAND (tem, 1), 0)),
9361 TREE_OPERAND (tem, 0),
9362 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9363 TREE_OPERAND (TREE_OPERAND (tem, 2),
9364 0)));
9365 return tem;
9369 switch (code)
9371 case NON_LVALUE_EXPR:
9372 if (!maybe_lvalue_p (op0))
9373 return fold_convert_loc (loc, type, op0);
9374 return NULL_TREE;
9376 CASE_CONVERT:
9377 case FLOAT_EXPR:
9378 case FIX_TRUNC_EXPR:
9379 if (COMPARISON_CLASS_P (op0))
9381 /* If we have (type) (a CMP b) and type is an integral type, return a
9382 new expression involving the new type. Canonicalize
9383 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
9384 non-integral type.
9385 Do not fold the result, as that would not simplify further; folding
9386 again would also lead to infinite recursion. */
9387 if (TREE_CODE (type) == BOOLEAN_TYPE)
9388 return build2_loc (loc, TREE_CODE (op0), type,
9389 TREE_OPERAND (op0, 0),
9390 TREE_OPERAND (op0, 1));
9391 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9392 && TREE_CODE (type) != VECTOR_TYPE)
9393 return build3_loc (loc, COND_EXPR, type, op0,
9394 constant_boolean_node (true, type),
9395 constant_boolean_node (false, type));
9398 /* Handle (T *)&A.B.C for A being of type T and B and C
9399 living at offset zero. This occurs frequently in
9400 C++ upcasting and then accessing the base. */
9401 if (TREE_CODE (op0) == ADDR_EXPR
9402 && POINTER_TYPE_P (type)
9403 && handled_component_p (TREE_OPERAND (op0, 0)))
9405 poly_int64 bitsize, bitpos;
9406 tree offset;
9407 machine_mode mode;
9408 int unsignedp, reversep, volatilep;
9409 tree base
9410 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9411 &offset, &mode, &unsignedp, &reversep,
9412 &volatilep);
9413 /* If the reference was to a (constant) zero offset, we can use
9414 the address of the base if it has the same base type
9415 as the result type and the pointer type is unqualified. */
9416 if (!offset
9417 && known_eq (bitpos, 0)
9418 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9419 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9420 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9421 return fold_convert_loc (loc, type,
9422 build_fold_addr_expr_loc (loc, base));
9425 if (TREE_CODE (op0) == MODIFY_EXPR
9426 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9427 /* Detect assigning a bitfield. */
9428 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9429 && DECL_BIT_FIELD
9430 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9432 /* Don't leave an assignment inside a conversion
9433 unless assigning a bitfield. */
9434 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9435 /* First do the assignment, then return converted constant. */
9436 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9437 suppress_warning (tem /* What warning? */);
9438 TREE_USED (tem) = 1;
9439 return tem;
9442 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9443 constant (if x has signed type, the sign bit cannot be set
9444 in c). This folds the extension into the BIT_AND_EXPR.
9445 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9446 very likely don't have maximal range for their precision and this
9447 transformation effectively doesn't preserve non-maximal ranges. */
9448 if (TREE_CODE (type) == INTEGER_TYPE
9449 && TREE_CODE (op0) == BIT_AND_EXPR
9450 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9452 tree and_expr = op0;
9453 tree and0 = TREE_OPERAND (and_expr, 0);
9454 tree and1 = TREE_OPERAND (and_expr, 1);
9455 int change = 0;
9457 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9458 || (TYPE_PRECISION (type)
9459 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9460 change = 1;
9461 else if (TYPE_PRECISION (TREE_TYPE (and1))
9462 <= HOST_BITS_PER_WIDE_INT
9463 && tree_fits_uhwi_p (and1))
9465 unsigned HOST_WIDE_INT cst;
9467 cst = tree_to_uhwi (and1);
9468 cst &= HOST_WIDE_INT_M1U
9469 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9470 change = (cst == 0);
9471 if (change
9472 && !flag_syntax_only
9473 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9474 == ZERO_EXTEND))
9476 tree uns = unsigned_type_for (TREE_TYPE (and0));
9477 and0 = fold_convert_loc (loc, uns, and0);
9478 and1 = fold_convert_loc (loc, uns, and1);
9481 if (change)
9483 tem = force_fit_type (type, wi::to_widest (and1), 0,
9484 TREE_OVERFLOW (and1));
9485 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9486 fold_convert_loc (loc, type, and0), tem);
9490 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9491 cast (T1)X will fold away. We assume that this happens when X itself
9492 is a cast. */
9493 if (POINTER_TYPE_P (type)
9494 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9495 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9497 tree arg00 = TREE_OPERAND (arg0, 0);
9498 tree arg01 = TREE_OPERAND (arg0, 1);
9500 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9501 when the pointed type needs higher alignment than
9502 the p+ first operand's pointed type. */
9503 if (!in_gimple_form
9504 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9505 && (min_align_of_type (TREE_TYPE (type))
9506 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9507 return NULL_TREE;
9509 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9510 when type is a reference type and arg00's type is not,
9511 because arg00 could be validly nullptr and if arg01 doesn't return,
9512 we don't want false positive binding of reference to nullptr. */
9513 if (TREE_CODE (type) == REFERENCE_TYPE
9514 && !in_gimple_form
9515 && sanitize_flags_p (SANITIZE_NULL)
9516 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9517 return NULL_TREE;
9519 arg00 = fold_convert_loc (loc, type, arg00);
9520 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9523 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9524 of the same precision, and X is an integer type not narrower than
9525 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9526 if (INTEGRAL_TYPE_P (type)
9527 && TREE_CODE (op0) == BIT_NOT_EXPR
9528 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9529 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9530 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9532 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9533 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9534 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9535 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9536 fold_convert_loc (loc, type, tem));
9539 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9540 type of X and Y (integer types only). */
9541 if (INTEGRAL_TYPE_P (type)
9542 && TREE_CODE (op0) == MULT_EXPR
9543 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9544 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9545 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9546 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9548 /* Be careful not to introduce new overflows. */
9549 tree mult_type;
9550 if (TYPE_OVERFLOW_WRAPS (type))
9551 mult_type = type;
9552 else
9553 mult_type = unsigned_type_for (type);
9555 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9557 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9558 fold_convert_loc (loc, mult_type,
9559 TREE_OPERAND (op0, 0)),
9560 fold_convert_loc (loc, mult_type,
9561 TREE_OPERAND (op0, 1)));
9562 return fold_convert_loc (loc, type, tem);
9566 return NULL_TREE;
9568 case VIEW_CONVERT_EXPR:
9569 if (TREE_CODE (op0) == MEM_REF)
9571 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9572 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9573 tem = fold_build2_loc (loc, MEM_REF, type,
9574 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9575 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9576 return tem;
9579 return NULL_TREE;
9581 case NEGATE_EXPR:
9582 tem = fold_negate_expr (loc, arg0);
9583 if (tem)
9584 return fold_convert_loc (loc, type, tem);
9585 return NULL_TREE;
9587 case ABS_EXPR:
9588 /* Convert fabs((double)float) into (double)fabsf(float). */
9589 if (TREE_CODE (arg0) == NOP_EXPR
9590 && TREE_CODE (type) == REAL_TYPE)
9592 tree targ0 = strip_float_extensions (arg0);
9593 if (targ0 != arg0)
9594 return fold_convert_loc (loc, type,
9595 fold_build1_loc (loc, ABS_EXPR,
9596 TREE_TYPE (targ0),
9597 targ0));
9599 return NULL_TREE;
9601 case BIT_NOT_EXPR:
9602 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9603 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9604 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9605 fold_convert_loc (loc, type,
9606 TREE_OPERAND (arg0, 0)))))
9607 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9608 fold_convert_loc (loc, type,
9609 TREE_OPERAND (arg0, 1)));
9610 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9611 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9612 fold_convert_loc (loc, type,
9613 TREE_OPERAND (arg0, 1)))))
9614 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9615 fold_convert_loc (loc, type,
9616 TREE_OPERAND (arg0, 0)), tem);
9618 return NULL_TREE;
9620 case TRUTH_NOT_EXPR:
9621 /* Note that the operand of this must be an int
9622 and its values must be 0 or 1.
9623 ("true" is a fixed value perhaps depending on the language,
9624 but we don't handle values other than 1 correctly yet.) */
9625 tem = fold_truth_not_expr (loc, arg0);
9626 if (!tem)
9627 return NULL_TREE;
9628 return fold_convert_loc (loc, type, tem);
9630 case INDIRECT_REF:
9631 /* Fold *&X to X if X is an lvalue. */
9632 if (TREE_CODE (op0) == ADDR_EXPR)
9634 tree op00 = TREE_OPERAND (op0, 0);
9635 if ((VAR_P (op00)
9636 || TREE_CODE (op00) == PARM_DECL
9637 || TREE_CODE (op00) == RESULT_DECL)
9638 && !TREE_READONLY (op00))
9639 return op00;
9641 return NULL_TREE;
9643 default:
9644 return NULL_TREE;
9645 } /* switch (code) */
9649 /* If the operation was a conversion do _not_ mark a resulting constant
9650 with TREE_OVERFLOW if the original constant was not. These conversions
9651 have implementation defined behavior and retaining the TREE_OVERFLOW
9652 flag here would confuse later passes such as VRP. */
9653 tree
9654 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9655 tree type, tree op0)
9657 tree res = fold_unary_loc (loc, code, type, op0);
9658 if (res
9659 && TREE_CODE (res) == INTEGER_CST
9660 && TREE_CODE (op0) == INTEGER_CST
9661 && CONVERT_EXPR_CODE_P (code))
9662 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9664 return res;
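/* For example, (int) 4294967295u folds to -1: the conversion is merely
   implementation-defined, so the folded constant must not carry a
   TREE_OVERFLOW flag that the operand lacked.  */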
9667 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9668 operands OP0 and OP1. LOC is the location of the resulting expression.
9669 ARG0 and ARG1 are OP0 and OP1 with their conversion NOPs stripped.
9670 Return the folded expression if folding is successful. Otherwise,
9671 return NULL_TREE. */
9672 static tree
9673 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9674 tree arg0, tree arg1, tree op0, tree op1)
9676 tree tem;
9678 /* We only do these simplifications if we are optimizing. */
9679 if (!optimize)
9680 return NULL_TREE;
9682 /* Check for things like (A || B) && (A || C). We can convert this
9683 to A || (B && C). Note that either operator can be any of the four
9684 truth and/or operations and the transformation will still be
9685 valid. Also note that we only care about order for the
9686 ANDIF and ORIF operators. If B contains side effects, this
9687 might change the truth-value of A. */
9688 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9689 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9690 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9691 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9692 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9693 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9695 tree a00 = TREE_OPERAND (arg0, 0);
9696 tree a01 = TREE_OPERAND (arg0, 1);
9697 tree a10 = TREE_OPERAND (arg1, 0);
9698 tree a11 = TREE_OPERAND (arg1, 1);
9699 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9700 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9701 && (code == TRUTH_AND_EXPR
9702 || code == TRUTH_OR_EXPR));
9704 if (operand_equal_p (a00, a10, 0))
9705 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9706 fold_build2_loc (loc, code, type, a01, a11));
9707 else if (commutative && operand_equal_p (a00, a11, 0))
9708 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9709 fold_build2_loc (loc, code, type, a01, a10));
9710 else if (commutative && operand_equal_p (a01, a10, 0))
9711 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9712 fold_build2_loc (loc, code, type, a00, a11));
9714 /* This case is tricky because we must either have commutative
9715 operators or else A10 must not have side-effects. */
9717 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9718 && operand_equal_p (a01, a11, 0))
9719 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9720 fold_build2_loc (loc, code, type, a00, a10),
9721 a01);
9724 /* See if we can build a range comparison. */
9725 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9726 return tem;
9728 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9729 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9731 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9732 if (tem)
9733 return fold_build2_loc (loc, code, type, tem, arg1);
9736 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9737 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9739 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9740 if (tem)
9741 return fold_build2_loc (loc, code, type, arg0, tem);
9744 /* Check for the possibility of merging component references. If our
9745 lhs is another similar operation, try to merge its rhs with our
9746 rhs. Then try to merge our lhs and rhs. */
9747 if (TREE_CODE (arg0) == code
9748 && (tem = fold_truth_andor_1 (loc, code, type,
9749 TREE_OPERAND (arg0, 1), arg1)) != 0)
9750 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9752 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9753 return tem;
9755 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9756 if (param_logical_op_non_short_circuit != -1)
9757 logical_op_non_short_circuit
9758 = param_logical_op_non_short_circuit;
9759 if (logical_op_non_short_circuit
9760 && !sanitize_coverage_p ()
9761 && (code == TRUTH_AND_EXPR
9762 || code == TRUTH_ANDIF_EXPR
9763 || code == TRUTH_OR_EXPR
9764 || code == TRUTH_ORIF_EXPR))
9766 enum tree_code ncode, icode;
9768 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9769 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9770 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9772 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9773 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9774 We don't want to pack more than two leaves into a non-IF AND/OR
9775 expression.
9776 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9777 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9778 If the inner right-hand side of the left-hand operand has
9779 side-effects, or isn't simple, then we can't add to it,
9780 as otherwise we might destroy the if-sequence. */
9781 if (TREE_CODE (arg0) == icode
9782 && simple_condition_p (arg1)
9783 /* Needed for sequence points to handle trappings, and
9784 side-effects. */
9785 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9787 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9788 arg1);
9789 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9790 tem);
9792 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9793 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9794 else if (TREE_CODE (arg1) == icode
9795 && simple_condition_p (arg0)
9796 /* Needed for sequence points to handle trappings, and
9797 side-effects. */
9798 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9800 tem = fold_build2_loc (loc, ncode, type,
9801 arg0, TREE_OPERAND (arg1, 0));
9802 return fold_build2_loc (loc, icode, type, tem,
9803 TREE_OPERAND (arg1, 1));
9805 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9806 into (A OR B).
9807 For sequence point consistency, we need to check for trapping,
9808 and side-effects. */
9809 else if (code == icode && simple_condition_p (arg0)
9810 && simple_condition_p (arg1))
9811 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9814 return NULL_TREE;
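/* The distribution used above, (A || B) && (A || C) == A || (B && C),
   can be checked exhaustively over booleans; a minimal sketch (side
   effects, which the folder rules out separately, are ignored here):  */
static bool
sketch_check_andor_distribution (void)
{
  for (int a = 0; a < 2; ++a)
    for (int b = 0; b < 2; ++b)
      for (int c = 0; c < 2; ++c)
        if (((a || b) && (a || c)) != (a || (b && c)))
          return false;
  return true;  /* Holds for all eight assignments.  */
}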
9817 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9818 by changing CODE to reduce the magnitude of constants involved in
9819 ARG0 of the comparison.
9820 Returns a canonicalized comparison tree if a simplification was
9821 possible, otherwise returns NULL_TREE.
9822 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9823 valid if signed overflow is undefined. */
9825 static tree
9826 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9827 tree arg0, tree arg1,
9828 bool *strict_overflow_p)
9830 enum tree_code code0 = TREE_CODE (arg0);
9831 tree t, cst0 = NULL_TREE;
9832 int sgn0;
9834 /* Match A +- CST code arg1. We can change this only if overflow
9835 is undefined. */
9836 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9837 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9838 /* In principle pointers also have undefined overflow behavior,
9839 but that causes problems elsewhere. */
9840 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9841 && (code0 == MINUS_EXPR
9842 || code0 == PLUS_EXPR)
9843 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9844 return NULL_TREE;
9846 /* Identify the constant in arg0 and its sign. */
9847 cst0 = TREE_OPERAND (arg0, 1);
9848 sgn0 = tree_int_cst_sgn (cst0);
9850 /* Overflowed constants and zero will cause problems. */
9851 if (integer_zerop (cst0)
9852 || TREE_OVERFLOW (cst0))
9853 return NULL_TREE;
9855 /* See if we can reduce the magnitude of the constant in
9856 arg0 by changing the comparison code. */
9857 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9858 if (code == LT_EXPR
9859 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9860 code = LE_EXPR;
9861 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9862 else if (code == GT_EXPR
9863 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9864 code = GE_EXPR;
9865 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9866 else if (code == LE_EXPR
9867 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9868 code = LT_EXPR;
9869 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9870 else if (code == GE_EXPR
9871 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9872 code = GT_EXPR;
9873 else
9874 return NULL_TREE;
9875 *strict_overflow_p = true;
9877 /* Now build the constant reduced in magnitude. But not if that
9878 would produce one outside of its type's range. */
9879 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9880 && ((sgn0 == 1
9881 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9882 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9883 || (sgn0 == -1
9884 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9885 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9886 return NULL_TREE;
9888 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9889 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9890 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9891 t = fold_convert (TREE_TYPE (arg1), t);
9893 return fold_build2_loc (loc, code, type, t, arg1);
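/* The rewrites above rest on identities such as A - CST < B being
   equivalent to A - (CST - 1) <= B whenever the subtractions cannot
   overflow, which is exactly what the TYPE_OVERFLOW_UNDEFINED guard
   ensures.  A minimal sketch of one such identity (the caller must keep
   both subtractions overflow-free):  */
static bool
sketch_check_reduced_magnitude (long a, long b, long cst)
{
  return (a - cst < b) == (a - (cst - 1) <= b);
}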
9896 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9897 overflow further. Try to decrease the magnitude of constants involved
9898 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9899 and put sole constants at the second argument position.
9900 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9902 static tree
9903 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9904 tree arg0, tree arg1)
9906 tree t;
9907 bool strict_overflow_p;
9908 const char * const warnmsg = G_("assuming signed overflow does not occur "
9909 "when reducing constant in comparison");
9911 /* Try canonicalization by simplifying arg0. */
9912 strict_overflow_p = false;
9913 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9914 &strict_overflow_p);
9915 if (t)
9917 if (strict_overflow_p)
9918 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9919 return t;
9922 /* Try canonicalization by simplifying arg1 using the swapped
9923 comparison. */
9924 code = swap_tree_comparison (code);
9925 strict_overflow_p = false;
9926 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9927 &strict_overflow_p);
9928 if (t && strict_overflow_p)
9929 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9930 return t;
9933 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9934 space. This is used to avoid issuing overflow warnings for
9935 expressions like &p->x which cannot wrap. */
9937 static bool
9938 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9940 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9941 return true;
9943 if (maybe_lt (bitpos, 0))
9944 return true;
9946 poly_wide_int wi_offset;
9947 int precision = TYPE_PRECISION (TREE_TYPE (base));
9948 if (offset == NULL_TREE)
9949 wi_offset = wi::zero (precision);
9950 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9951 return true;
9952 else
9953 wi_offset = wi::to_poly_wide (offset);
9955 wi::overflow_type overflow;
9956 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9957 precision);
9958 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9959 if (overflow)
9960 return true;
9962 poly_uint64 total_hwi, size;
9963 if (!total.to_uhwi (&total_hwi)
9964 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9965 &size)
9966 || known_eq (size, 0U))
9967 return true;
9969 if (known_le (total_hwi, size))
9970 return false;
9972 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9973 array. */
9974 if (TREE_CODE (base) == ADDR_EXPR
9975 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9976 &size)
9977 && maybe_ne (size, 0U)
9978 && known_le (total_hwi, size))
9979 return false;
9981 return true;
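/* In plain integers the check above amounts to: the access cannot wrap
   when the byte offset plus the byte part of the bit position stays
   within the object.  A rough sketch, with the overflow and poly-int
   handling elided (names illustrative):  */
static bool
sketch_access_stays_in_object (unsigned long offset_bytes,
                               unsigned long bitpos_bits,
                               unsigned long object_size_bytes)
{
  unsigned long total = offset_bytes + bitpos_bits / 8;  /* Round down.  */
  return total <= object_size_bytes;
}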
9984 /* Return a positive integer when the symbol DECL is known to have
9985 a nonzero address, zero when it's known not to (e.g., it's a weak
9986 symbol), and a negative integer when the symbol is not yet in the
9987 symbol table and so whether or not its address is zero is unknown.
9988 For function-local objects, always return a positive integer. */
9989 static int
9990 maybe_nonzero_address (tree decl)
9992 /* Normally, don't do anything for variables and functions before symtab is
9993 built; it is quite possible that DECL will be declared weak later.
9994 But if folding_initializer, we need a constant answer now, so create
9995 the symtab entry and prevent later weak declaration. */
9996 if (DECL_P (decl) && decl_in_symtab_p (decl))
9997 if (struct symtab_node *symbol
9998 = (folding_initializer
9999 ? symtab_node::get_create (decl)
10000 : symtab_node::get (decl)))
10001 return symbol->nonzero_address ();
10003 /* Function local objects are never NULL. */
10004 if (DECL_P (decl)
10005 && (DECL_CONTEXT (decl)
10006 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10007 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10008 return 1;
10010 return -1;
10013 /* Subroutine of fold_binary. This routine performs all of the
10014 transformations that are common to the equality/inequality
10015 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10016 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10017 fold_binary should call fold_binary, not this routine directly. Fold a comparison with
10018 tree code CODE and type TYPE with operands OP0 and OP1. Return
10019 the folded comparison or NULL_TREE. */
10021 static tree
10022 fold_comparison (location_t loc, enum tree_code code, tree type,
10023 tree op0, tree op1)
10025 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10026 tree arg0, arg1, tem;
10028 arg0 = op0;
10029 arg1 = op1;
10031 STRIP_SIGN_NOPS (arg0);
10032 STRIP_SIGN_NOPS (arg1);
10034 /* For comparisons of pointers we can decompose it to a compile time
10035 comparison of the base objects and the offsets into the object.
10036 This requires at least one operand being an ADDR_EXPR or a
10037 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10038 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10039 && (TREE_CODE (arg0) == ADDR_EXPR
10040 || TREE_CODE (arg1) == ADDR_EXPR
10041 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10042 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10044 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10045 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10046 machine_mode mode;
10047 int volatilep, reversep, unsignedp;
10048 bool indirect_base0 = false, indirect_base1 = false;
10050 /* Get base and offset for the access. Strip ADDR_EXPR for
10051 get_inner_reference, but put it back by stripping INDIRECT_REF
10052 off the base object if possible. indirect_baseN will be true
10053 if baseN is not an address but refers to the object itself. */
10054 base0 = arg0;
10055 if (TREE_CODE (arg0) == ADDR_EXPR)
10057 base0
10058 = get_inner_reference (TREE_OPERAND (arg0, 0),
10059 &bitsize, &bitpos0, &offset0, &mode,
10060 &unsignedp, &reversep, &volatilep);
10061 if (INDIRECT_REF_P (base0))
10062 base0 = TREE_OPERAND (base0, 0);
10063 else
10064 indirect_base0 = true;
10066 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10068 base0 = TREE_OPERAND (arg0, 0);
10069 STRIP_SIGN_NOPS (base0);
10070 if (TREE_CODE (base0) == ADDR_EXPR)
10072 base0
10073 = get_inner_reference (TREE_OPERAND (base0, 0),
10074 &bitsize, &bitpos0, &offset0, &mode,
10075 &unsignedp, &reversep, &volatilep);
10076 if (INDIRECT_REF_P (base0))
10077 base0 = TREE_OPERAND (base0, 0);
10078 else
10079 indirect_base0 = true;
10081 if (offset0 == NULL_TREE || integer_zerop (offset0))
10082 offset0 = TREE_OPERAND (arg0, 1);
10083 else
10084 offset0 = size_binop (PLUS_EXPR, offset0,
10085 TREE_OPERAND (arg0, 1));
10086 if (poly_int_tree_p (offset0))
10088 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10089 TYPE_PRECISION (sizetype));
10090 tem <<= LOG2_BITS_PER_UNIT;
10091 tem += bitpos0;
10092 if (tem.to_shwi (&bitpos0))
10093 offset0 = NULL_TREE;
10097 base1 = arg1;
10098 if (TREE_CODE (arg1) == ADDR_EXPR)
10100 base1
10101 = get_inner_reference (TREE_OPERAND (arg1, 0),
10102 &bitsize, &bitpos1, &offset1, &mode,
10103 &unsignedp, &reversep, &volatilep);
10104 if (INDIRECT_REF_P (base1))
10105 base1 = TREE_OPERAND (base1, 0);
10106 else
10107 indirect_base1 = true;
10109 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10111 base1 = TREE_OPERAND (arg1, 0);
10112 STRIP_SIGN_NOPS (base1);
10113 if (TREE_CODE (base1) == ADDR_EXPR)
10115 base1
10116 = get_inner_reference (TREE_OPERAND (base1, 0),
10117 &bitsize, &bitpos1, &offset1, &mode,
10118 &unsignedp, &reversep, &volatilep);
10119 if (INDIRECT_REF_P (base1))
10120 base1 = TREE_OPERAND (base1, 0);
10121 else
10122 indirect_base1 = true;
10124 if (offset1 == NULL_TREE || integer_zerop (offset1))
10125 offset1 = TREE_OPERAND (arg1, 1);
10126 else
10127 offset1 = size_binop (PLUS_EXPR, offset1,
10128 TREE_OPERAND (arg1, 1));
10129 if (poly_int_tree_p (offset1))
10131 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10132 TYPE_PRECISION (sizetype));
10133 tem <<= LOG2_BITS_PER_UNIT;
10134 tem += bitpos1;
10135 if (tem.to_shwi (&bitpos1))
10136 offset1 = NULL_TREE;
10140 /* If we have equivalent bases we might be able to simplify. */
10141 if (indirect_base0 == indirect_base1
10142 && operand_equal_p (base0, base1,
10143 indirect_base0 ? OEP_ADDRESS_OF : 0))
10145 /* We can fold this expression to a constant if the non-constant
10146 offset parts are equal. */
10147 if ((offset0 == offset1
10148 || (offset0 && offset1
10149 && operand_equal_p (offset0, offset1, 0)))
10150 && (equality_code
10151 || (indirect_base0
10152 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10153 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10155 if (!equality_code
10156 && maybe_ne (bitpos0, bitpos1)
10157 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10158 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10159 fold_overflow_warning (("assuming pointer wraparound does not "
10160 "occur when comparing P +- C1 with "
10161 "P +- C2"),
10162 WARN_STRICT_OVERFLOW_CONDITIONAL);
10164 switch (code)
10166 case EQ_EXPR:
10167 if (known_eq (bitpos0, bitpos1))
10168 return constant_boolean_node (true, type);
10169 if (known_ne (bitpos0, bitpos1))
10170 return constant_boolean_node (false, type);
10171 break;
10172 case NE_EXPR:
10173 if (known_ne (bitpos0, bitpos1))
10174 return constant_boolean_node (true, type);
10175 if (known_eq (bitpos0, bitpos1))
10176 return constant_boolean_node (false, type);
10177 break;
10178 case LT_EXPR:
10179 if (known_lt (bitpos0, bitpos1))
10180 return constant_boolean_node (true, type);
10181 if (known_ge (bitpos0, bitpos1))
10182 return constant_boolean_node (false, type);
10183 break;
10184 case LE_EXPR:
10185 if (known_le (bitpos0, bitpos1))
10186 return constant_boolean_node (true, type);
10187 if (known_gt (bitpos0, bitpos1))
10188 return constant_boolean_node (false, type);
10189 break;
10190 case GE_EXPR:
10191 if (known_ge (bitpos0, bitpos1))
10192 return constant_boolean_node (true, type);
10193 if (known_lt (bitpos0, bitpos1))
10194 return constant_boolean_node (false, type);
10195 break;
10196 case GT_EXPR:
10197 if (known_gt (bitpos0, bitpos1))
10198 return constant_boolean_node (true, type);
10199 if (known_le (bitpos0, bitpos1))
10200 return constant_boolean_node (false, type);
10201 break;
10202 default:;
10205 /* We can simplify the comparison to a comparison of the variable
10206 offset parts if the constant offset parts are equal.
10207 Be careful to use signed sizetype here because otherwise we
10208 mess with array offsets in the wrong way. This is possible
10209 because pointer arithmetic is restricted to remain within an
10210 object and overflow on pointer differences is undefined as of
10211 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10212 else if (known_eq (bitpos0, bitpos1)
10213 && (equality_code
10214 || (indirect_base0
10215 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10216 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10218 /* By converting to signed sizetype we cover middle-end pointer
10219 arithmetic which operates on unsigned pointer types of size
10220 type size and ARRAY_REF offsets which are properly sign or
10221 zero extended from their type in case it is narrower than
10222 sizetype. */
10223 if (offset0 == NULL_TREE)
10224 offset0 = build_int_cst (ssizetype, 0);
10225 else
10226 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10227 if (offset1 == NULL_TREE)
10228 offset1 = build_int_cst (ssizetype, 0);
10229 else
10230 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10232 if (!equality_code
10233 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10234 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10235 fold_overflow_warning (("assuming pointer wraparound does not "
10236 "occur when comparing P +- C1 with "
10237 "P +- C2"),
10238 WARN_STRICT_OVERFLOW_COMPARISON);
10240 return fold_build2_loc (loc, code, type, offset0, offset1);
10243 /* For equal offsets we can simplify to a comparison of the
10244 base addresses. */
10245 else if (known_eq (bitpos0, bitpos1)
10246 && (indirect_base0
10247 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10248 && (indirect_base1
10249 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10250 && ((offset0 == offset1)
10251 || (offset0 && offset1
10252 && operand_equal_p (offset0, offset1, 0))))
10254 if (indirect_base0)
10255 base0 = build_fold_addr_expr_loc (loc, base0);
10256 if (indirect_base1)
10257 base1 = build_fold_addr_expr_loc (loc, base1);
10258 return fold_build2_loc (loc, code, type, base0, base1);
10260 /* Comparison between an ordinary (non-weak) symbol and a null
10261 pointer can be eliminated since such symbols must have a non
10262 null address. In C, relational expressions between pointers
10263 to objects and null pointers are undefined. The results
10264 below follow the C++ rules with the additional property that
10265 every object pointer compares greater than a null pointer. */
10267 else if (((DECL_P (base0)
10268 && maybe_nonzero_address (base0) > 0
10269 /* Avoid folding references to struct members at offset 0 to
10270 prevent tests like '&ptr->firstmember == 0' from getting
10271 eliminated. When ptr is null, although the -> expression
10272 is strictly speaking invalid, GCC retains it as a matter
10273 of QoI. See PR c/44555. */
10274 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10275 || CONSTANT_CLASS_P (base0))
10276 && indirect_base0
10277 /* The caller guarantees that when one of the arguments is
10278 constant (i.e., null in this case) it is second. */
10279 && integer_zerop (arg1))
10281 switch (code)
10283 case EQ_EXPR:
10284 case LE_EXPR:
10285 case LT_EXPR:
10286 return constant_boolean_node (false, type);
10287 case GE_EXPR:
10288 case GT_EXPR:
10289 case NE_EXPR:
10290 return constant_boolean_node (true, type);
10291 default:
10292 gcc_unreachable ();
10297 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10298 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10299 the resulting offset is smaller in absolute value than the
10300 original one and has the same sign. */
10301 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10302 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10303 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10304 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10305 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10306 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10307 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10308 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10310 tree const1 = TREE_OPERAND (arg0, 1);
10311 tree const2 = TREE_OPERAND (arg1, 1);
10312 tree variable1 = TREE_OPERAND (arg0, 0);
10313 tree variable2 = TREE_OPERAND (arg1, 0);
10314 tree cst;
10315 const char * const warnmsg = G_("assuming signed overflow does not "
10316 "occur when combining constants around "
10317 "a comparison");
10319 /* Put the constant on the side where it doesn't overflow and is
10320 of lower absolute value and of the same sign as before. */
10321 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10322 ? MINUS_EXPR : PLUS_EXPR,
10323 const2, const1);
10324 if (!TREE_OVERFLOW (cst)
10325 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10326 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10328 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10329 return fold_build2_loc (loc, code, type,
10330 variable1,
10331 fold_build2_loc (loc, TREE_CODE (arg1),
10332 TREE_TYPE (arg1),
10333 variable2, cst));
10336 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10337 ? MINUS_EXPR : PLUS_EXPR,
10338 const1, const2);
10339 if (!TREE_OVERFLOW (cst)
10340 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10341 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10343 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10344 return fold_build2_loc (loc, code, type,
10345 fold_build2_loc (loc, TREE_CODE (arg0),
10346 TREE_TYPE (arg0),
10347 variable1, cst),
10348 variable2);
10352 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10353 if (tem)
10354 return tem;
10356 /* If we are comparing an expression that just has comparisons
10357 of two integer values, arithmetic expressions of those comparisons,
10358 and constants, we can simplify it. There are only three cases
10359 to check: the two values can either be equal, the first can be
10360 greater, or the second can be greater. Fold the expression for
10361 those three values. Since each value must be 0 or 1, we have
10362 eight possibilities, each of which corresponds to the constant 0
10363 or 1 or one of the six possible comparisons.
10365 This handles common cases like (a > b) == 0 but also handles
10366 expressions like ((x > y) - (y > x)) > 0, which supposedly
10367 occur in macroized code. */
10369 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10371 tree cval1 = 0, cval2 = 0;
10373 if (twoval_comparison_p (arg0, &cval1, &cval2)
10374 /* Don't handle degenerate cases here; they should already
10375 have been handled anyway. */
10376 && cval1 != 0 && cval2 != 0
10377 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10378 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10379 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10380 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10381 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10382 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10383 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10385 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10386 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10388 /* We can't just pass T to eval_subst in case cval1 or cval2
10389 was the same as ARG1. */
10391 tree high_result
10392 = fold_build2_loc (loc, code, type,
10393 eval_subst (loc, arg0, cval1, maxval,
10394 cval2, minval),
10395 arg1);
10396 tree equal_result
10397 = fold_build2_loc (loc, code, type,
10398 eval_subst (loc, arg0, cval1, maxval,
10399 cval2, maxval),
10400 arg1);
10401 tree low_result
10402 = fold_build2_loc (loc, code, type,
10403 eval_subst (loc, arg0, cval1, minval,
10404 cval2, maxval),
10405 arg1);
10407 /* All three of these results should be 0 or 1. Confirm they are.
10408 Then use those values to select the proper code to use. */
10410 if (TREE_CODE (high_result) == INTEGER_CST
10411 && TREE_CODE (equal_result) == INTEGER_CST
10412 && TREE_CODE (low_result) == INTEGER_CST)
10414 /* Make a 3-bit mask with the high-order bit being the
10415 value for `>', the next for '=', and the low for '<'. */
10416 switch ((integer_onep (high_result) * 4)
10417 + (integer_onep (equal_result) * 2)
10418 + integer_onep (low_result))
10420 case 0:
10421 /* Always false. */
10422 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10423 case 1:
10424 code = LT_EXPR;
10425 break;
10426 case 2:
10427 code = EQ_EXPR;
10428 break;
10429 case 3:
10430 code = LE_EXPR;
10431 break;
10432 case 4:
10433 code = GT_EXPR;
10434 break;
10435 case 5:
10436 code = NE_EXPR;
10437 break;
10438 case 6:
10439 code = GE_EXPR;
10440 break;
10441 case 7:
10442 /* Always true. */
10443 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10446 return fold_build2_loc (loc, code, type, cval1, cval2);
10451 return NULL_TREE;
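/* The three-way evaluation above can be seen on ((x > y) - (y > x)) > 0:
   substituting (max, min), (max, max) and (min, max) for (x, y) yields
   1, 0 and 0, i.e. mask 4, which selects GT_EXPR, so the whole expression
   folds to x > y.  A minimal sketch of the mask-to-code selection (the
   function name is illustrative):  */
static const char *
sketch_select_comparison (bool high, bool equal, bool low)
{
  switch (high * 4 + equal * 2 + low)
    {
    case 0: return "false";  /* Never true.  */
    case 1: return "<";
    case 2: return "==";
    case 3: return "<=";
    case 4: return ">";
    case 5: return "!=";
    case 6: return ">=";
    default: return "true";  /* Always true.  */
    }
}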
10455 /* Subroutine of fold_binary. Optimize complex multiplications of the
10456 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10457 argument EXPR represents the expression "z" of type TYPE. */
10459 static tree
10460 fold_mult_zconjz (location_t loc, tree type, tree expr)
10462 tree itype = TREE_TYPE (type);
10463 tree rpart, ipart, tem;
10465 if (TREE_CODE (expr) == COMPLEX_EXPR)
10467 rpart = TREE_OPERAND (expr, 0);
10468 ipart = TREE_OPERAND (expr, 1);
10470 else if (TREE_CODE (expr) == COMPLEX_CST)
10472 rpart = TREE_REALPART (expr);
10473 ipart = TREE_IMAGPART (expr);
10475 else
10477 expr = save_expr (expr);
10478 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10479 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10482 rpart = save_expr (rpart);
10483 ipart = save_expr (ipart);
10484 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10485 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10486 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10487 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10488 build_zero_cst (itype));
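/* The identity used above is z * conj(z) == re*re + im*im with a zero
   imaginary part.  A minimal value-level sketch, assuming <complex> and
   ignoring NaNs and infinities:  */
static bool
sketch_check_zconjz (std::complex<double> z)
{
  std::complex<double> lhs = z * std::conj (z);
  std::complex<double> rhs (z.real () * z.real ()
                            + z.imag () * z.imag (), 0.0);
  return lhs == rhs;
}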
10492 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10493 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10494 true if successful. */
10496 static bool
10497 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10499 unsigned HOST_WIDE_INT i, nunits;
10501 if (TREE_CODE (arg) == VECTOR_CST
10502 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10504 for (i = 0; i < nunits; ++i)
10505 elts[i] = VECTOR_CST_ELT (arg, i);
10507 else if (TREE_CODE (arg) == CONSTRUCTOR)
10509 constructor_elt *elt;
10511 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10512 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10513 return false;
10514 else
10515 elts[i] = elt->value;
10517 else
10518 return false;
10519 for (; i < nelts; i++)
10520 elts[i]
10521 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10522 return true;
10525 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10526 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10527 NULL_TREE otherwise. */
10529 tree
10530 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10532 unsigned int i;
10533 unsigned HOST_WIDE_INT nelts;
10534 bool need_ctor = false;
10536 if (!sel.length ().is_constant (&nelts))
10537 return NULL_TREE;
10538 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10539 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10540 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10541 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10542 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10543 return NULL_TREE;
10545 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10546 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10547 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10548 return NULL_TREE;
10550 tree_vector_builder out_elts (type, nelts, 1);
10551 for (i = 0; i < nelts; i++)
10553 HOST_WIDE_INT index;
10554 if (!sel[i].is_constant (&index))
10555 return NULL_TREE;
10556 if (!CONSTANT_CLASS_P (in_elts[index]))
10557 need_ctor = true;
10558 out_elts.quick_push (unshare_expr (in_elts[index]));
10561 if (need_ctor)
10563 vec<constructor_elt, va_gc> *v;
10564 vec_alloc (v, nelts);
10565 for (i = 0; i < nelts; i++)
10566 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10567 return build_constructor (type, v);
10569 else
10570 return out_elts.build ();
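/* The selection above is plain indexing into the concatenation of the two
   input vectors.  A minimal sketch over fixed-length arrays (names
   illustrative; OUT must hold NELTS elements):  */
static void
sketch_vec_perm (const int *arg0, const int *arg1, const unsigned *sel,
                 unsigned nelts, int *out)
{
  for (unsigned i = 0; i < nelts; ++i)
    /* Indices below NELTS pick from ARG0, the rest from ARG1.  */
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}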
10573 /* Try to fold a pointer difference of type TYPE between two address
10574 expressions of array references AREF0 and AREF1 using location LOC.
10575 Return a simplified expression for the difference or NULL_TREE. */
10577 static tree
10578 fold_addr_of_array_ref_difference (location_t loc, tree type,
10579 tree aref0, tree aref1,
10580 bool use_pointer_diff)
10582 tree base0 = TREE_OPERAND (aref0, 0);
10583 tree base1 = TREE_OPERAND (aref1, 0);
10584 tree base_offset = build_int_cst (type, 0);
10586 /* If the bases are array references as well, recurse. If the bases
10587 are pointer indirections compute the difference of the pointers.
10588 If the bases are equal, we are set. */
10589 if ((TREE_CODE (base0) == ARRAY_REF
10590 && TREE_CODE (base1) == ARRAY_REF
10591 && (base_offset
10592 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10593 use_pointer_diff)))
10594 || (INDIRECT_REF_P (base0)
10595 && INDIRECT_REF_P (base1)
10596 && (base_offset
10597 = use_pointer_diff
10598 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10599 TREE_OPERAND (base0, 0),
10600 TREE_OPERAND (base1, 0))
10601 : fold_binary_loc (loc, MINUS_EXPR, type,
10602 fold_convert (type,
10603 TREE_OPERAND (base0, 0)),
10604 fold_convert (type,
10605 TREE_OPERAND (base1, 0)))))
10606 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10608 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10609 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10610 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10611 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10612 return fold_build2_loc (loc, PLUS_EXPR, type,
10613 base_offset,
10614 fold_build2_loc (loc, MULT_EXPR, type,
10615 diff, esz));
10617 return NULL_TREE;
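/* The recursion above bottoms out in ordinary index arithmetic:
   &base[i] - &base[j] is (i - j) * element-size plus whatever the two
   bases themselves contribute.  A minimal sketch in bytes:  */
static long
sketch_array_ref_difference (long base_offset, long i, long j,
                             long element_size)
{
  return base_offset + (i - j) * element_size;
}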
10620 /* If the real or vector real constant CST of type TYPE has an exact
10621 inverse, return it, else return NULL. */
10623 tree
10624 exact_inverse (tree type, tree cst)
10626 REAL_VALUE_TYPE r;
10627 tree unit_type;
10628 machine_mode mode;
10630 switch (TREE_CODE (cst))
10632 case REAL_CST:
10633 r = TREE_REAL_CST (cst);
10635 if (exact_real_inverse (TYPE_MODE (type), &r))
10636 return build_real (type, r);
10638 return NULL_TREE;
10640 case VECTOR_CST:
10642 unit_type = TREE_TYPE (type);
10643 mode = TYPE_MODE (unit_type);
10645 tree_vector_builder elts;
10646 if (!elts.new_unary_operation (type, cst, false))
10647 return NULL_TREE;
10648 unsigned int count = elts.encoded_nelts ();
10649 for (unsigned int i = 0; i < count; ++i)
10651 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10652 if (!exact_real_inverse (mode, &r))
10653 return NULL_TREE;
10654 elts.quick_push (build_real (unit_type, r));
10657 return elts.build ();
10660 default:
10661 return NULL_TREE;
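/* In binary floating point only (suitably ranged) powers of two have an
   exactly representable reciprocal.  A rough sketch, assuming <cmath> and
   eliding the exponent-range edge cases that exact_real_inverse checks:  */
static bool
sketch_has_exact_inverse (double d)
{
  int exp;
  /* frexp yields a mantissa of +-0.5 exactly when D is a power of two.  */
  return d != 0.0 && std::fabs (std::frexp (d, &exp)) == 0.5;
}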
10665 /* Mask out the tz least significant bits of X of type TYPE where
10666 tz is the number of trailing zeroes in Y. */
10667 static wide_int
10668 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10670 int tz = wi::ctz (y);
10671 if (tz > 0)
10672 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10673 return x;
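/* For example, y = 0b11000 has three trailing zeros, so the mask clears
   the low three bits of x.  A minimal sketch, assuming C++20 <bit> with
   std::countr_zero standing in for wi::ctz and leaving Y == 0 aside:  */
static unsigned int
sketch_mask_with_tz (unsigned int x, unsigned int y)
{
  int tz = std::countr_zero (y);  /* Assumes Y != 0.  */
  return tz > 0 ? (x & ~((1u << tz) - 1)) : x;
}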
10676 /* Return true when T is an address and is known to be nonzero.
10677 For floating point we further ensure that T is not denormal.
10678 Similar logic is present in nonzero_address in rtlanal.h.
10680 If the return value is based on the assumption that signed overflow
10681 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10682 change *STRICT_OVERFLOW_P. */
10684 static bool
10685 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10687 tree type = TREE_TYPE (t);
10688 enum tree_code code;
10690 /* Doing something useful for floating point would need more work. */
10691 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10692 return false;
10694 code = TREE_CODE (t);
10695 switch (TREE_CODE_CLASS (code))
10697 case tcc_unary:
10698 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10699 strict_overflow_p);
10700 case tcc_binary:
10701 case tcc_comparison:
10702 return tree_binary_nonzero_warnv_p (code, type,
10703 TREE_OPERAND (t, 0),
10704 TREE_OPERAND (t, 1),
10705 strict_overflow_p);
10706 case tcc_constant:
10707 case tcc_declaration:
10708 case tcc_reference:
10709 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10711 default:
10712 break;
10715 switch (code)
10717 case TRUTH_NOT_EXPR:
10718 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10719 strict_overflow_p);
10721 case TRUTH_AND_EXPR:
10722 case TRUTH_OR_EXPR:
10723 case TRUTH_XOR_EXPR:
10724 return tree_binary_nonzero_warnv_p (code, type,
10725 TREE_OPERAND (t, 0),
10726 TREE_OPERAND (t, 1),
10727 strict_overflow_p);
10729 case COND_EXPR:
10730 case CONSTRUCTOR:
10731 case OBJ_TYPE_REF:
10732 case ADDR_EXPR:
10733 case WITH_SIZE_EXPR:
10734 case SSA_NAME:
10735 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10737 case COMPOUND_EXPR:
10738 case MODIFY_EXPR:
10739 case BIND_EXPR:
10740 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10741 strict_overflow_p);
10743 case SAVE_EXPR:
10744 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10745 strict_overflow_p);
10747 case CALL_EXPR:
10749 tree fndecl = get_callee_fndecl (t);
10750 if (!fndecl) return false;
10751 if (flag_delete_null_pointer_checks && !flag_check_new
10752 && DECL_IS_OPERATOR_NEW_P (fndecl)
10753 && !TREE_NOTHROW (fndecl))
10754 return true;
10755 if (flag_delete_null_pointer_checks
10756 && lookup_attribute ("returns_nonnull",
10757 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10758 return true;
10759 return alloca_call_p (t);
10762 default:
10763 break;
10765 return false;
10768 /* Return true when T is an address and is known to be nonzero.
10769 Handle warnings about undefined signed overflow. */
10771 bool
10772 tree_expr_nonzero_p (tree t)
10774 bool ret, strict_overflow_p;
10776 strict_overflow_p = false;
10777 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10778 if (strict_overflow_p)
10779 fold_overflow_warning (("assuming signed overflow does not occur when "
10780 "determining that expression is always "
10781 "non-zero"),
10782 WARN_STRICT_OVERFLOW_MISC);
10783 return ret;
10786 /* Return true if T is known not to be equal to an integer W. */
10788 bool
10789 expr_not_equal_to (tree t, const wide_int &w)
10791 int_range_max vr;
10792 switch (TREE_CODE (t))
10794 case INTEGER_CST:
10795 return wi::to_wide (t) != w;
10797 case SSA_NAME:
10798 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10799 return false;
10801 if (cfun)
10802 get_range_query (cfun)->range_of_expr (vr, t);
10803 else
10804 get_global_range_query ()->range_of_expr (vr, t);
10806 if (!vr.undefined_p () && !vr.contains_p (w))
10807 return true;
10808 /* If T has some known zero bits and W has any of those bits set,
10809 then T is known not to be equal to W. */
10810 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10811 TYPE_PRECISION (TREE_TYPE (t))), 0))
10812 return true;
10813 return false;
10815 default:
10816 return false;
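/* The nonzero-bits test above in plain masks: any bit of W that falls
   outside T's possibly-set bits proves T != W.  A minimal sketch (the
   first argument plays the role of get_nonzero_bits):  */
static bool
sketch_known_not_equal (unsigned int possibly_set_bits_of_t, unsigned int w)
{
  return (w & ~possibly_set_bits_of_t) != 0;
}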
10820 /* Fold a binary expression of code CODE and type TYPE with operands
10821 OP0 and OP1. LOC is the location of the resulting expression.
10822 Return the folded expression if folding is successful. Otherwise,
10823 return NULL_TREE. */
10825 tree
10826 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10827 tree op0, tree op1)
10829 enum tree_code_class kind = TREE_CODE_CLASS (code);
10830 tree arg0, arg1, tem;
10831 tree t1 = NULL_TREE;
10832 bool strict_overflow_p;
10833 unsigned int prec;
10835 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10836 && TREE_CODE_LENGTH (code) == 2
10837 && op0 != NULL_TREE
10838 && op1 != NULL_TREE);
10840 arg0 = op0;
10841 arg1 = op1;
10843 /* Strip any conversions that don't change the mode. This is
10844 safe for every expression, except for a comparison expression
10845 because its signedness is derived from its operands. So, in
10846 the latter case, only strip conversions that don't change the
10847 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10848 preserved.
10850 Note that this is done as an internal manipulation within the
10851 constant folder, in order to find the simplest representation
10852 of the arguments so that their form can be studied. In any
10853 cases, the appropriate type conversions should be put back in
10854 the tree that will get out of the constant folder. */
10856 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10858 STRIP_SIGN_NOPS (arg0);
10859 STRIP_SIGN_NOPS (arg1);
10861 else
10863 STRIP_NOPS (arg0);
10864 STRIP_NOPS (arg1);
10867 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10868 constant but we can't do arithmetic on them. */
10869 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10871 tem = const_binop (code, type, arg0, arg1);
10872 if (tem != NULL_TREE)
10874 if (TREE_TYPE (tem) != type)
10875 tem = fold_convert_loc (loc, type, tem);
10876 return tem;
10880 /* If this is a commutative operation, and ARG0 is a constant, move it
10881 to ARG1 to reduce the number of tests below. */
10882 if (commutative_tree_code (code)
10883 && tree_swap_operands_p (arg0, arg1))
10884 return fold_build2_loc (loc, code, type, op1, op0);
10886 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10887 to ARG1 to reduce the number of tests below. */
10888 if (kind == tcc_comparison
10889 && tree_swap_operands_p (arg0, arg1))
10890 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10892 tem = generic_simplify (loc, code, type, op0, op1);
10893 if (tem)
10894 return tem;
10896 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10898 First check for cases where an arithmetic operation is applied to a
10899 compound, conditional, or comparison operation. Push the arithmetic
10900 operation inside the compound or conditional to see if any folding
10901 can then be done. Convert comparison to conditional for this purpose.
10902 This also optimizes non-constant cases that used to be done in
10903 expand_expr.
10905 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10906 one of the operands is a comparison and the other is a comparison, a
10907 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10908 code below would make the expression more complex. Change it to a
10909 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10910 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10912 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10913 || code == EQ_EXPR || code == NE_EXPR)
10914 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10915 && ((truth_value_p (TREE_CODE (arg0))
10916 && (truth_value_p (TREE_CODE (arg1))
10917 || (TREE_CODE (arg1) == BIT_AND_EXPR
10918 && integer_onep (TREE_OPERAND (arg1, 1)))))
10919 || (truth_value_p (TREE_CODE (arg1))
10920 && (truth_value_p (TREE_CODE (arg0))
10921 || (TREE_CODE (arg0) == BIT_AND_EXPR
10922 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10924 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10925 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10926 : TRUTH_XOR_EXPR,
10927 boolean_type_node,
10928 fold_convert_loc (loc, boolean_type_node, arg0),
10929 fold_convert_loc (loc, boolean_type_node, arg1));
10931 if (code == EQ_EXPR)
10932 tem = invert_truthvalue_loc (loc, tem);
10934 return fold_convert_loc (loc, type, tem);
10937 if (TREE_CODE_CLASS (code) == tcc_binary
10938 || TREE_CODE_CLASS (code) == tcc_comparison)
10940 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10942 tem = fold_build2_loc (loc, code, type,
10943 fold_convert_loc (loc, TREE_TYPE (op0),
10944 TREE_OPERAND (arg0, 1)), op1);
10945 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10946 tem);
10948 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10950 tem = fold_build2_loc (loc, code, type, op0,
10951 fold_convert_loc (loc, TREE_TYPE (op1),
10952 TREE_OPERAND (arg1, 1)));
10953 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10954 tem);
10957 if (TREE_CODE (arg0) == COND_EXPR
10958 || TREE_CODE (arg0) == VEC_COND_EXPR
10959 || COMPARISON_CLASS_P (arg0))
10961 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10962 arg0, arg1,
10963 /*cond_first_p=*/1);
10964 if (tem != NULL_TREE)
10965 return tem;
10968 if (TREE_CODE (arg1) == COND_EXPR
10969 || TREE_CODE (arg1) == VEC_COND_EXPR
10970 || COMPARISON_CLASS_P (arg1))
10972 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10973 arg1, arg0,
10974 /*cond_first_p=*/0);
10975 if (tem != NULL_TREE)
10976 return tem;
10980 switch (code)
10982 case MEM_REF:
10983 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10984 if (TREE_CODE (arg0) == ADDR_EXPR
10985 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10987 tree iref = TREE_OPERAND (arg0, 0);
10988 return fold_build2 (MEM_REF, type,
10989 TREE_OPERAND (iref, 0),
10990 int_const_binop (PLUS_EXPR, arg1,
10991 TREE_OPERAND (iref, 1)));
10994 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10995 if (TREE_CODE (arg0) == ADDR_EXPR
10996 && handled_component_p (TREE_OPERAND (arg0, 0)))
10998 tree base;
10999 poly_int64 coffset;
11000 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11001 &coffset);
11002 if (!base)
11003 return NULL_TREE;
11004 return fold_build2 (MEM_REF, type,
11005 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11006 int_const_binop (PLUS_EXPR, arg1,
11007 size_int (coffset)));
11010 return NULL_TREE;
11012 case POINTER_PLUS_EXPR:
11013 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11014 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11015 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11016 return fold_convert_loc (loc, type,
11017 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11018 fold_convert_loc (loc, sizetype,
11019 arg1),
11020 fold_convert_loc (loc, sizetype,
11021 arg0)));
11023 return NULL_TREE;
11025 case PLUS_EXPR:
11026 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11028 /* X + (X / CST) * -CST is X % CST. */
11029 if (TREE_CODE (arg1) == MULT_EXPR
11030 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11031 && operand_equal_p (arg0,
11032 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11034 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11035 tree cst1 = TREE_OPERAND (arg1, 1);
11036 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11037 cst1, cst0);
11038 if (sum && integer_zerop (sum))
11039 return fold_convert_loc (loc, type,
11040 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11041 TREE_TYPE (arg0), arg0,
11042 cst0));
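/* A concrete instance of the fold above (a sketch, relying on C's
   truncating division): x + (x / 16) * -16 has cst0 == 16 and
   cst1 == -16, whose sum folds to zero, so the whole expression
   becomes x % 16 by the identity x == (x / d) * d + x % d.  */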
11046 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same, or
11047 one of them 1. Make sure the type is not saturating and has the signedness of
11048 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11049 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11050 if ((TREE_CODE (arg0) == MULT_EXPR
11051 || TREE_CODE (arg1) == MULT_EXPR)
11052 && !TYPE_SATURATING (type)
11053 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11054 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11055 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11057 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11058 if (tem)
11059 return tem;
11062 if (! FLOAT_TYPE_P (type))
11064 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11065 (plus (plus (mult) (mult)) (foo)) so that we can
11066 take advantage of the factoring cases below. */
11067 if (ANY_INTEGRAL_TYPE_P (type)
11068 && TYPE_OVERFLOW_WRAPS (type)
11069 && (((TREE_CODE (arg0) == PLUS_EXPR
11070 || TREE_CODE (arg0) == MINUS_EXPR)
11071 && TREE_CODE (arg1) == MULT_EXPR)
11072 || ((TREE_CODE (arg1) == PLUS_EXPR
11073 || TREE_CODE (arg1) == MINUS_EXPR)
11074 && TREE_CODE (arg0) == MULT_EXPR)))
11076 tree parg0, parg1, parg, marg;
11077 enum tree_code pcode;
11079 if (TREE_CODE (arg1) == MULT_EXPR)
11080 parg = arg0, marg = arg1;
11081 else
11082 parg = arg1, marg = arg0;
11083 pcode = TREE_CODE (parg);
11084 parg0 = TREE_OPERAND (parg, 0);
11085 parg1 = TREE_OPERAND (parg, 1);
11086 STRIP_NOPS (parg0);
11087 STRIP_NOPS (parg1);
11089 if (TREE_CODE (parg0) == MULT_EXPR
11090 && TREE_CODE (parg1) != MULT_EXPR)
11091 return fold_build2_loc (loc, pcode, type,
11092 fold_build2_loc (loc, PLUS_EXPR, type,
11093 fold_convert_loc (loc, type,
11094 parg0),
11095 fold_convert_loc (loc, type,
11096 marg)),
11097 fold_convert_loc (loc, type, parg1));
11098 if (TREE_CODE (parg0) != MULT_EXPR
11099 && TREE_CODE (parg1) == MULT_EXPR)
11100 return
11101 fold_build2_loc (loc, PLUS_EXPR, type,
11102 fold_convert_loc (loc, type, parg0),
11103 fold_build2_loc (loc, pcode, type,
11104 fold_convert_loc (loc, type, marg),
11105 fold_convert_loc (loc, type,
11106 parg1)));
11109 else
11111 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11112 to __complex__ ( x, y ). This is not the same for SNaNs or
11113 if signed zeros are involved. */
11114 if (!HONOR_SNANS (arg0)
11115 && !HONOR_SIGNED_ZEROS (arg0)
11116 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11118 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11119 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11120 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11121 bool arg0rz = false, arg0iz = false;
11122 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11123 || (arg0i && (arg0iz = real_zerop (arg0i))))
11125 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11126 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11127 if (arg0rz && arg1i && real_zerop (arg1i))
11129 tree rp = arg1r ? arg1r
11130 : build1 (REALPART_EXPR, rtype, arg1);
11131 tree ip = arg0i ? arg0i
11132 : build1 (IMAGPART_EXPR, rtype, arg0);
11133 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11135 else if (arg0iz && arg1r && real_zerop (arg1r))
11137 tree rp = arg0r ? arg0r
11138 : build1 (REALPART_EXPR, rtype, arg0);
11139 tree ip = arg1i ? arg1i
11140 : build1 (IMAGPART_EXPR, rtype, arg1);
11141 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
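/* Concretely, with the SNaN and signed-zero guards above satisfied,
   __complex__ (x, 0.0) + __complex__ (0.0, y) is rebuilt directly as
   __complex__ (x, y) from the known-nonzero part of each operand.  */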
11146 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11147 We associate floats only if the user has specified
11148 -fassociative-math. */
11149 if (flag_associative_math
11150 && TREE_CODE (arg1) == PLUS_EXPR
11151 && TREE_CODE (arg0) != MULT_EXPR)
11153 tree tree10 = TREE_OPERAND (arg1, 0);
11154 tree tree11 = TREE_OPERAND (arg1, 1);
11155 if (TREE_CODE (tree11) == MULT_EXPR
11156 && TREE_CODE (tree10) == MULT_EXPR)
11158 tree tree0;
11159 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11160 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11163 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11164 We associate floats only if the user has specified
11165 -fassociative-math. */
11166 if (flag_associative_math
11167 && TREE_CODE (arg0) == PLUS_EXPR
11168 && TREE_CODE (arg1) != MULT_EXPR)
11170 tree tree00 = TREE_OPERAND (arg0, 0);
11171 tree tree01 = TREE_OPERAND (arg0, 1);
11172 if (TREE_CODE (tree01) == MULT_EXPR
11173 && TREE_CODE (tree00) == MULT_EXPR)
11175 tree tree0;
11176 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11177 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11182 bit_rotate:
11183 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11184 is a rotate of A by C1 bits. */
11185 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11186 is a rotate of A by B bits.
11187 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11188 though in this case CODE must be | and not + or ^, otherwise
11189 it doesn't return A when B is 0. */
11191 enum tree_code code0, code1;
11192 tree rtype;
11193 code0 = TREE_CODE (arg0);
11194 code1 = TREE_CODE (arg1);
11195 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11196 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11197 && operand_equal_p (TREE_OPERAND (arg0, 0),
11198 TREE_OPERAND (arg1, 0), 0)
11199 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11200 TYPE_UNSIGNED (rtype))
11201 /* Only create rotates in complete modes. Other cases are not
11202 expanded properly. */
11203 && (element_precision (rtype)
11204 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11206 tree tree01, tree11;
11207 tree orig_tree01, orig_tree11;
11208 enum tree_code code01, code11;
11210 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11211 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11212 STRIP_NOPS (tree01);
11213 STRIP_NOPS (tree11);
11214 code01 = TREE_CODE (tree01);
11215 code11 = TREE_CODE (tree11);
11216 if (code11 != MINUS_EXPR
11217 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11219 std::swap (code0, code1);
11220 std::swap (code01, code11);
11221 std::swap (tree01, tree11);
11222 std::swap (orig_tree01, orig_tree11);
11224 if (code01 == INTEGER_CST
11225 && code11 == INTEGER_CST
11226 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11227 == element_precision (rtype)))
11229 tem = build2_loc (loc, LROTATE_EXPR,
11230 rtype, TREE_OPERAND (arg0, 0),
11231 code0 == LSHIFT_EXPR
11232 ? orig_tree01 : orig_tree11);
11233 return fold_convert_loc (loc, type, tem);
11235 else if (code11 == MINUS_EXPR)
11237 tree tree110, tree111;
11238 tree110 = TREE_OPERAND (tree11, 0);
11239 tree111 = TREE_OPERAND (tree11, 1);
11240 STRIP_NOPS (tree110);
11241 STRIP_NOPS (tree111);
11242 if (TREE_CODE (tree110) == INTEGER_CST
11243 && compare_tree_int (tree110,
11244 element_precision (rtype)) == 0
11245 && operand_equal_p (tree01, tree111, 0))
11247 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11248 ? LROTATE_EXPR : RROTATE_EXPR),
11249 rtype, TREE_OPERAND (arg0, 0),
11250 orig_tree01);
11251 return fold_convert_loc (loc, type, tem);
11254 else if (code == BIT_IOR_EXPR
11255 && code11 == BIT_AND_EXPR
11256 && pow2p_hwi (element_precision (rtype)))
11258 tree tree110, tree111;
11259 tree110 = TREE_OPERAND (tree11, 0);
11260 tree111 = TREE_OPERAND (tree11, 1);
11261 STRIP_NOPS (tree110);
11262 STRIP_NOPS (tree111);
11263 if (TREE_CODE (tree110) == NEGATE_EXPR
11264 && TREE_CODE (tree111) == INTEGER_CST
11265 && compare_tree_int (tree111,
11266 element_precision (rtype) - 1) == 0
11267 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11269 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11270 ? LROTATE_EXPR : RROTATE_EXPR),
11271 rtype, TREE_OPERAND (arg0, 0),
11272 orig_tree01);
11273 return fold_convert_loc (loc, type, tem);
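/* The three rotate shapes handled above, sketched for a 32-bit
   unsigned x:
     (x << 8) + (x >> 24)         -> x r<< 8   (8 + 24 == 32)
     (x << n) | (x >> (32 - n))   -> x r<< n
     (x << n) | (x >> (-n & 31))  -> x r<< n   (BIT_IOR_EXPR only)
   The last form needs | so that the result is still x when n is 0.  */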
11279 associate:
11280 /* In most languages, we can't associate operations on floats through
11281 parentheses. Rather than remember where the parentheses were, we
11282 don't associate floats at all, unless the user has specified
11283 -fassociative-math.
11284 And, we need to make sure type is not saturating. */
11286 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11287 && !TYPE_SATURATING (type)
11288 && !TYPE_OVERFLOW_SANITIZED (type))
11290 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11291 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11292 tree atype = type;
11293 bool ok = true;
11295 /* Split both trees into variables, constants, and literals. Then
11296 associate each group together, the constants with literals,
11297 then the result with variables. This increases the chances of
11298 literals being recombined later and of generating relocatable
11299 expressions for the sum of a constant and literal. */
11300 var0 = split_tree (arg0, type, code,
11301 &minus_var0, &con0, &minus_con0,
11302 &lit0, &minus_lit0, 0);
11303 var1 = split_tree (arg1, type, code,
11304 &minus_var1, &con1, &minus_con1,
11305 &lit1, &minus_lit1, code == MINUS_EXPR);
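/* For example, folding (x + 3) + (y + 5) splits the operands into
   variables {x, y} and literals {3, 5}; associating the literals
   first yields (x + y) + 8 instead of a chain of three additions.  */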
11307 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11308 if (code == MINUS_EXPR)
11309 code = PLUS_EXPR;
11311 /* With undefined overflow prefer doing association in a type
11312 which wraps on overflow, if that is one of the operand types. */
11313 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11314 && !TYPE_OVERFLOW_WRAPS (type))
11316 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11317 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11318 atype = TREE_TYPE (arg0);
11319 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11320 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11321 atype = TREE_TYPE (arg1);
11322 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11325 /* With undefined overflow we can only associate constants with one
11326 variable, and constants whose association doesn't overflow. */
11327 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11328 && !TYPE_OVERFLOW_WRAPS (atype))
11330 if ((var0 && var1) || (minus_var0 && minus_var1))
11332 /* ??? If split_tree would handle NEGATE_EXPR we could
11333 simply reject these cases and the allowed cases would
11334 be the var0/minus_var1 ones. */
11335 tree tmp0 = var0 ? var0 : minus_var0;
11336 tree tmp1 = var1 ? var1 : minus_var1;
11337 bool one_neg = false;
11339 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11341 tmp0 = TREE_OPERAND (tmp0, 0);
11342 one_neg = !one_neg;
11344 if (CONVERT_EXPR_P (tmp0)
11345 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11346 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11347 <= TYPE_PRECISION (atype)))
11348 tmp0 = TREE_OPERAND (tmp0, 0);
11349 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11351 tmp1 = TREE_OPERAND (tmp1, 0);
11352 one_neg = !one_neg;
11354 if (CONVERT_EXPR_P (tmp1)
11355 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11356 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11357 <= TYPE_PRECISION (atype)))
11358 tmp1 = TREE_OPERAND (tmp1, 0);
11359 /* The only case we can still associate with two variables
11360 is if they cancel out. */
11361 if (!one_neg
11362 || !operand_equal_p (tmp0, tmp1, 0))
11363 ok = false;
11365 else if ((var0 && minus_var1
11366 && ! operand_equal_p (var0, minus_var1, 0))
11367 || (minus_var0 && var1
11368 && ! operand_equal_p (minus_var0, var1, 0)))
11369 ok = false;
11372 /* Only do something if we found more than two objects. Otherwise,
11373 nothing has changed and we risk infinite recursion. */
11374 if (ok
11375 && ((var0 != 0) + (var1 != 0)
11376 + (minus_var0 != 0) + (minus_var1 != 0)
11377 + (con0 != 0) + (con1 != 0)
11378 + (minus_con0 != 0) + (minus_con1 != 0)
11379 + (lit0 != 0) + (lit1 != 0)
11380 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11382 var0 = associate_trees (loc, var0, var1, code, atype);
11383 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11384 code, atype);
11385 con0 = associate_trees (loc, con0, con1, code, atype);
11386 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11387 code, atype);
11388 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11389 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11390 code, atype);
11392 if (minus_var0 && var0)
11394 var0 = associate_trees (loc, var0, minus_var0,
11395 MINUS_EXPR, atype);
11396 minus_var0 = 0;
11398 if (minus_con0 && con0)
11400 con0 = associate_trees (loc, con0, minus_con0,
11401 MINUS_EXPR, atype);
11402 minus_con0 = 0;
11405 /* Preserve the MINUS_EXPR if the negative part of the literal is
11406 greater than the positive part. Otherwise, the multiplicative
11407 folding code (i.e extract_muldiv) may be fooled in case
11408 unsigned constants are subtracted, like in the following
11409 example: ((X*2 + 4) - 8U)/2. */
11410 if (minus_lit0 && lit0)
11412 if (TREE_CODE (lit0) == INTEGER_CST
11413 && TREE_CODE (minus_lit0) == INTEGER_CST
11414 && tree_int_cst_lt (lit0, minus_lit0)
11415 /* But avoid ending up with only negated parts. */
11416 && (var0 || con0))
11418 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11419 MINUS_EXPR, atype);
11420 lit0 = 0;
11422 else
11424 lit0 = associate_trees (loc, lit0, minus_lit0,
11425 MINUS_EXPR, atype);
11426 minus_lit0 = 0;
11430 /* Don't introduce overflows through reassociation. */
11431 if ((lit0 && TREE_OVERFLOW_P (lit0))
11432 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11433 return NULL_TREE;
11435 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11436 con0 = associate_trees (loc, con0, lit0, code, atype);
11437 lit0 = 0;
11438 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11439 code, atype);
11440 minus_lit0 = 0;
11442 /* Eliminate minus_con0. */
11443 if (minus_con0)
11445 if (con0)
11446 con0 = associate_trees (loc, con0, minus_con0,
11447 MINUS_EXPR, atype);
11448 else if (var0)
11449 var0 = associate_trees (loc, var0, minus_con0,
11450 MINUS_EXPR, atype);
11451 else
11452 gcc_unreachable ();
11453 minus_con0 = 0;
11456 /* Eliminate minus_var0. */
11457 if (minus_var0)
11459 if (con0)
11460 con0 = associate_trees (loc, con0, minus_var0,
11461 MINUS_EXPR, atype);
11462 else
11463 gcc_unreachable ();
11464 minus_var0 = 0;
11467 return
11468 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11469 code, atype));
11473 return NULL_TREE;
11475 case POINTER_DIFF_EXPR:
11476 case MINUS_EXPR:
11477 /* Fold &a[i] - &a[j] to i-j. */
11478 if (TREE_CODE (arg0) == ADDR_EXPR
11479 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11480 && TREE_CODE (arg1) == ADDR_EXPR
11481 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11483 tree tem = fold_addr_of_array_ref_difference (loc, type,
11484 TREE_OPERAND (arg0, 0),
11485 TREE_OPERAND (arg1, 0),
11486 code
11487 == POINTER_DIFF_EXPR);
11488 if (tem)
11489 return tem;
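/* E.g. given "int a[10];", the POINTER_DIFF_EXPR built for
   &a[i] - &a[j] reduces here to arithmetic on the two indexes, so
   the source-level pointer subtraction simplifies to i - j.  */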
11492 /* Further transformations are not for pointers. */
11493 if (code == POINTER_DIFF_EXPR)
11494 return NULL_TREE;
11496 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11497 if (TREE_CODE (arg0) == NEGATE_EXPR
11498 && negate_expr_p (op1)
11499 /* If arg0 is e.g. unsigned int and type is int, then this could
11500 introduce UB, because if A is INT_MIN at runtime, the original
11501 expression can be well defined while the latter is not.
11502 See PR83269. */
11503 && !(ANY_INTEGRAL_TYPE_P (type)
11504 && TYPE_OVERFLOW_UNDEFINED (type)
11505 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11506 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11507 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11508 fold_convert_loc (loc, type,
11509 TREE_OPERAND (arg0, 0)));
11511 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11512 __complex__ ( x, -y ). This is not the same for SNaNs or if
11513 signed zeros are involved. */
11514 if (!HONOR_SNANS (arg0)
11515 && !HONOR_SIGNED_ZEROS (arg0)
11516 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11518 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11519 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11520 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11521 bool arg0rz = false, arg0iz = false;
11522 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11523 || (arg0i && (arg0iz = real_zerop (arg0i))))
11525 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11526 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11527 if (arg0rz && arg1i && real_zerop (arg1i))
11529 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11530 arg1r ? arg1r
11531 : build1 (REALPART_EXPR, rtype, arg1));
11532 tree ip = arg0i ? arg0i
11533 : build1 (IMAGPART_EXPR, rtype, arg0);
11534 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11536 else if (arg0iz && arg1r && real_zerop (arg1r))
11538 tree rp = arg0r ? arg0r
11539 : build1 (REALPART_EXPR, rtype, arg0);
11540 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11541 arg1i ? arg1i
11542 : build1 (IMAGPART_EXPR, rtype, arg1));
11543 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11548 /* A - B -> A + (-B) if B is easily negatable. */
11549 if (negate_expr_p (op1)
11550 && ! TYPE_OVERFLOW_SANITIZED (type)
11551 && ((FLOAT_TYPE_P (type)
11552 /* Avoid this transformation if B is a positive REAL_CST. */
11553 && (TREE_CODE (op1) != REAL_CST
11554 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11555 || INTEGRAL_TYPE_P (type)))
11556 return fold_build2_loc (loc, PLUS_EXPR, type,
11557 fold_convert_loc (loc, type, arg0),
11558 negate_expr (op1));
11560 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same, or
11561 one of them 1. Make sure the type is not saturating and has the signedness of
11562 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11563 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11564 if ((TREE_CODE (arg0) == MULT_EXPR
11565 || TREE_CODE (arg1) == MULT_EXPR)
11566 && !TYPE_SATURATING (type)
11567 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11568 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11569 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11571 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11572 if (tem)
11573 return tem;
11576 goto associate;
11578 case MULT_EXPR:
11579 if (! FLOAT_TYPE_P (type))
11581 /* Transform x * -C into -x * C if x is easily negatable. */
11582 if (TREE_CODE (op1) == INTEGER_CST
11583 && tree_int_cst_sgn (op1) == -1
11584 && negate_expr_p (op0)
11585 && negate_expr_p (op1)
11586 && (tem = negate_expr (op1)) != op1
11587 && ! TREE_OVERFLOW (tem))
11588 return fold_build2_loc (loc, MULT_EXPR, type,
11589 fold_convert_loc (loc, type,
11590 negate_expr (op0)), tem);
11592 strict_overflow_p = false;
11593 if (TREE_CODE (arg1) == INTEGER_CST
11594 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11595 &strict_overflow_p)) != 0)
11597 if (strict_overflow_p)
11598 fold_overflow_warning (("assuming signed overflow does not "
11599 "occur when simplifying "
11600 "multiplication"),
11601 WARN_STRICT_OVERFLOW_MISC);
11602 return fold_convert_loc (loc, type, tem);
11605 /* Optimize z * conj(z) for integer complex numbers. */
11606 if (TREE_CODE (arg0) == CONJ_EXPR
11607 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11608 return fold_mult_zconjz (loc, type, arg1);
11609 if (TREE_CODE (arg1) == CONJ_EXPR
11610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11611 return fold_mult_zconjz (loc, type, arg0);
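/* That is, for an integer complex z == a + b*i, both z * conj(z)
   and conj(z) * z are rebuilt by fold_mult_zconjz as a*a + b*b with
   a zero imaginary part.  */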
11613 else
11615 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11616 This is not the same for NaNs or if signed zeros are
11617 involved. */
11618 if (!HONOR_NANS (arg0)
11619 && !HONOR_SIGNED_ZEROS (arg0)
11620 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11621 && TREE_CODE (arg1) == COMPLEX_CST
11622 && real_zerop (TREE_REALPART (arg1)))
11624 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11625 if (real_onep (TREE_IMAGPART (arg1)))
11626 return
11627 fold_build2_loc (loc, COMPLEX_EXPR, type,
11628 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11629 rtype, arg0)),
11630 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11631 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11632 return
11633 fold_build2_loc (loc, COMPLEX_EXPR, type,
11634 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11635 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11636 rtype, arg0)));
11639 /* Optimize z * conj(z) for floating point complex numbers.
11640 Guarded by flag_unsafe_math_optimizations as non-finite
11641 imaginary components don't produce scalar results. */
11642 if (flag_unsafe_math_optimizations
11643 && TREE_CODE (arg0) == CONJ_EXPR
11644 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11645 return fold_mult_zconjz (loc, type, arg1);
11646 if (flag_unsafe_math_optimizations
11647 && TREE_CODE (arg1) == CONJ_EXPR
11648 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11649 return fold_mult_zconjz (loc, type, arg0);
11651 goto associate;
11653 case BIT_IOR_EXPR:
11654 /* Canonicalize (X & C1) | C2. */
11655 if (TREE_CODE (arg0) == BIT_AND_EXPR
11656 && TREE_CODE (arg1) == INTEGER_CST
11657 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11659 int width = TYPE_PRECISION (type), w;
11660 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11661 wide_int c2 = wi::to_wide (arg1);
11663 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11664 if ((c1 & c2) == c1)
11665 return omit_one_operand_loc (loc, type, arg1,
11666 TREE_OPERAND (arg0, 0));
11668 wide_int msk = wi::mask (width, false,
11669 TYPE_PRECISION (TREE_TYPE (arg1)));
11671 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11672 if (wi::bit_and_not (msk, c1 | c2) == 0)
11674 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11675 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11678 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11679 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11680 mode which allows further optimizations. */
11681 c1 &= msk;
11682 c2 &= msk;
11683 wide_int c3 = wi::bit_and_not (c1, c2);
11684 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11686 wide_int mask = wi::mask (w, false,
11687 TYPE_PRECISION (type));
11688 if (((c1 | c2) & mask) == mask
11689 && wi::bit_and_not (c1, mask) == 0)
11691 c3 = mask;
11692 break;
11696 if (c3 != c1)
11698 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11699 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11700 wide_int_to_tree (type, c3));
11701 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
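/* A small worked case with hypothetical constants: in
   (x & 0x0f) | 0x05, C1 & C2 != C1 and C1 | C2 != ~0, so C1 shrinks
   to C1 & ~C2 == 0x0a and the expression is canonicalized to
   (x & 0x0a) | 0x05, which computes the same value with fewer
   overlapping mask bits.  */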
11705 /* See if this can be simplified into a rotate first. If that
11706 is unsuccessful continue in the association code. */
11707 goto bit_rotate;
11709 case BIT_XOR_EXPR:
11710 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11711 if (TREE_CODE (arg0) == BIT_AND_EXPR
11712 && INTEGRAL_TYPE_P (type)
11713 && integer_onep (TREE_OPERAND (arg0, 1))
11714 && integer_onep (arg1))
11715 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11716 build_zero_cst (TREE_TYPE (arg0)));
11718 /* See if this can be simplified into a rotate first. If that
11719 is unsuccessful continue in the association code. */
11720 goto bit_rotate;
11722 case BIT_AND_EXPR:
11723 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11724 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11725 && INTEGRAL_TYPE_P (type)
11726 && integer_onep (TREE_OPERAND (arg0, 1))
11727 && integer_onep (arg1))
11729 tree tem2;
11730 tem = TREE_OPERAND (arg0, 0);
11731 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11732 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11733 tem, tem2);
11734 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11735 build_zero_cst (TREE_TYPE (tem)));
11737 /* Fold ~X & 1 as (X & 1) == 0. */
11738 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11739 && INTEGRAL_TYPE_P (type)
11740 && integer_onep (arg1))
11742 tree tem2;
11743 tem = TREE_OPERAND (arg0, 0);
11744 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11745 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11746 tem, tem2);
11747 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11748 build_zero_cst (TREE_TYPE (tem)));
11750 /* Fold !X & 1 as X == 0. */
11751 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11752 && integer_onep (arg1))
11754 tem = TREE_OPERAND (arg0, 0);
11755 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11756 build_zero_cst (TREE_TYPE (tem)));
11759 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11760 multiple of 1 << CST. */
11761 if (TREE_CODE (arg1) == INTEGER_CST)
11763 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11764 wide_int ncst1 = -cst1;
11765 if ((cst1 & ncst1) == ncst1
11766 && multiple_of_p (type, arg0,
11767 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11768 return fold_convert_loc (loc, type, arg0);
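/* For instance (values chosen for illustration), (x * 8) & -4 folds
   to x * 8: the mask -4 is -(1 << 2), and x * 8 is provably a
   multiple of 4, so the two bits the mask would clear are already
   zero.  */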
11771 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11772 bits from CST2. */
11773 if (TREE_CODE (arg1) == INTEGER_CST
11774 && TREE_CODE (arg0) == MULT_EXPR
11775 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11777 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11778 wide_int masked
11779 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11781 if (masked == 0)
11782 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11783 arg0, arg1);
11784 else if (masked != warg1)
11786 /* Avoid the transform if arg1 is a mask of some
11787 mode which allows further optimizations. */
11788 int pop = wi::popcount (warg1);
11789 if (!(pop >= BITS_PER_UNIT
11790 && pow2p_hwi (pop)
11791 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11792 return fold_build2_loc (loc, code, type, op0,
11793 wide_int_to_tree (type, masked));
11797 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11798 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11799 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11801 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11803 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11804 if (mask == -1)
11805 return
11806 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11809 goto associate;
11811 case RDIV_EXPR:
11812 /* Don't touch a floating-point divide by zero unless the mode
11813 of the constant can represent infinity. */
11814 if (TREE_CODE (arg1) == REAL_CST
11815 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11816 && real_zerop (arg1))
11817 return NULL_TREE;
11819 /* (-A) / (-B) -> A / B */
11820 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11821 return fold_build2_loc (loc, RDIV_EXPR, type,
11822 TREE_OPERAND (arg0, 0),
11823 negate_expr (arg1));
11824 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11825 return fold_build2_loc (loc, RDIV_EXPR, type,
11826 negate_expr (arg0),
11827 TREE_OPERAND (arg1, 0));
11828 return NULL_TREE;
11830 case TRUNC_DIV_EXPR:
11831 /* Fall through */
11833 case FLOOR_DIV_EXPR:
11834 /* Simplify A / (B << N) where A and B are positive and B is
11835 a power of 2, to A >> (N + log2(B)). */
11836 strict_overflow_p = false;
11837 if (TREE_CODE (arg1) == LSHIFT_EXPR
11838 && (TYPE_UNSIGNED (type)
11839 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11841 tree sval = TREE_OPERAND (arg1, 0);
11842 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11844 tree sh_cnt = TREE_OPERAND (arg1, 1);
11845 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11846 wi::exact_log2 (wi::to_wide (sval)));
11848 if (strict_overflow_p)
11849 fold_overflow_warning (("assuming signed overflow does not "
11850 "occur when simplifying A / (B << N)"),
11851 WARN_STRICT_OVERFLOW_MISC);
11853 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11854 sh_cnt, pow2);
11855 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11856 fold_convert_loc (loc, type, arg0), sh_cnt);
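/* E.g. for unsigned a and n, a / (8 << n) becomes a >> (n + 3),
   since 8 == 1 << 3 and division by a power of two is a right shift
   for non-negative values.  */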
11860 /* Fall through */
11862 case ROUND_DIV_EXPR:
11863 case CEIL_DIV_EXPR:
11864 case EXACT_DIV_EXPR:
11865 if (integer_zerop (arg1))
11866 return NULL_TREE;
11868 /* Convert -A / -B to A / B when the type is signed and overflow is
11869 undefined. */
11870 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11871 && TREE_CODE (op0) == NEGATE_EXPR
11872 && negate_expr_p (op1))
11874 if (ANY_INTEGRAL_TYPE_P (type))
11875 fold_overflow_warning (("assuming signed overflow does not occur "
11876 "when distributing negation across "
11877 "division"),
11878 WARN_STRICT_OVERFLOW_MISC);
11879 return fold_build2_loc (loc, code, type,
11880 fold_convert_loc (loc, type,
11881 TREE_OPERAND (arg0, 0)),
11882 negate_expr (op1));
11884 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11885 && TREE_CODE (arg1) == NEGATE_EXPR
11886 && negate_expr_p (op0))
11888 if (ANY_INTEGRAL_TYPE_P (type))
11889 fold_overflow_warning (("assuming signed overflow does not occur "
11890 "when distributing negation across "
11891 "division"),
11892 WARN_STRICT_OVERFLOW_MISC);
11893 return fold_build2_loc (loc, code, type,
11894 negate_expr (op0),
11895 fold_convert_loc (loc, type,
11896 TREE_OPERAND (arg1, 0)));
11899 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11900 operation, EXACT_DIV_EXPR.
11902 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11903 At one time others generated faster code, but it's not clear if they do
11904 after the last round of changes to the DIV code in expmed.cc. */
11905 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11906 && multiple_of_p (type, arg0, arg1))
11907 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11908 fold_convert (type, arg0),
11909 fold_convert (type, arg1));
11911 strict_overflow_p = false;
11912 if (TREE_CODE (arg1) == INTEGER_CST
11913 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11914 &strict_overflow_p)) != 0)
11916 if (strict_overflow_p)
11917 fold_overflow_warning (("assuming signed overflow does not occur "
11918 "when simplifying division"),
11919 WARN_STRICT_OVERFLOW_MISC);
11920 return fold_convert_loc (loc, type, tem);
11923 return NULL_TREE;
11925 case CEIL_MOD_EXPR:
11926 case FLOOR_MOD_EXPR:
11927 case ROUND_MOD_EXPR:
11928 case TRUNC_MOD_EXPR:
11929 strict_overflow_p = false;
11930 if (TREE_CODE (arg1) == INTEGER_CST
11931 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11932 &strict_overflow_p)) != 0)
11934 if (strict_overflow_p)
11935 fold_overflow_warning (("assuming signed overflow does not occur "
11936 "when simplifying modulus"),
11937 WARN_STRICT_OVERFLOW_MISC);
11938 return fold_convert_loc (loc, type, tem);
11941 return NULL_TREE;
11943 case LROTATE_EXPR:
11944 case RROTATE_EXPR:
11945 case RSHIFT_EXPR:
11946 case LSHIFT_EXPR:
11947 /* Since a negative shift count is not well-defined,
11948 don't try to compute it in the compiler. */
11949 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11950 return NULL_TREE;
11952 prec = element_precision (type);
11954 /* If we have a rotate of a bit operation with the rotate count and
11955 the second operand of the bit operation both constant,
11956 permute the two operations. */
11957 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11958 && (TREE_CODE (arg0) == BIT_AND_EXPR
11959 || TREE_CODE (arg0) == BIT_IOR_EXPR
11960 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11961 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11963 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11964 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11965 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11966 fold_build2_loc (loc, code, type,
11967 arg00, arg1),
11968 fold_build2_loc (loc, code, type,
11969 arg01, arg1));
11972 /* Two consecutive rotates adding up to some integer
11973 multiple of the precision of the type can be ignored. */
11974 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11975 && TREE_CODE (arg0) == RROTATE_EXPR
11976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11977 && wi::umod_trunc (wi::to_wide (arg1)
11978 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11979 prec) == 0)
11980 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
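/* Two brief illustrations, assuming 32-bit operands: the permutation
   above turns (x & 0xff00ff00) r>> 8 into (x r>> 8) & 0x00ff00ff,
   keeping the rotate visible, while (x r>> 23) r>> 9 disappears
   entirely because 23 + 9 == 32 is a multiple of the precision.  */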
11982 return NULL_TREE;
11984 case MIN_EXPR:
11985 case MAX_EXPR:
11986 goto associate;
11988 case TRUTH_ANDIF_EXPR:
11989 /* Note that the operands of this must be ints
11990 and their values must be 0 or 1.
11991 ("true" is a fixed value perhaps depending on the language.) */
11992 /* If first arg is constant zero, return it. */
11993 if (integer_zerop (arg0))
11994 return fold_convert_loc (loc, type, arg0);
11995 /* FALLTHRU */
11996 case TRUTH_AND_EXPR:
11997 /* If either arg is constant true, drop it. */
11998 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12000 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12001 /* Preserve sequence points. */
12002 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12004 /* If second arg is constant zero, result is zero, but first arg
12005 must be evaluated. */
12006 if (integer_zerop (arg1))
12007 return omit_one_operand_loc (loc, type, arg1, arg0);
12008 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12009 case will be handled here. */
12010 if (integer_zerop (arg0))
12011 return omit_one_operand_loc (loc, type, arg0, arg1);
12013 /* !X && X is always false. */
12014 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12015 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12016 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12017 /* X && !X is always false. */
12018 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12019 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12020 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12022 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12023 means A >= Y && A != MAX, but in this case we know that
12024 A < X <= MAX. */
12026 if (!TREE_SIDE_EFFECTS (arg0)
12027 && !TREE_SIDE_EFFECTS (arg1))
12029 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12030 if (tem && !operand_equal_p (tem, arg0, 0))
12031 return fold_convert (type,
12032 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12033 tem, arg1));
12035 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12036 if (tem && !operand_equal_p (tem, arg1, 0))
12037 return fold_convert (type,
12038 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12039 arg0, tem));
12042 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12043 != NULL_TREE)
12044 return tem;
12046 return NULL_TREE;
12048 case TRUTH_ORIF_EXPR:
12049 /* Note that the operands of this must be ints
12050 and their values must be 0 or 1.
12051 ("true" is a fixed value perhaps depending on the language.) */
12052 /* If first arg is constant true, return it. */
12053 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12054 return fold_convert_loc (loc, type, arg0);
12055 /* FALLTHRU */
12056 case TRUTH_OR_EXPR:
12057 /* If either arg is constant zero, drop it. */
12058 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12059 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12060 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12061 /* Preserve sequence points. */
12062 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12063 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12064 /* If second arg is constant true, result is true, but we must
12065 evaluate first arg. */
12066 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12067 return omit_one_operand_loc (loc, type, arg1, arg0);
12068 /* Likewise for first arg, but note this only occurs here for
12069 TRUTH_OR_EXPR. */
12070 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12071 return omit_one_operand_loc (loc, type, arg0, arg1);
12073 /* !X || X is always true. */
12074 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12076 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12077 /* X || !X is always true. */
12078 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12080 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12082 /* (X && !Y) || (!X && Y) is X ^ Y */
12083 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12084 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12086 tree a0, a1, l0, l1, n0, n1;
12088 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12089 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12091 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12092 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12094 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12095 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12097 if ((operand_equal_p (n0, a0, 0)
12098 && operand_equal_p (n1, a1, 0))
12099 || (operand_equal_p (n0, a1, 0)
12100 && operand_equal_p (n1, a0, 0)))
12101 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12104 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12105 != NULL_TREE)
12106 return tem;
12108 return NULL_TREE;
12110 case TRUTH_XOR_EXPR:
12111 /* If the second arg is constant zero, drop it. */
12112 if (integer_zerop (arg1))
12113 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12114 /* If the second arg is constant true, this is a logical inversion. */
12115 if (integer_onep (arg1))
12117 tem = invert_truthvalue_loc (loc, arg0);
12118 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12120 /* Identical arguments cancel to zero. */
12121 if (operand_equal_p (arg0, arg1, 0))
12122 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12124 /* !X ^ X is always true. */
12125 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12126 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12127 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12129 /* X ^ !X is always true. */
12130 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12131 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12132 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12134 return NULL_TREE;
12136 case EQ_EXPR:
12137 case NE_EXPR:
12138 STRIP_NOPS (arg0);
12139 STRIP_NOPS (arg1);
12141 tem = fold_comparison (loc, code, type, op0, op1);
12142 if (tem != NULL_TREE)
12143 return tem;
12145 /* bool_var != 1 becomes !bool_var. */
12146 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12147 && code == NE_EXPR)
12148 return fold_convert_loc (loc, type,
12149 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12150 TREE_TYPE (arg0), arg0));
12152 /* bool_var == 0 becomes !bool_var. */
12153 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12154 && code == EQ_EXPR)
12155 return fold_convert_loc (loc, type,
12156 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12157 TREE_TYPE (arg0), arg0));
12159 /* !exp != 0 becomes !exp */
12160 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12161 && code == NE_EXPR)
12162 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12164 /* If this is an EQ or NE comparison with zero and ARG0 is
12165 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12166 two operations, but the latter can be done in one less insn
12167 on machines that have only two-operand insns or on which a
12168 constant cannot be the first operand. */
12169 if (TREE_CODE (arg0) == BIT_AND_EXPR
12170 && integer_zerop (arg1))
12172 tree arg00 = TREE_OPERAND (arg0, 0);
12173 tree arg01 = TREE_OPERAND (arg0, 1);
12174 if (TREE_CODE (arg00) == LSHIFT_EXPR
12175 && integer_onep (TREE_OPERAND (arg00, 0)))
12177 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12178 arg01, TREE_OPERAND (arg00, 1));
12179 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12180 build_one_cst (TREE_TYPE (arg0)));
12181 return fold_build2_loc (loc, code, type,
12182 fold_convert_loc (loc, TREE_TYPE (arg1),
12183 tem), arg1);
12185 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12186 && integer_onep (TREE_OPERAND (arg01, 0)))
12188 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12189 arg00, TREE_OPERAND (arg01, 1));
12190 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12191 build_one_cst (TREE_TYPE (arg0)));
12192 return fold_build2_loc (loc, code, type,
12193 fold_convert_loc (loc, TREE_TYPE (arg1),
12194 tem), arg1);
12198 /* If this is a comparison of a field, we may be able to simplify it. */
12199 if ((TREE_CODE (arg0) == COMPONENT_REF
12200 || TREE_CODE (arg0) == BIT_FIELD_REF)
12201 /* Handle the constant case even without -O
12202 to make sure the warnings are given. */
12203 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12205 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12206 if (t1)
12207 return t1;
12210 /* Optimize comparisons of strlen vs zero to a compare of the
12211 first character of the string vs zero. To wit,
12212 strlen(ptr) == 0 => *ptr == 0
12213 strlen(ptr) != 0 => *ptr != 0
12214 Other cases should reduce to one of these two (or a constant)
12215 due to the return value of strlen being unsigned. */
12216 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12218 tree fndecl = get_callee_fndecl (arg0);
12220 if (fndecl
12221 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12222 && call_expr_nargs (arg0) == 1
12223 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12224 == POINTER_TYPE))
12226 tree ptrtype
12227 = build_pointer_type (build_qualified_type (char_type_node,
12228 TYPE_QUAL_CONST));
12229 tree ptr = fold_convert_loc (loc, ptrtype,
12230 CALL_EXPR_ARG (arg0, 0));
12231 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12232 return fold_build2_loc (loc, code, type, iref,
12233 build_int_cst (TREE_TYPE (iref), 0));
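/* So, for instance, "strlen (p) == 0" is rewritten as
   "*(const char *) p == 0", avoiding the call entirely: strlen
   returns zero exactly when the first character is the terminating
   NUL.  */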
12237 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12238 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12239 if (TREE_CODE (arg0) == RSHIFT_EXPR
12240 && integer_zerop (arg1)
12241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12243 tree arg00 = TREE_OPERAND (arg0, 0);
12244 tree arg01 = TREE_OPERAND (arg0, 1);
12245 tree itype = TREE_TYPE (arg00);
12246 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12248 if (TYPE_UNSIGNED (itype))
12250 itype = signed_type_for (itype);
12251 arg00 = fold_convert_loc (loc, itype, arg00);
12253 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12254 type, arg00, build_zero_cst (itype));
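/* Concretely, for a 32-bit int x, (x >> 31) != 0 becomes x < 0 and
   (x >> 31) == 0 becomes x >= 0; an unsigned operand is first
   converted to the corresponding signed type so that the sign test
   is meaningful.  */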
12258 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12259 (X & C) == 0 when C is a single bit. */
12260 if (TREE_CODE (arg0) == BIT_AND_EXPR
12261 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12262 && integer_zerop (arg1)
12263 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12265 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12266 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12267 TREE_OPERAND (arg0, 1));
12268 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12269 type, tem,
12270 fold_convert_loc (loc, TREE_TYPE (arg0),
12271 arg1));
12274 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12275 constant C is a power of two, i.e. a single bit. */
12276 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12277 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12278 && integer_zerop (arg1)
12279 && integer_pow2p (TREE_OPERAND (arg0, 1))
12280 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12281 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12283 tree arg00 = TREE_OPERAND (arg0, 0);
12284 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12285 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12288 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12289 when C is a power of two, i.e. a single bit. */
12290 if (TREE_CODE (arg0) == BIT_AND_EXPR
12291 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12292 && integer_zerop (arg1)
12293 && integer_pow2p (TREE_OPERAND (arg0, 1))
12294 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12295 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12297 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12298 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12299 arg000, TREE_OPERAND (arg0, 1));
12300 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12301 tem, build_int_cst (TREE_TYPE (tem), 0));
12304 if (integer_zerop (arg1)
12305 && tree_expr_nonzero_p (arg0))
12307 tree res = constant_boolean_node (code == NE_EXPR, type);
12308 return omit_one_operand_loc (loc, type, res, arg0);
12311 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12312 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12314 tree arg00 = TREE_OPERAND (arg0, 0);
12315 tree arg01 = TREE_OPERAND (arg0, 1);
12316 tree arg10 = TREE_OPERAND (arg1, 0);
12317 tree arg11 = TREE_OPERAND (arg1, 1);
12318 tree itype = TREE_TYPE (arg0);
12320 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12321 operand_equal_p guarantees no side-effects so we don't need
12322 to use omit_one_operand on Z. */
12323 if (operand_equal_p (arg01, arg11, 0))
12324 return fold_build2_loc (loc, code, type, arg00,
12325 fold_convert_loc (loc, TREE_TYPE (arg00),
12326 arg10));
12327 if (operand_equal_p (arg01, arg10, 0))
12328 return fold_build2_loc (loc, code, type, arg00,
12329 fold_convert_loc (loc, TREE_TYPE (arg00),
12330 arg11));
12331 if (operand_equal_p (arg00, arg11, 0))
12332 return fold_build2_loc (loc, code, type, arg01,
12333 fold_convert_loc (loc, TREE_TYPE (arg01),
12334 arg10));
12335 if (operand_equal_p (arg00, arg10, 0))
12336 return fold_build2_loc (loc, code, type, arg01,
12337 fold_convert_loc (loc, TREE_TYPE (arg01),
12338 arg11));
12340 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12341 if (TREE_CODE (arg01) == INTEGER_CST
12342 && TREE_CODE (arg11) == INTEGER_CST)
12344 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12345 fold_convert_loc (loc, itype, arg11));
12346 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12347 return fold_build2_loc (loc, code, type, tem,
12348 fold_convert_loc (loc, itype, arg10));
12352 /* Attempt to simplify equality/inequality comparisons of complex
12353 values. Only lower the comparison if the result is known or
12354 can be simplified to a single scalar comparison. */
12355 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12356 || TREE_CODE (arg0) == COMPLEX_CST)
12357 && (TREE_CODE (arg1) == COMPLEX_EXPR
12358 || TREE_CODE (arg1) == COMPLEX_CST))
12360 tree real0, imag0, real1, imag1;
12361 tree rcond, icond;
12363 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12365 real0 = TREE_OPERAND (arg0, 0);
12366 imag0 = TREE_OPERAND (arg0, 1);
12368 else
12370 real0 = TREE_REALPART (arg0);
12371 imag0 = TREE_IMAGPART (arg0);
12374 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12376 real1 = TREE_OPERAND (arg1, 0);
12377 imag1 = TREE_OPERAND (arg1, 1);
12379 else
12381 real1 = TREE_REALPART (arg1);
12382 imag1 = TREE_IMAGPART (arg1);
12385 rcond = fold_binary_loc (loc, code, type, real0, real1);
12386 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12388 if (integer_zerop (rcond))
12390 if (code == EQ_EXPR)
12391 return omit_two_operands_loc (loc, type, boolean_false_node,
12392 imag0, imag1);
12393 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12395 else
12397 if (code == NE_EXPR)
12398 return omit_two_operands_loc (loc, type, boolean_true_node,
12399 imag0, imag1);
12400 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12404 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12405 if (icond && TREE_CODE (icond) == INTEGER_CST)
12407 if (integer_zerop (icond))
12409 if (code == EQ_EXPR)
12410 return omit_two_operands_loc (loc, type, boolean_false_node,
12411 real0, real1);
12412 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12414 else
12416 if (code == NE_EXPR)
12417 return omit_two_operands_loc (loc, type, boolean_true_node,
12418 real0, real1);
12419 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12424 return NULL_TREE;
12426 case LT_EXPR:
12427 case GT_EXPR:
12428 case LE_EXPR:
12429 case GE_EXPR:
12430 tem = fold_comparison (loc, code, type, op0, op1);
12431 if (tem != NULL_TREE)
12432 return tem;
12434 /* Transform comparisons of the form X +- C CMP X. */
12435 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12436 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12438 && !HONOR_SNANS (arg0))
12440 tree arg01 = TREE_OPERAND (arg0, 1);
12441 enum tree_code code0 = TREE_CODE (arg0);
12442 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12444 /* (X - c) > X becomes false. */
12445 if (code == GT_EXPR
12446 && ((code0 == MINUS_EXPR && is_positive >= 0)
12447 || (code0 == PLUS_EXPR && is_positive <= 0)))
12448 return constant_boolean_node (0, type);
12450 /* Likewise (X + c) < X becomes false. */
12451 if (code == LT_EXPR
12452 && ((code0 == PLUS_EXPR && is_positive >= 0)
12453 || (code0 == MINUS_EXPR && is_positive <= 0)))
12454 return constant_boolean_node (0, type);
12456 /* Convert (X - c) <= X to true. */
12457 if (!HONOR_NANS (arg1)
12458 && code == LE_EXPR
12459 && ((code0 == MINUS_EXPR && is_positive >= 0)
12460 || (code0 == PLUS_EXPR && is_positive <= 0)))
12461 return constant_boolean_node (1, type);
12463 /* Convert (X + c) >= X to true. */
12464 if (!HONOR_NANS (arg1)
12465 && code == GE_EXPR
12466 && ((code0 == PLUS_EXPR && is_positive >= 0)
12467 || (code0 == MINUS_EXPR && is_positive <= 0)))
12468 return constant_boolean_node (1, type);
12471 /* If we are comparing an ABS_EXPR with a constant, we can
12472 convert all the cases into explicit comparisons, but they may
12473 well not be faster than doing the ABS and one comparison.
12474 But ABS (X) <= C is a range comparison, which becomes a subtraction
12475 and a comparison, and is probably faster. */
12476 if (code == LE_EXPR
12477 && TREE_CODE (arg1) == INTEGER_CST
12478 && TREE_CODE (arg0) == ABS_EXPR
12479 && ! TREE_SIDE_EFFECTS (arg0)
12480 && (tem = negate_expr (arg1)) != 0
12481 && TREE_CODE (tem) == INTEGER_CST
12482 && !TREE_OVERFLOW (tem))
12483 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12484 build2 (GE_EXPR, type,
12485 TREE_OPERAND (arg0, 0), tem),
12486 build2 (LE_EXPR, type,
12487 TREE_OPERAND (arg0, 0), arg1));
12489 /* Convert ABS_EXPR<x> >= 0 to true. */
12490 strict_overflow_p = false;
12491 if (code == GE_EXPR
12492 && (integer_zerop (arg1)
12493 || (! HONOR_NANS (arg0)
12494 && real_zerop (arg1)))
12495 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12497 if (strict_overflow_p)
12498 fold_overflow_warning (("assuming signed overflow does not occur "
12499 "when simplifying comparison of "
12500 "absolute value and zero"),
12501 WARN_STRICT_OVERFLOW_CONDITIONAL);
12502 return omit_one_operand_loc (loc, type,
12503 constant_boolean_node (true, type),
12504 arg0);
12507 /* Convert ABS_EXPR<x> < 0 to false. */
12508 strict_overflow_p = false;
12509 if (code == LT_EXPR
12510 && (integer_zerop (arg1) || real_zerop (arg1))
12511 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12513 if (strict_overflow_p)
12514 fold_overflow_warning (("assuming signed overflow does not occur "
12515 "when simplifying comparison of "
12516 "absolute value and zero"),
12517 WARN_STRICT_OVERFLOW_CONDITIONAL);
12518 return omit_one_operand_loc (loc, type,
12519 constant_boolean_node (false, type),
12520 arg0);
12523 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12524 and similarly for >= into !=. */
12525 if ((code == LT_EXPR || code == GE_EXPR)
12526 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12527 && TREE_CODE (arg1) == LSHIFT_EXPR
12528 && integer_onep (TREE_OPERAND (arg1, 0)))
12529 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12530 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12531 TREE_OPERAND (arg1, 1)),
12532 build_zero_cst (TREE_TYPE (arg0)));
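/* For example, for unsigned x this rewrites x < (1 << y) as
   (x >> y) == 0 and x >= (1 << y) as (x >> y) != 0: x is below the
   single set bit exactly when shifting it right by y leaves no bits
   set.  */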
12534 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12535 otherwise Y might be >= # of bits in X's type and thus e.g.
12536 (unsigned char) (1 << Y) for Y 15 might be 0.
12537 If the cast is widening, then 1 << Y should have unsigned type,
12538 otherwise if Y is number of bits in the signed shift type minus 1,
12539 we can't optimize this. E.g. (unsigned long long) (1 << Y)
12540 for Y == 31 might be 0xffffffff80000000. */
12541 if ((code == LT_EXPR || code == GE_EXPR)
12542 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12543 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12544 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12545 && CONVERT_EXPR_P (arg1)
12546 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12547 && (element_precision (TREE_TYPE (arg1))
12548 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12549 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12550 || (element_precision (TREE_TYPE (arg1))
12551 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12552 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12554 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12555 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12556 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12557 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12558 build_zero_cst (TREE_TYPE (arg0)));
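/* Editorial illustration (not part of the original sources) of the
   narrowing-cast restriction discussed above, assuming 32-bit int:
   for Y == 15, (unsigned char) (1 << Y) is 0, so X < (cast) (1 << Y)
   is always false, whereas (X >> Y) == 0 can be true, so the
   transformation must reject narrowing casts.  */
#if 0
unsigned char x = 1;
int y = 15;
int lhs = x < (unsigned char) (1 << y);	/* always 0 */
int rhs = (x >> y) == 0;		/* 1 here */
#endif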
12561 return NULL_TREE;
12563 case UNORDERED_EXPR:
12564 case ORDERED_EXPR:
12565 case UNLT_EXPR:
12566 case UNLE_EXPR:
12567 case UNGT_EXPR:
12568 case UNGE_EXPR:
12569 case UNEQ_EXPR:
12570 case LTGT_EXPR:
12571 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12573 tree targ0 = strip_float_extensions (arg0);
12574 tree targ1 = strip_float_extensions (arg1);
12575 tree newtype = TREE_TYPE (targ0);
12577 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12578 newtype = TREE_TYPE (targ1);
12580 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12581 return fold_build2_loc (loc, code, type,
12582 fold_convert_loc (loc, newtype, targ0),
12583 fold_convert_loc (loc, newtype, targ1));
12586 return NULL_TREE;
12588 case COMPOUND_EXPR:
12589 /* When pedantic, a compound expression can be neither an lvalue
12590 nor an integer constant expression. */
12591 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12592 return NULL_TREE;
12593 /* Don't let (0, 0) be a null pointer constant. */
12594 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12595 : fold_convert_loc (loc, type, arg1);
12596 return tem;
12598 default:
12599 return NULL_TREE;
12600 } /* switch (code) */
12603 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12604 ((A & N) + B) & M -> (A + B) & M
12605 Similarly if (N & M) == 0,
12606 ((A | N) + B) & M -> (A + B) & M
12607 and for - instead of + (or unary - instead of +)
12608 and/or ^ instead of |.
12609 If B is constant and (B & M) == 0, fold into A & M.
12611 This function is a helper for match.pd patterns. It returns the
12612 non-NULL type in which the simplified operation should be performed,
12613 but only if some simplification is actually possible; otherwise NULL_TREE.
12615 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12616 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12617 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12618 +/-. */
12619 tree
12620 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12621 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12622 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12623 tree *pmop)
12625 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12626 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12627 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12628 if (~cst1 == 0
12629 || (cst1 & (cst1 + 1)) != 0
12630 || !INTEGRAL_TYPE_P (type)
12631 || (!TYPE_OVERFLOW_WRAPS (type)
12632 && TREE_CODE (type) != INTEGER_TYPE)
12633 || (wi::max_value (type) & cst1) != cst1)
12634 return NULL_TREE;
12636 enum tree_code codes[2] = { code00, code01 };
12637 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12638 int which = 0;
12639 wide_int cst0;
12641 /* Now we know that arg0 is (C + D) or (C - D) or -C and that
12642 arg1 (M) equals (1LL << cst) - 1.
12643 Store C into PMOP[0] and D into PMOP[1]. */
12644 pmop[0] = arg00;
12645 pmop[1] = arg01;
12646 which = code != NEGATE_EXPR;
12648 for (; which >= 0; which--)
12649 switch (codes[which])
12651 case BIT_AND_EXPR:
12652 case BIT_IOR_EXPR:
12653 case BIT_XOR_EXPR:
12654 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12655 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12656 if (codes[which] == BIT_AND_EXPR)
12658 if (cst0 != cst1)
12659 break;
12661 else if (cst0 != 0)
12662 break;
12663 /* If C or D is of the form (A & N) where
12664 (N & M) == M, or of the form (A | N) or
12665 (A ^ N) where (N & M) == 0, replace it with A. */
12666 pmop[which] = arg0xx[2 * which];
12667 break;
12668 case ERROR_MARK:
12669 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12670 break;
12671 /* If C or D is a N where (N & M) == 0, it can be
12672 omitted (replaced with 0). */
12673 if ((code == PLUS_EXPR
12674 || (code == MINUS_EXPR && which == 0))
12675 && (cst1 & wi::to_wide (pmop[which])) == 0)
12676 pmop[which] = build_int_cst (type, 0);
12677 /* Similarly, with C - N where (-N & M) == 0. */
12678 if (code == MINUS_EXPR
12679 && which == 1
12680 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12681 pmop[which] = build_int_cst (type, 0);
12682 break;
12683 default:
12684 gcc_unreachable ();
12687 /* Only build anything new if we optimized one or both arguments above. */
12688 if (pmop[0] == arg00 && pmop[1] == arg01)
12689 return NULL_TREE;
12691 if (TYPE_OVERFLOW_WRAPS (type))
12692 return type;
12693 else
12694 return unsigned_type_for (type);
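/* Editorial worked example (a sketch, not part of the original
   sources) of the simplification above: take M == 7 == (1 << 3) - 1
   and N == 0xf0, so that (N & M) == 0.  Setting bits above the mask
   cannot influence the masked sum, because carries only propagate
   upwards.  */
#if 0
static unsigned
bit_and_mask_demo (unsigned a, unsigned b)
{
  unsigned lhs = ((a | 0xf0) + b) & 7;	/* ((A | N) + B) & M */
  unsigned rhs = (a + b) & 7;		/* (A + B) & M */
  return lhs == rhs;			/* 1 for all A and B.  */
}
#endif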
12697 /* Used by contains_label_p and contains_label_1. */
12699 struct contains_label_data
12701 hash_set<tree> *pset;
12702 bool inside_switch_p;
12705 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12706 a LABEL_EXPR, or a CASE_LABEL_EXPR that is not nested inside a SWITCH_EXPR
12707 within the walked tree; otherwise return NULL_TREE. Do not check the
subtrees of GOTO_EXPR. */
12709 static tree
12710 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12712 contains_label_data *d = (contains_label_data *) data;
12713 switch (TREE_CODE (*tp))
12715 case LABEL_EXPR:
12716 return *tp;
12718 case CASE_LABEL_EXPR:
12719 if (!d->inside_switch_p)
12720 return *tp;
12721 return NULL_TREE;
12723 case SWITCH_EXPR:
12724 if (!d->inside_switch_p)
12726 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12727 return *tp;
12728 d->inside_switch_p = true;
12729 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12730 return *tp;
12731 d->inside_switch_p = false;
12732 *walk_subtrees = 0;
12734 return NULL_TREE;
12736 case GOTO_EXPR:
12737 *walk_subtrees = 0;
12738 return NULL_TREE;
12740 default:
12741 return NULL_TREE;
12745 /* Return whether the sub-tree ST contains a label which is accessible from
12746 outside the sub-tree. */
12748 static bool
12749 contains_label_p (tree st)
12751 hash_set<tree> pset;
12752 contains_label_data data = { &pset, false };
12753 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
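/* Editorial usage sketch (not part of the original sources),
   mirroring the check in fold_ternary_loc below: an expression with
   side effects may only be discarded if it does not hide a label
   that a goto or switch outside of it could still target.  */
#if 0
if (!TREE_SIDE_EFFECTS (unused_op) || !contains_label_p (unused_op))
  /* UNUSED_OP can safely be dropped.  */ ;
#endif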
12756 /* Fold a ternary expression of code CODE and type TYPE with operands
12757 OP0, OP1, and OP2. Return the folded expression if folding is
12758 successful. Otherwise, return NULL_TREE. */
12760 tree
12761 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12762 tree op0, tree op1, tree op2)
12764 tree tem;
12765 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12766 enum tree_code_class kind = TREE_CODE_CLASS (code);
12768 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12769 && TREE_CODE_LENGTH (code) == 3);
12771 /* If this is a commutative operation, and OP0 is a constant, move it
12772 to OP1 to reduce the number of tests below. */
12773 if (commutative_ternary_tree_code (code)
12774 && tree_swap_operands_p (op0, op1))
12775 return fold_build3_loc (loc, code, type, op1, op0, op2);
12777 tem = generic_simplify (loc, code, type, op0, op1, op2);
12778 if (tem)
12779 return tem;
12781 /* Strip any conversions that don't change the mode. This is safe
12782 for every expression, except for a comparison expression because
12783 its signedness is derived from its operands. So, in the latter
12784 case, only strip conversions that don't change the signedness.
12786 Note that this is done as an internal manipulation within the
12787 constant folder, in order to find the simplest representation of
12788 the arguments so that their form can be studied. In any case,
12789 the appropriate type conversions should be put back in the tree
12790 that will get out of the constant folder. */
12791 if (op0)
12793 arg0 = op0;
12794 STRIP_NOPS (arg0);
12797 if (op1)
12799 arg1 = op1;
12800 STRIP_NOPS (arg1);
12803 if (op2)
12805 arg2 = op2;
12806 STRIP_NOPS (arg2);
12809 switch (code)
12811 case COMPONENT_REF:
12812 if (TREE_CODE (arg0) == CONSTRUCTOR
12813 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12815 unsigned HOST_WIDE_INT idx;
12816 tree field, value;
12817 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12818 if (field == arg1)
12819 return value;
12821 return NULL_TREE;
12823 case COND_EXPR:
12824 case VEC_COND_EXPR:
12825 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12826 so all simple results are passed through protected_set_expr_location_unshare. */
12827 if (TREE_CODE (arg0) == INTEGER_CST)
12829 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12830 tem = integer_zerop (arg0) ? op2 : op1;
12831 /* Only optimize constant conditions when the selected branch
12832 has the same type as the COND_EXPR. This avoids optimizing
12833 away "c ? x : throw", where the throw has a void type.
12834 Also avoid throwing away an operand that contains a label. */
12835 if ((!TREE_SIDE_EFFECTS (unused_op)
12836 || !contains_label_p (unused_op))
12837 && (! VOID_TYPE_P (TREE_TYPE (tem))
12838 || VOID_TYPE_P (type)))
12839 return protected_set_expr_location_unshare (tem, loc);
12840 return NULL_TREE;
12842 else if (TREE_CODE (arg0) == VECTOR_CST)
12844 unsigned HOST_WIDE_INT nelts;
12845 if ((TREE_CODE (arg1) == VECTOR_CST
12846 || TREE_CODE (arg1) == CONSTRUCTOR)
12847 && (TREE_CODE (arg2) == VECTOR_CST
12848 || TREE_CODE (arg2) == CONSTRUCTOR)
12849 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12851 vec_perm_builder sel (nelts, nelts, 1);
12852 for (unsigned int i = 0; i < nelts; i++)
12854 tree val = VECTOR_CST_ELT (arg0, i);
12855 if (integer_all_onesp (val))
12856 sel.quick_push (i);
12857 else if (integer_zerop (val))
12858 sel.quick_push (nelts + i);
12859 else /* Currently unreachable. */
12860 return NULL_TREE;
12862 vec_perm_indices indices (sel, 2, nelts);
12863 tree t = fold_vec_perm (type, arg1, arg2, indices);
12864 if (t != NULL_TREE)
12865 return t;
12869 /* If we have A op B ? A : C, we may be able to convert this to a
12870 simpler expression, depending on the operation and the values
12871 of B and C. Signed zeros prevent all of these transformations,
12872 for reasons given above each one.
12874 Also try swapping the arguments and inverting the conditional. */
12875 if (COMPARISON_CLASS_P (arg0)
12876 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12877 && !HONOR_SIGNED_ZEROS (op1))
12879 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12880 TREE_OPERAND (arg0, 0),
12881 TREE_OPERAND (arg0, 1),
12882 op1, op2);
12883 if (tem)
12884 return tem;
12887 if (COMPARISON_CLASS_P (arg0)
12888 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12889 && !HONOR_SIGNED_ZEROS (op2))
12891 enum tree_code comp_code = TREE_CODE (arg0);
12892 tree arg00 = TREE_OPERAND (arg0, 0);
12893 tree arg01 = TREE_OPERAND (arg0, 1);
12894 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12895 if (comp_code != ERROR_MARK)
12896 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12897 arg00,
12898 arg01,
12899 op2, op1);
12900 if (tem)
12901 return tem;
12904 /* If the second operand is simpler than the third, swap them
12905 since that produces better jump optimization results. */
12906 if (truth_value_p (TREE_CODE (arg0))
12907 && tree_swap_operands_p (op1, op2))
12909 location_t loc0 = expr_location_or (arg0, loc);
12910 /* See if this can be inverted. If it can't, possibly because
12911 it was a floating-point inequality comparison, don't do
12912 anything. */
12913 tem = fold_invert_truthvalue (loc0, arg0);
12914 if (tem)
12915 return fold_build3_loc (loc, code, type, tem, op2, op1);
12918 /* Convert A ? 1 : 0 to simply A. */
12919 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12920 : (integer_onep (op1)
12921 && !VECTOR_TYPE_P (type)))
12922 && integer_zerop (op2)
12923 /* If we try to convert OP0 to our type, the
12924 call to fold will try to move the conversion inside
12925 a COND, which will recurse. In that case, the COND_EXPR
12926 is probably the best choice, so leave it alone. */
12927 && type == TREE_TYPE (arg0))
12928 return protected_set_expr_location_unshare (arg0, loc);
12930 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12931 over COND_EXPR in cases such as floating point comparisons. */
12932 if (integer_zerop (op1)
12933 && code == COND_EXPR
12934 && integer_onep (op2)
12935 && !VECTOR_TYPE_P (type)
12936 && truth_value_p (TREE_CODE (arg0)))
12937 return fold_convert_loc (loc, type,
12938 invert_truthvalue_loc (loc, arg0));
12940 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12941 if (TREE_CODE (arg0) == LT_EXPR
12942 && integer_zerop (TREE_OPERAND (arg0, 1))
12943 && integer_zerop (op2)
12944 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12946 /* sign_bit_p looks through both zero and sign extensions,
12947 but for this optimization only sign extensions are
12948 usable. */
12949 tree tem2 = TREE_OPERAND (arg0, 0);
12950 while (tem != tem2)
12952 if (TREE_CODE (tem2) != NOP_EXPR
12953 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12955 tem = NULL_TREE;
12956 break;
12958 tem2 = TREE_OPERAND (tem2, 0);
12960 /* sign_bit_p only checks ARG1 bits within A's precision.
12961 If <sign bit of A> has wider type than A, bits outside
12962 of A's precision in <sign bit of A> need to be checked.
12963 If they are all 0, this optimization needs to be done
12964 in unsigned A's type, if they are all 1 in signed A's type,
12965 otherwise this can't be done. */
12966 if (tem
12967 && TYPE_PRECISION (TREE_TYPE (tem))
12968 < TYPE_PRECISION (TREE_TYPE (arg1))
12969 && TYPE_PRECISION (TREE_TYPE (tem))
12970 < TYPE_PRECISION (type))
12972 int inner_width, outer_width;
12973 tree tem_type;
12975 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12976 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12977 if (outer_width > TYPE_PRECISION (type))
12978 outer_width = TYPE_PRECISION (type);
12980 wide_int mask = wi::shifted_mask
12981 (inner_width, outer_width - inner_width, false,
12982 TYPE_PRECISION (TREE_TYPE (arg1)));
12984 wide_int common = mask & wi::to_wide (arg1);
12985 if (common == mask)
12987 tem_type = signed_type_for (TREE_TYPE (tem));
12988 tem = fold_convert_loc (loc, tem_type, tem);
12990 else if (common == 0)
12992 tem_type = unsigned_type_for (TREE_TYPE (tem));
12993 tem = fold_convert_loc (loc, tem_type, tem);
12995 else
12996 tem = NULL;
12999 if (tem)
13000 return
13001 fold_convert_loc (loc, type,
13002 fold_build2_loc (loc, BIT_AND_EXPR,
13003 TREE_TYPE (tem), tem,
13004 fold_convert_loc (loc,
13005 TREE_TYPE (tem),
13006 arg1)));
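/* Editorial worked example (not part of the original sources),
   assuming 32-bit int: A < 0 is exactly "sign bit of A set", so
   selecting the sign-bit constant reduces to a single AND.  */
#if 0
unsigned sign_bit_demo (int a)
{
  return a < 0 ? 0x80000000u : 0u;	/* == (unsigned) a & 0x80000000u */
}
#endif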
13009 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13010 already handled above. */
13011 if (TREE_CODE (arg0) == BIT_AND_EXPR
13012 && integer_onep (TREE_OPERAND (arg0, 1))
13013 && integer_zerop (op2)
13014 && integer_pow2p (arg1))
13016 tree tem = TREE_OPERAND (arg0, 0);
13017 STRIP_NOPS (tem);
13018 if (TREE_CODE (tem) == RSHIFT_EXPR
13019 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13020 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13021 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13022 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13023 fold_convert_loc (loc, type,
13024 TREE_OPERAND (tem, 0)),
13025 op1);
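/* Editorial example (not from the original sources), with N == 3:
   testing bit 3 and then rebuilding it is just masking it.  */
#if 0
unsigned bit3_demo (unsigned a)
{
  return ((a >> 3) & 1) ? 8 : 0;	/* folds to a & 8 */
}
#endif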
13028 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13029 is probably obsolete because the first operand should be a
13030 truth value (that's why we have the two cases above), but let's
13031 leave it in until we can confirm this for all front-ends. */
13032 if (integer_zerop (op2)
13033 && TREE_CODE (arg0) == NE_EXPR
13034 && integer_zerop (TREE_OPERAND (arg0, 1))
13035 && integer_pow2p (arg1)
13036 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13037 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13038 arg1, OEP_ONLY_CONST)
13039 /* operand_equal_p compares just value, not precision, so e.g.
13040 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13041 second operand 32-bit -128, which is not a power of two (or vice
13042 versa). */
13043 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13044 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13046 /* Disable the transformations below for vectors, since
13047 fold_binary_op_with_conditional_arg may undo them immediately,
13048 yielding an infinite loop. */
13049 if (code == VEC_COND_EXPR)
13050 return NULL_TREE;
13052 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13053 if (integer_zerop (op2)
13054 && truth_value_p (TREE_CODE (arg0))
13055 && truth_value_p (TREE_CODE (arg1))
13056 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13057 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13058 : TRUTH_ANDIF_EXPR,
13059 type, fold_convert_loc (loc, type, arg0), op1);
13061 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13062 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13063 && truth_value_p (TREE_CODE (arg0))
13064 && truth_value_p (TREE_CODE (arg1))
13065 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13067 location_t loc0 = expr_location_or (arg0, loc);
13068 /* Only perform transformation if ARG0 is easily inverted. */
13069 tem = fold_invert_truthvalue (loc0, arg0);
13070 if (tem)
13071 return fold_build2_loc (loc, code == VEC_COND_EXPR
13072 ? BIT_IOR_EXPR
13073 : TRUTH_ORIF_EXPR,
13074 type, fold_convert_loc (loc, type, tem),
13075 op1);
13078 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13079 if (integer_zerop (arg1)
13080 && truth_value_p (TREE_CODE (arg0))
13081 && truth_value_p (TREE_CODE (op2))
13082 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13084 location_t loc0 = expr_location_or (arg0, loc);
13085 /* Only perform transformation if ARG0 is easily inverted. */
13086 tem = fold_invert_truthvalue (loc0, arg0);
13087 if (tem)
13088 return fold_build2_loc (loc, code == VEC_COND_EXPR
13089 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13090 type, fold_convert_loc (loc, type, tem),
13091 op2);
13094 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13095 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13096 && truth_value_p (TREE_CODE (arg0))
13097 && truth_value_p (TREE_CODE (op2))
13098 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13099 return fold_build2_loc (loc, code == VEC_COND_EXPR
13100 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13101 type, fold_convert_loc (loc, type, arg0), op2);
13103 return NULL_TREE;
13105 case CALL_EXPR:
13106 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13107 of fold_ternary on them. */
13108 gcc_unreachable ();
13110 case BIT_FIELD_REF:
13111 if (TREE_CODE (arg0) == VECTOR_CST
13112 && (type == TREE_TYPE (TREE_TYPE (arg0))
13113 || (VECTOR_TYPE_P (type)
13114 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13115 && tree_fits_uhwi_p (op1)
13116 && tree_fits_uhwi_p (op2))
13118 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13119 unsigned HOST_WIDE_INT width
13120 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13121 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13122 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13123 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13125 if (n != 0
13126 && (idx % width) == 0
13127 && (n % width) == 0
13128 && known_le ((idx + n) / width,
13129 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13131 idx = idx / width;
13132 n = n / width;
13134 if (TREE_CODE (arg0) == VECTOR_CST)
13136 if (n == 1)
13138 tem = VECTOR_CST_ELT (arg0, idx);
13139 if (VECTOR_TYPE_P (type))
13140 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13141 return tem;
13144 tree_vector_builder vals (type, n, 1);
13145 for (unsigned i = 0; i < n; ++i)
13146 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13147 return vals.build ();
13152 /* On constants we can use native encode/interpret to constant
13153 fold (nearly) all BIT_FIELD_REFs. */
13154 if (CONSTANT_CLASS_P (arg0)
13155 && can_native_interpret_type_p (type)
13156 && BITS_PER_UNIT == 8
13157 && tree_fits_uhwi_p (op1)
13158 && tree_fits_uhwi_p (op2))
13160 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13161 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13162 /* Limit us to a reasonable amount of work. To relax the
13163 other limitations we need bit-shifting of the buffer
13164 and rounding up the size. */
13165 if (bitpos % BITS_PER_UNIT == 0
13166 && bitsize % BITS_PER_UNIT == 0
13167 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13169 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13170 unsigned HOST_WIDE_INT len
13171 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13172 bitpos / BITS_PER_UNIT);
13173 if (len > 0
13174 && len * BITS_PER_UNIT >= bitsize)
13176 tree v = native_interpret_expr (type, b,
13177 bitsize / BITS_PER_UNIT);
13178 if (v)
13179 return v;
13184 return NULL_TREE;
13186 case VEC_PERM_EXPR:
13187 /* Perform constant folding of VEC_PERM_EXPR. */
13188 if (TREE_CODE (arg2) == VECTOR_CST
13189 && TREE_CODE (op0) == VECTOR_CST
13190 && TREE_CODE (op1) == VECTOR_CST)
13192 /* Build a vector of integers from the tree mask. */
13193 vec_perm_builder builder;
13194 if (!tree_to_vec_perm_builder (&builder, arg2))
13195 return NULL_TREE;
13197 /* Create a vec_perm_indices for the integer vector. */
13198 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13199 bool single_arg = (op0 == op1);
13200 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13201 return fold_vec_perm (type, op0, op1, sel);
13203 return NULL_TREE;
13205 case BIT_INSERT_EXPR:
13206 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13207 if (TREE_CODE (arg0) == INTEGER_CST
13208 && TREE_CODE (arg1) == INTEGER_CST)
13210 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13211 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13212 wide_int tem = (wi::to_wide (arg0)
13213 & wi::shifted_mask (bitpos, bitsize, true,
13214 TYPE_PRECISION (type)));
13215 wide_int tem2
13216 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13217 bitsize), bitpos);
13218 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13220 else if (TREE_CODE (arg0) == VECTOR_CST
13221 && CONSTANT_CLASS_P (arg1)
13222 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13223 TREE_TYPE (arg1)))
13225 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13226 unsigned HOST_WIDE_INT elsize
13227 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13228 if (bitpos % elsize == 0)
13230 unsigned k = bitpos / elsize;
13231 unsigned HOST_WIDE_INT nelts;
13232 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13233 return arg0;
13234 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13236 tree_vector_builder elts (type, nelts, 1);
13237 elts.quick_grow (nelts);
13238 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13239 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13240 return elts.build ();
13244 return NULL_TREE;
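/* Editorial worked example (not part of the original sources) of the
   integer path above: inserting the 8-bit value 0xab at bit position
   4 of the 32-bit constant 0x0000ffff.
     tem  = 0x0000ffff & ~0x00000ff0 = 0x0000f00f
     tem2 = 0xab << 4               = 0x00000ab0
     tem | tem2                     = 0x0000fabf  */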
13246 default:
13247 return NULL_TREE;
13248 } /* switch (code) */
13251 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13252 of an array (or vector). If CTOR_IDX is non-NULL, it is updated with the
13253 constructor element index of the value returned. If the element is
13254 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13255 the index of the element after the ACCESS_INDEX position (which
13256 may be outside of the CTOR array). */
13258 tree
13259 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13260 unsigned *ctor_idx)
13262 tree index_type = NULL_TREE;
13263 signop index_sgn = UNSIGNED;
13264 offset_int low_bound = 0;
13266 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13268 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13269 if (domain_type && TYPE_MIN_VALUE (domain_type))
13271 /* Static constructors for variably sized objects make no sense. */
13272 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13273 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13274 /* ??? When it is obvious that the range is signed, treat it so. */
13275 if (TYPE_UNSIGNED (index_type)
13276 && TYPE_MAX_VALUE (domain_type)
13277 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13278 TYPE_MIN_VALUE (domain_type)))
13280 index_sgn = SIGNED;
13281 low_bound
13282 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13283 SIGNED);
13285 else
13287 index_sgn = TYPE_SIGN (index_type);
13288 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13293 if (index_type)
13294 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13295 index_sgn);
13297 offset_int index = low_bound;
13298 if (index_type)
13299 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13301 offset_int max_index = index;
13302 unsigned cnt;
13303 tree cfield, cval;
13304 bool first_p = true;
13306 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13308 /* An array constructor might explicitly set the index, or specify a range,
13309 or leave the index NULL, meaning that it is the next index after the
13310 previous one. */
13311 if (cfield)
13313 if (TREE_CODE (cfield) == INTEGER_CST)
13314 max_index = index
13315 = offset_int::from (wi::to_wide (cfield), index_sgn);
13316 else
13318 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13319 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13320 index_sgn);
13321 max_index
13322 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13323 index_sgn);
13324 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13327 else if (!first_p)
13329 index = max_index + 1;
13330 if (index_type)
13331 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13332 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13333 max_index = index;
13335 else
13336 first_p = false;
13338 /* Do we have a match? */
13339 if (wi::cmp (access_index, index, index_sgn) >= 0)
13341 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13343 if (ctor_idx)
13344 *ctor_idx = cnt;
13345 return cval;
13348 else if (in_gimple_form)
13349 /* We're past the element we search for. Note during parsing
13350 the elements might not be sorted.
13351 ??? We should use a binary search and a flag on the
13352 CONSTRUCTOR as to whether elements are sorted in declaration
13353 order. */
13354 break;
13356 if (ctor_idx)
13357 *ctor_idx = cnt;
13358 return NULL_TREE;
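/* Editorial example (not part of the original sources), using the GNU
   range-designator extension: for
     int a[8] = { [0 ... 3] = 1, 7 };
   the CONSTRUCTOR has a RANGE_EXPR element covering indexes 0-3 and a
   NULL-index element that implicitly lands at index 4, so
   ACCESS_INDEX 2 returns the first element's value 1 and
   ACCESS_INDEX 4 returns 7.  */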
13361 /* Perform constant folding and related simplification of EXPR.
13362 The related simplifications include x*1 => x, x*0 => 0, etc.,
13363 and application of the associative law.
13364 NOP_EXPR conversions may be removed freely (as long as we
13365 are careful not to change the type of the overall expression).
13366 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13367 but we can constant-fold them if they have constant operands. */
13369 #ifdef ENABLE_FOLD_CHECKING
13370 # define fold(x) fold_1 (x)
13371 static tree fold_1 (tree);
13372 static
13373 #endif
13374 tree
13375 fold (tree expr)
13377 const tree t = expr;
13378 enum tree_code code = TREE_CODE (t);
13379 enum tree_code_class kind = TREE_CODE_CLASS (code);
13380 tree tem;
13381 location_t loc = EXPR_LOCATION (expr);
13383 /* Return right away if a constant. */
13384 if (kind == tcc_constant)
13385 return t;
13387 /* CALL_EXPR-like objects with variable numbers of operands are
13388 treated specially. */
13389 if (kind == tcc_vl_exp)
13391 if (code == CALL_EXPR)
13393 tem = fold_call_expr (loc, expr, false);
13394 return tem ? tem : expr;
13396 return expr;
13399 if (IS_EXPR_CODE_CLASS (kind))
13401 tree type = TREE_TYPE (t);
13402 tree op0, op1, op2;
13404 switch (TREE_CODE_LENGTH (code))
13406 case 1:
13407 op0 = TREE_OPERAND (t, 0);
13408 tem = fold_unary_loc (loc, code, type, op0);
13409 return tem ? tem : expr;
13410 case 2:
13411 op0 = TREE_OPERAND (t, 0);
13412 op1 = TREE_OPERAND (t, 1);
13413 tem = fold_binary_loc (loc, code, type, op0, op1);
13414 return tem ? tem : expr;
13415 case 3:
13416 op0 = TREE_OPERAND (t, 0);
13417 op1 = TREE_OPERAND (t, 1);
13418 op2 = TREE_OPERAND (t, 2);
13419 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13420 return tem ? tem : expr;
13421 default:
13422 break;
13426 switch (code)
13428 case ARRAY_REF:
13430 tree op0 = TREE_OPERAND (t, 0);
13431 tree op1 = TREE_OPERAND (t, 1);
13433 if (TREE_CODE (op1) == INTEGER_CST
13434 && TREE_CODE (op0) == CONSTRUCTOR
13435 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13437 tree val = get_array_ctor_element_at_index (op0,
13438 wi::to_offset (op1));
13439 if (val)
13440 return val;
13443 return t;
13446 /* Return a VECTOR_CST if possible. */
13447 case CONSTRUCTOR:
13449 tree type = TREE_TYPE (t);
13450 if (TREE_CODE (type) != VECTOR_TYPE)
13451 return t;
13453 unsigned i;
13454 tree val;
13455 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13456 if (! CONSTANT_CLASS_P (val))
13457 return t;
13459 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13462 case CONST_DECL:
13463 return fold (DECL_INITIAL (t));
13465 default:
13466 return t;
13467 } /* switch (code) */
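/* Editorial usage sketch (not part of the original sources): the
   classic entry point in action, folding the constant expression
   2 + 3 down to a single INTEGER_CST.  */
#if 0
tree sum = build2 (PLUS_EXPR, integer_type_node,
		   build_int_cst (integer_type_node, 2),
		   build_int_cst (integer_type_node, 3));
tree folded = fold (sum);	/* INTEGER_CST 5.  */
#endif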
13470 #ifdef ENABLE_FOLD_CHECKING
13471 #undef fold
13473 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13474 hash_table<nofree_ptr_hash<const tree_node> > *);
13475 static void fold_check_failed (const_tree, const_tree);
13476 void print_fold_checksum (const_tree);
13478 /* When --enable-checking=fold, compute a digest of expr before
13479 and after the actual fold call to verify that fold did not
13480 accidentally change the original expr. */
13482 tree
13483 fold (tree expr)
13485 tree ret;
13486 struct md5_ctx ctx;
13487 unsigned char checksum_before[16], checksum_after[16];
13488 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13490 md5_init_ctx (&ctx);
13491 fold_checksum_tree (expr, &ctx, &ht);
13492 md5_finish_ctx (&ctx, checksum_before);
13493 ht.empty ();
13495 ret = fold_1 (expr);
13497 md5_init_ctx (&ctx);
13498 fold_checksum_tree (expr, &ctx, &ht);
13499 md5_finish_ctx (&ctx, checksum_after);
13501 if (memcmp (checksum_before, checksum_after, 16))
13502 fold_check_failed (expr, ret);
13504 return ret;
13507 void
13508 print_fold_checksum (const_tree expr)
13510 struct md5_ctx ctx;
13511 unsigned char checksum[16], cnt;
13512 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13514 md5_init_ctx (&ctx);
13515 fold_checksum_tree (expr, &ctx, &ht);
13516 md5_finish_ctx (&ctx, checksum);
13517 for (cnt = 0; cnt < 16; ++cnt)
13518 fprintf (stderr, "%02x", checksum[cnt]);
13519 putc ('\n', stderr);
13522 static void
13523 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13525 internal_error ("fold check: original tree changed by fold");
13528 static void
13529 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13530 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13532 const tree_node **slot;
13533 enum tree_code code;
13534 union tree_node *buf;
13535 int i, len;
13537 recursive_label:
13538 if (expr == NULL)
13539 return;
13540 slot = ht->find_slot (expr, INSERT);
13541 if (*slot != NULL)
13542 return;
13543 *slot = expr;
13544 code = TREE_CODE (expr);
13545 if (TREE_CODE_CLASS (code) == tcc_declaration
13546 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13548 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13549 size_t sz = tree_size (expr);
13550 buf = XALLOCAVAR (union tree_node, sz);
13551 memcpy ((char *) buf, expr, sz);
13552 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13553 buf->decl_with_vis.symtab_node = NULL;
13554 buf->base.nowarning_flag = 0;
13555 expr = (tree) buf;
13557 else if (TREE_CODE_CLASS (code) == tcc_type
13558 && (TYPE_POINTER_TO (expr)
13559 || TYPE_REFERENCE_TO (expr)
13560 || TYPE_CACHED_VALUES_P (expr)
13561 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13562 || TYPE_NEXT_VARIANT (expr)
13563 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13565 /* Allow these fields to be modified. */
13566 tree tmp;
13567 size_t sz = tree_size (expr);
13568 buf = XALLOCAVAR (union tree_node, sz);
13569 memcpy ((char *) buf, expr, sz);
13570 expr = tmp = (tree) buf;
13571 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13572 TYPE_POINTER_TO (tmp) = NULL;
13573 TYPE_REFERENCE_TO (tmp) = NULL;
13574 TYPE_NEXT_VARIANT (tmp) = NULL;
13575 TYPE_ALIAS_SET (tmp) = -1;
13576 if (TYPE_CACHED_VALUES_P (tmp))
13578 TYPE_CACHED_VALUES_P (tmp) = 0;
13579 TYPE_CACHED_VALUES (tmp) = NULL;
13582 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13584 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13585 that and change builtins.cc etc. instead - see PR89543. */
13586 size_t sz = tree_size (expr);
13587 buf = XALLOCAVAR (union tree_node, sz);
13588 memcpy ((char *) buf, expr, sz);
13589 buf->base.nowarning_flag = 0;
13590 expr = (tree) buf;
13592 md5_process_bytes (expr, tree_size (expr), ctx);
13593 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13594 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13595 if (TREE_CODE_CLASS (code) != tcc_type
13596 && TREE_CODE_CLASS (code) != tcc_declaration
13597 && code != TREE_LIST
13598 && code != SSA_NAME
13599 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13600 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13601 switch (TREE_CODE_CLASS (code))
13603 case tcc_constant:
13604 switch (code)
13606 case STRING_CST:
13607 md5_process_bytes (TREE_STRING_POINTER (expr),
13608 TREE_STRING_LENGTH (expr), ctx);
13609 break;
13610 case COMPLEX_CST:
13611 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13612 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13613 break;
13614 case VECTOR_CST:
13615 len = vector_cst_encoded_nelts (expr);
13616 for (i = 0; i < len; ++i)
13617 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13618 break;
13619 default:
13620 break;
13622 break;
13623 case tcc_exceptional:
13624 switch (code)
13626 case TREE_LIST:
13627 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13628 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13629 expr = TREE_CHAIN (expr);
13630 goto recursive_label;
13631 break;
13632 case TREE_VEC:
13633 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13634 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13635 break;
13636 default:
13637 break;
13639 break;
13640 case tcc_expression:
13641 case tcc_reference:
13642 case tcc_comparison:
13643 case tcc_unary:
13644 case tcc_binary:
13645 case tcc_statement:
13646 case tcc_vl_exp:
13647 len = TREE_OPERAND_LENGTH (expr);
13648 for (i = 0; i < len; ++i)
13649 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13650 break;
13651 case tcc_declaration:
13652 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13653 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13654 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13656 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13657 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13658 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13659 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13660 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13663 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13665 if (TREE_CODE (expr) == FUNCTION_DECL)
13667 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13668 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13670 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13672 break;
13673 case tcc_type:
13674 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13675 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13676 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13677 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13678 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13679 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13680 if (INTEGRAL_TYPE_P (expr)
13681 || SCALAR_FLOAT_TYPE_P (expr))
13683 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13684 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13686 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13687 if (RECORD_OR_UNION_TYPE_P (expr))
13688 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13689 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13690 break;
13691 default:
13692 break;
13696 /* Helper function for outputting the checksum of a tree T. When
13697 debugging with gdb, you can "define mynext" to be "next" followed
13698 by "call debug_fold_checksum (op0)", then just trace down until the
13699 outputs differ. */
13701 DEBUG_FUNCTION void
13702 debug_fold_checksum (const_tree t)
13704 int i;
13705 unsigned char checksum[16];
13706 struct md5_ctx ctx;
13707 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13709 md5_init_ctx (&ctx);
13710 fold_checksum_tree (t, &ctx, &ht);
13711 md5_finish_ctx (&ctx, checksum);
13712 ht.empty ();
13714 for (i = 0; i < 16; i++)
13715 fprintf (stderr, "%d ", checksum[i]);
13717 fprintf (stderr, "\n");
13720 #endif
13722 /* Fold a unary tree expression with code CODE of type TYPE with an
13723 operand OP0. LOC is the location of the resulting expression.
13724 Return a folded expression if successful. Otherwise, return a tree
13725 expression with code CODE of type TYPE with an operand OP0. */
13727 tree
13728 fold_build1_loc (location_t loc,
13729 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13731 tree tem;
13732 #ifdef ENABLE_FOLD_CHECKING
13733 unsigned char checksum_before[16], checksum_after[16];
13734 struct md5_ctx ctx;
13735 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13737 md5_init_ctx (&ctx);
13738 fold_checksum_tree (op0, &ctx, &ht);
13739 md5_finish_ctx (&ctx, checksum_before);
13740 ht.empty ();
13741 #endif
13743 tem = fold_unary_loc (loc, code, type, op0);
13744 if (!tem)
13745 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13747 #ifdef ENABLE_FOLD_CHECKING
13748 md5_init_ctx (&ctx);
13749 fold_checksum_tree (op0, &ctx, &ht);
13750 md5_finish_ctx (&ctx, checksum_after);
13752 if (memcmp (checksum_before, checksum_after, 16))
13753 fold_check_failed (op0, tem);
13754 #endif
13755 return tem;
13758 /* Fold a binary tree expression with code CODE of type TYPE with
13759 operands OP0 and OP1. LOC is the location of the resulting
13760 expression. Return a folded expression if successful. Otherwise,
13761 return a tree expression with code CODE of type TYPE with operands
13762 OP0 and OP1. */
13764 tree
13765 fold_build2_loc (location_t loc,
13766 enum tree_code code, tree type, tree op0, tree op1
13767 MEM_STAT_DECL)
13769 tree tem;
13770 #ifdef ENABLE_FOLD_CHECKING
13771 unsigned char checksum_before_op0[16],
13772 checksum_before_op1[16],
13773 checksum_after_op0[16],
13774 checksum_after_op1[16];
13775 struct md5_ctx ctx;
13776 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13778 md5_init_ctx (&ctx);
13779 fold_checksum_tree (op0, &ctx, &ht);
13780 md5_finish_ctx (&ctx, checksum_before_op0);
13781 ht.empty ();
13783 md5_init_ctx (&ctx);
13784 fold_checksum_tree (op1, &ctx, &ht);
13785 md5_finish_ctx (&ctx, checksum_before_op1);
13786 ht.empty ();
13787 #endif
13789 tem = fold_binary_loc (loc, code, type, op0, op1);
13790 if (!tem)
13791 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13793 #ifdef ENABLE_FOLD_CHECKING
13794 md5_init_ctx (&ctx);
13795 fold_checksum_tree (op0, &ctx, &ht);
13796 md5_finish_ctx (&ctx, checksum_after_op0);
13797 ht.empty ();
13799 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13800 fold_check_failed (op0, tem);
13802 md5_init_ctx (&ctx);
13803 fold_checksum_tree (op1, &ctx, &ht);
13804 md5_finish_ctx (&ctx, checksum_after_op1);
13806 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13807 fold_check_failed (op1, tem);
13808 #endif
13809 return tem;
13812 /* Fold a ternary tree expression with code CODE of type TYPE with
13813 operands OP0, OP1, and OP2. Return a folded expression if
13814 successful. Otherwise, return a tree expression with code CODE of
13815 type TYPE with operands OP0, OP1, and OP2. */
13817 tree
13818 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13819 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13821 tree tem;
13822 #ifdef ENABLE_FOLD_CHECKING
13823 unsigned char checksum_before_op0[16],
13824 checksum_before_op1[16],
13825 checksum_before_op2[16],
13826 checksum_after_op0[16],
13827 checksum_after_op1[16],
13828 checksum_after_op2[16];
13829 struct md5_ctx ctx;
13830 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13832 md5_init_ctx (&ctx);
13833 fold_checksum_tree (op0, &ctx, &ht);
13834 md5_finish_ctx (&ctx, checksum_before_op0);
13835 ht.empty ();
13837 md5_init_ctx (&ctx);
13838 fold_checksum_tree (op1, &ctx, &ht);
13839 md5_finish_ctx (&ctx, checksum_before_op1);
13840 ht.empty ();
13842 md5_init_ctx (&ctx);
13843 fold_checksum_tree (op2, &ctx, &ht);
13844 md5_finish_ctx (&ctx, checksum_before_op2);
13845 ht.empty ();
13846 #endif
13848 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13849 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13850 if (!tem)
13851 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13853 #ifdef ENABLE_FOLD_CHECKING
13854 md5_init_ctx (&ctx);
13855 fold_checksum_tree (op0, &ctx, &ht);
13856 md5_finish_ctx (&ctx, checksum_after_op0);
13857 ht.empty ();
13859 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13860 fold_check_failed (op0, tem);
13862 md5_init_ctx (&ctx);
13863 fold_checksum_tree (op1, &ctx, &ht);
13864 md5_finish_ctx (&ctx, checksum_after_op1);
13865 ht.empty ();
13867 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13868 fold_check_failed (op1, tem);
13870 md5_init_ctx (&ctx);
13871 fold_checksum_tree (op2, &ctx, &ht);
13872 md5_finish_ctx (&ctx, checksum_after_op2);
13874 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13875 fold_check_failed (op2, tem);
13876 #endif
13877 return tem;
13880 /* Fold a CALL_EXPR expression of type TYPE with callee FN, NARGS
13881 arguments in ARGARRAY, and a null static chain.
13882 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13883 of type TYPE from the given operands as constructed by build_call_array. */
13885 tree
13886 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13887 int nargs, tree *argarray)
13889 tree tem;
13890 #ifdef ENABLE_FOLD_CHECKING
13891 unsigned char checksum_before_fn[16],
13892 checksum_before_arglist[16],
13893 checksum_after_fn[16],
13894 checksum_after_arglist[16];
13895 struct md5_ctx ctx;
13896 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13897 int i;
13899 md5_init_ctx (&ctx);
13900 fold_checksum_tree (fn, &ctx, &ht);
13901 md5_finish_ctx (&ctx, checksum_before_fn);
13902 ht.empty ();
13904 md5_init_ctx (&ctx);
13905 for (i = 0; i < nargs; i++)
13906 fold_checksum_tree (argarray[i], &ctx, &ht);
13907 md5_finish_ctx (&ctx, checksum_before_arglist);
13908 ht.empty ();
13909 #endif
13911 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13912 if (!tem)
13913 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13915 #ifdef ENABLE_FOLD_CHECKING
13916 md5_init_ctx (&ctx);
13917 fold_checksum_tree (fn, &ctx, &ht);
13918 md5_finish_ctx (&ctx, checksum_after_fn);
13919 ht.empty ();
13921 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13922 fold_check_failed (fn, tem);
13924 md5_init_ctx (&ctx);
13925 for (i = 0; i < nargs; i++)
13926 fold_checksum_tree (argarray[i], &ctx, &ht);
13927 md5_finish_ctx (&ctx, checksum_after_arglist);
13929 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13930 fold_check_failed (NULL_TREE, tem);
13931 #endif
13932 return tem;
13935 /* Perform constant folding and related simplification of an initializer
13936 expression EXPR. The functions below behave identically to "fold_buildN"
13937 but ignore potential run-time traps and exceptions that fold must preserve. */
13939 #define START_FOLD_INIT \
13940 int saved_signaling_nans = flag_signaling_nans;\
13941 int saved_trapping_math = flag_trapping_math;\
13942 int saved_rounding_math = flag_rounding_math;\
13943 int saved_trapv = flag_trapv;\
13944 int saved_folding_initializer = folding_initializer;\
13945 flag_signaling_nans = 0;\
13946 flag_trapping_math = 0;\
13947 flag_rounding_math = 0;\
13948 flag_trapv = 0;\
13949 folding_initializer = 1;
13951 #define END_FOLD_INIT \
13952 flag_signaling_nans = saved_signaling_nans;\
13953 flag_trapping_math = saved_trapping_math;\
13954 flag_rounding_math = saved_rounding_math;\
13955 flag_trapv = saved_trapv;\
13956 folding_initializer = saved_folding_initializer;
13958 tree
13959 fold_init (tree expr)
13961 tree result;
13962 START_FOLD_INIT;
13964 result = fold (expr);
13966 END_FOLD_INIT;
13967 return result;
13970 tree
13971 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13972 tree type, tree op)
13974 tree result;
13975 START_FOLD_INIT;
13977 result = fold_build1_loc (loc, code, type, op);
13979 END_FOLD_INIT;
13980 return result;
13983 tree
13984 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13985 tree type, tree op0, tree op1)
13987 tree result;
13988 START_FOLD_INIT;
13990 result = fold_build2_loc (loc, code, type, op0, op1);
13992 END_FOLD_INIT;
13993 return result;
13996 tree
13997 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13998 int nargs, tree *argarray)
14000 tree result;
14001 START_FOLD_INIT;
14003 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14005 END_FOLD_INIT;
14006 return result;
14009 tree
14010 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14011 tree lhs, tree rhs)
14013 tree result;
14014 START_FOLD_INIT;
14016 result = fold_binary_loc (loc, code, type, lhs, rhs);
14018 END_FOLD_INIT;
14019 return result;
14022 #undef START_FOLD_INIT
14023 #undef END_FOLD_INIT
14025 /* Determine if the first argument is a multiple of the second argument.
14026 Return 0 if it is not, or if we cannot easily determine that it is.
14028 An example of the sort of thing we care about (at this point; this routine
14029 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14030 fold cases do now) is discovering that
14032 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14034 is a multiple of
14036 SAVE_EXPR (J * 8)
14038 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14040 This code also handles discovering that
14042 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14044 is a multiple of 8 so we don't have to worry about dealing with a
14045 possible remainder.
14047 Note that we *look* inside a SAVE_EXPR only to determine how it was
14048 calculated; it is not safe for fold to do much of anything else with the
14049 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14050 at run time. For example, the latter example above *cannot* be implemented
14051 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14052 evaluation time of the original SAVE_EXPR is not necessarily the same at
14053 the time the new expression is evaluated. The only optimization of this
14054 sort that would be valid is changing
14056 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14058 divided by 8 to
14060 SAVE_EXPR (I) * SAVE_EXPR (J)
14062 (where the same SAVE_EXPR (J) is used in the original and the
14063 transformed version).
14065 NOWRAP specifies whether all outer operations in TYPE should
14066 be considered not wrapping. Any type conversion within TOP acts
14067 as a barrier and we will fall back to NOWRAP being false.
14068 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14069 as not wrapping even though they are generally using unsigned arithmetic. */
14071 bool
14072 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14074 gimple *stmt;
14075 tree op1, op2;
14077 if (operand_equal_p (top, bottom, 0))
14078 return 1;
14080 if (TREE_CODE (type) != INTEGER_TYPE)
14081 return 0;
14083 switch (TREE_CODE (top))
14085 case BIT_AND_EXPR:
14086 /* Bitwise and provides a power of two multiple. If the mask is
14087 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14088 if (!integer_pow2p (bottom))
14089 return 0;
14090 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14091 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14093 case MULT_EXPR:
14094 /* If the multiplication can wrap we cannot recurse further unless
14095 the bottom is a power of two which is where wrapping does not
14096 matter. */
14097 if (!nowrap
14098 && !TYPE_OVERFLOW_UNDEFINED (type)
14099 && !integer_pow2p (bottom))
14100 return 0;
14101 if (TREE_CODE (bottom) == INTEGER_CST)
14103 op1 = TREE_OPERAND (top, 0);
14104 op2 = TREE_OPERAND (top, 1);
14105 if (TREE_CODE (op1) == INTEGER_CST)
14106 std::swap (op1, op2);
14107 if (TREE_CODE (op2) == INTEGER_CST)
14109 if (multiple_of_p (type, op2, bottom, nowrap))
14110 return 1;
14111 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14112 if (multiple_of_p (type, bottom, op2, nowrap))
14114 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14115 wi::to_widest (op2));
14116 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14118 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14119 return multiple_of_p (type, op1, op2, nowrap);
14122 return multiple_of_p (type, op1, bottom, nowrap);
14125 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14126 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14128 case LSHIFT_EXPR:
14129 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14130 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14132 op1 = TREE_OPERAND (top, 1);
14133 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14135 wide_int mul_op
14136 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14137 return multiple_of_p (type,
14138 wide_int_to_tree (type, mul_op), bottom,
14139 nowrap);
14142 return 0;
14144 case MINUS_EXPR:
14145 case PLUS_EXPR:
14146 /* If the addition or subtraction can wrap we cannot recurse further
14147 unless bottom is a power of two which is where wrapping does not
14148 matter. */
14149 if (!nowrap
14150 && !TYPE_OVERFLOW_UNDEFINED (type)
14151 && !integer_pow2p (bottom))
14152 return 0;
14154 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14155 unsigned type. For example, (X / 3) * 3 + 0xfffffffd is a multiple of 3,
14156 but 0xfffffffd itself is not. */
14157 op1 = TREE_OPERAND (top, 1);
14158 if (TREE_CODE (top) == PLUS_EXPR
14159 && nowrap
14160 && TYPE_UNSIGNED (type)
14161 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14162 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14164 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14165 of bottom, so be conservative here and check that both op0 and op1
14166 are multiples of bottom. Note we check the second operand first
14167 since it's usually simpler. */
14168 return (multiple_of_p (type, op1, bottom, nowrap)
14169 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14171 CASE_CONVERT:
14172 /* Can't handle conversions from non-integral or wider integral type. */
14173 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14174 || (TYPE_PRECISION (type)
14175 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14176 return 0;
14177 /* NOWRAP only extends to operations in the outermost type so
14178 make sure to strip it off here. */
14179 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14180 TREE_OPERAND (top, 0), bottom, false);
14182 case SAVE_EXPR:
14183 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14185 case COND_EXPR:
14186 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14187 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14189 case INTEGER_CST:
14190 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14191 return 0;
14192 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14193 SIGNED);
14195 case SSA_NAME:
14196 if (TREE_CODE (bottom) == INTEGER_CST
14197 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14198 && gimple_code (stmt) == GIMPLE_ASSIGN)
14200 enum tree_code code = gimple_assign_rhs_code (stmt);
14202 /* Check for special cases to see if top is defined as multiple
14203 of bottom:
14205 top = X & ~(bottom - 1) ; bottom is a power of 2, or
14209 Y = X % bottom
14210 top = X - Y. */
14211 if (code == BIT_AND_EXPR
14212 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14213 && TREE_CODE (op2) == INTEGER_CST
14214 && integer_pow2p (bottom)
14215 && wi::multiple_of_p (wi::to_widest (op2),
14216 wi::to_widest (bottom), UNSIGNED))
14217 return 1;
14219 op1 = gimple_assign_rhs1 (stmt);
14220 if (code == MINUS_EXPR
14221 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14222 && TREE_CODE (op2) == SSA_NAME
14223 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14224 && gimple_code (stmt) == GIMPLE_ASSIGN
14225 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14226 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14227 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14228 return 1;
14231 /* fall through */
14233 default:
14234 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14235 return multiple_p (wi::to_poly_widest (top),
14236 wi::to_poly_widest (bottom));
14238 return 0;
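/* Editorial examples (not part of the original sources) of what the
   routine above can and cannot prove, for 32-bit int x:
     x * 4 + 8 vs. 4 -> 1 (both terms are divisible);
     x * 6 vs. 4 -> 0 (holds only for even x, so it cannot be proven);
     x << 3 vs. 8 -> 1 (the shift is handled as x * 8).  */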
14242 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14243 This function returns true for integer expressions, and returns
14244 false if uncertain. */
14246 bool
14247 tree_expr_finite_p (const_tree x)
14249 machine_mode mode = element_mode (x);
14250 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14251 return true;
14252 switch (TREE_CODE (x))
14254 case REAL_CST:
14255 return real_isfinite (TREE_REAL_CST_PTR (x));
14256 case COMPLEX_CST:
14257 return tree_expr_finite_p (TREE_REALPART (x))
14258 && tree_expr_finite_p (TREE_IMAGPART (x));
14259 case FLOAT_EXPR:
14260 return true;
14261 case ABS_EXPR:
14262 case CONVERT_EXPR:
14263 case NON_LVALUE_EXPR:
14264 case NEGATE_EXPR:
14265 case SAVE_EXPR:
14266 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14267 case MIN_EXPR:
14268 case MAX_EXPR:
14269 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14270 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14271 case COND_EXPR:
14272 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14273 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14274 case CALL_EXPR:
14275 switch (get_call_combined_fn (x))
14277 CASE_CFN_FABS:
14278 CASE_CFN_FABS_FN:
14279 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14280 CASE_CFN_FMAX:
14281 CASE_CFN_FMAX_FN:
14282 CASE_CFN_FMIN:
14283 CASE_CFN_FMIN_FN:
14284 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14285 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14286 default:
14287 return false;
14290 default:
14291 return false;
14295 /* Return true if expression X evaluates to an infinity.
14296 This function returns false for integer expressions. */
14298 bool
14299 tree_expr_infinite_p (const_tree x)
14301 if (!HONOR_INFINITIES (x))
14302 return false;
14303 switch (TREE_CODE (x))
14305 case REAL_CST:
14306 return real_isinf (TREE_REAL_CST_PTR (x));
14307 case ABS_EXPR:
14308 case NEGATE_EXPR:
14309 case NON_LVALUE_EXPR:
14310 case SAVE_EXPR:
14311 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14312 case COND_EXPR:
14313 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14314 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14315 default:
14316 return false;
14320 /* Return true if expression X could evaluate to an infinity.
14321 This function returns false for integer expressions, and returns
14322 true if uncertain. */
14324 bool
14325 tree_expr_maybe_infinite_p (const_tree x)
14327 if (!HONOR_INFINITIES (x))
14328 return false;
14329 switch (TREE_CODE (x))
14331 case REAL_CST:
14332 return real_isinf (TREE_REAL_CST_PTR (x));
14333 case FLOAT_EXPR:
14334 return false;
14335 case ABS_EXPR:
14336 case NEGATE_EXPR:
14337 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14338 case COND_EXPR:
14339 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14340 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14341 default:
14342 return true;
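/* Hedged example of the asymmetry between the two predicates above:
   for an arbitrary double SSA_NAME x (with infinities honored),

     tree_expr_infinite_p (x)                    => false
     tree_expr_maybe_infinite_p (x)              => true
     tree_expr_maybe_infinite_p (FLOAT_EXPR <i>) => false

   i.e. x cannot be proved infinite, but cannot be ruled out either,
   while converting an integer to a float never yields an infinity.  */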
14346 /* Return true if expression X evaluates to a signaling NaN.
14347 This function returns false for integer expressions. */
14349 bool
14350 tree_expr_signaling_nan_p (const_tree x)
14352 if (!HONOR_SNANS (x))
14353 return false;
14354 switch (TREE_CODE (x))
14356 case REAL_CST:
14357 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14358 case NON_LVALUE_EXPR:
14359 case SAVE_EXPR:
14360 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14361 case COND_EXPR:
14362 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14363 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14364 default:
14365 return false;
14369 /* Return true if expression X could evaluate to a signaling NaN.
14370 This function returns false for integer expressions, and returns
14371 true if uncertain. */
14373 bool
14374 tree_expr_maybe_signaling_nan_p (const_tree x)
14376 if (!HONOR_SNANS (x))
14377 return false;
14378 switch (TREE_CODE (x))
14380 case REAL_CST:
14381 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14382 case FLOAT_EXPR:
14383 return false;
14384 case ABS_EXPR:
14385 case CONVERT_EXPR:
14386 case NEGATE_EXPR:
14387 case NON_LVALUE_EXPR:
14388 case SAVE_EXPR:
14389 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14390 case MIN_EXPR:
14391 case MAX_EXPR:
14392 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14393 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14394 case COND_EXPR:
14395 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14396 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14397 case CALL_EXPR:
14398 switch (get_call_combined_fn (x))
14400 CASE_CFN_FABS:
14401 CASE_CFN_FABS_FN:
14402 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14403 CASE_CFN_FMAX:
14404 CASE_CFN_FMAX_FN:
14405 CASE_CFN_FMIN:
14406 CASE_CFN_FMIN_FN:
14407 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14408 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14409 default:
14410 return true;
14412 default:
14413 return true;
14417 /* Return true if expression X evaluates to a NaN.
14418 This function returns false for integer expressions. */
14420 bool
14421 tree_expr_nan_p (const_tree x)
14423 if (!HONOR_NANS (x))
14424 return false;
14425 switch (TREE_CODE (x))
14427 case REAL_CST:
14428 return real_isnan (TREE_REAL_CST_PTR (x));
14429 case NON_LVALUE_EXPR:
14430 case SAVE_EXPR:
14431 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14432 case COND_EXPR:
14433 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14434 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14435 default:
14436 return false;
14440 /* Return true if expression X could evaluate to a NaN.
14441 This function returns false for integer expressions, and returns
14442 true if uncertain. */
14444 bool
14445 tree_expr_maybe_nan_p (const_tree x)
14447 if (!HONOR_NANS (x))
14448 return false;
14449 switch (TREE_CODE (x))
14451 case REAL_CST:
14452 return real_isnan (TREE_REAL_CST_PTR (x));
14453 case FLOAT_EXPR:
14454 return false;
14455 case PLUS_EXPR:
14456 case MINUS_EXPR:
14457 case MULT_EXPR:
14458 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14459 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14460 case ABS_EXPR:
14461 case CONVERT_EXPR:
14462 case NEGATE_EXPR:
14463 case NON_LVALUE_EXPR:
14464 case SAVE_EXPR:
14465 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14466 case MIN_EXPR:
14467 case MAX_EXPR:
14468 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14469 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14470 case COND_EXPR:
14471 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14472 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14473 case CALL_EXPR:
14474 switch (get_call_combined_fn (x))
14476 CASE_CFN_FABS:
14477 CASE_CFN_FABS_FN:
14478 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14479 CASE_CFN_FMAX:
14480 CASE_CFN_FMAX_FN:
14481 CASE_CFN_FMIN:
14482 CASE_CFN_FMIN_FN:
14483 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14484 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14485 default:
14486 return true;
14488 default:
14489 return true;
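/* Worked example for the PLUS_EXPR/MINUS_EXPR/MULT_EXPR case above
   (illustrative only): an addition can create a NaN from non-NaN
   operands, e.g. +Inf + -Inf or 0 * Inf, so the code requires both
   operands to be provably finite rather than merely non-NaN:

     tree_expr_maybe_nan_p (PLUS_EXPR <FLOAT_EXPR <i>, 1.0>) => false
     tree_expr_maybe_nan_p (PLUS_EXPR <x, 1.0>)              => true  */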
14493 /* Return true if expression X could evaluate to -0.0.
14494 This function returns true if uncertain. */
14496 bool
14497 tree_expr_maybe_real_minus_zero_p (const_tree x)
14499 if (!HONOR_SIGNED_ZEROS (x))
14500 return false;
14501 switch (TREE_CODE (x))
14503 case REAL_CST:
14504 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14505 case INTEGER_CST:
14506 case FLOAT_EXPR:
14507 case ABS_EXPR:
14508 return false;
14509 case NON_LVALUE_EXPR:
14510 case SAVE_EXPR:
14511 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14512 case COND_EXPR:
14513 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14514 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14515 case CALL_EXPR:
14516 switch (get_call_combined_fn (x))
14518 CASE_CFN_FABS:
14519 CASE_CFN_FABS_FN:
14520 return false;
14521 default:
14522 break;
14524 default:
14525 break;
14527 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14528 but currently those predicates require tree and not const_tree. */
14529 return true;
14532 #define tree_expr_nonnegative_warnv_p(X, Y) \
14533 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14535 #define RECURSE(X) \
14536 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
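/* The #define above deliberately poisons direct calls, so every
   recursive call is funneled through RECURSE, which both threads
   STRICT_OVERFLOW_P and bumps DEPTH; parenthesizing the function name
   inside RECURSE suppresses the function-like macro and calls the
   real function.  */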
14538 /* Return true if CODE or TYPE is known to be non-negative. */
14540 static bool
14541 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14543 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14544 && truth_value_p (code))
14545 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14546 have a signed:1 type (where the values are -1 and 0). */
14547 return true;
14548 return false;
14551 /* Return true if (CODE OP0) is known to be non-negative. If the return
14552 value is based on the assumption that signed overflow is undefined,
14553 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14554 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14556 bool
14557 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14558 bool *strict_overflow_p, int depth)
14560 if (TYPE_UNSIGNED (type))
14561 return true;
14563 switch (code)
14565 case ABS_EXPR:
14566 /* We can't return 1 if flag_wrapv is set because
14567 ABS_EXPR<INT_MIN> = INT_MIN. */
14568 if (!ANY_INTEGRAL_TYPE_P (type))
14569 return true;
14570 if (TYPE_OVERFLOW_UNDEFINED (type))
14572 *strict_overflow_p = true;
14573 return true;
14575 break;
14577 case NON_LVALUE_EXPR:
14578 case FLOAT_EXPR:
14579 case FIX_TRUNC_EXPR:
14580 return RECURSE (op0);
14582 CASE_CONVERT:
14584 tree inner_type = TREE_TYPE (op0);
14585 tree outer_type = type;
14587 if (SCALAR_FLOAT_TYPE_P (outer_type))
14589 if (SCALAR_FLOAT_TYPE_P (inner_type))
14590 return RECURSE (op0);
14591 if (INTEGRAL_TYPE_P (inner_type))
14593 if (TYPE_UNSIGNED (inner_type))
14594 return true;
14595 return RECURSE (op0);
14598 else if (INTEGRAL_TYPE_P (outer_type))
14600 if (SCALAR_FLOAT_TYPE_P (inner_type))
14601 return RECURSE (op0);
14602 if (INTEGRAL_TYPE_P (inner_type))
14603 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14604 && TYPE_UNSIGNED (inner_type);
14607 break;
14609 default:
14610 return tree_simple_nonnegative_warnv_p (code, type);
14613 /* We don't know the sign of the expression, so be conservative and return false. */
14614 return false;
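/* Example for the CASE_CONVERT logic above (a sketch): with
   unsigned short us and unsigned int u,

     (int) us  is known non-negative: the inner type is unsigned and
               strictly narrower than the result;
     (int) u   is not: values of u >= 2^31 convert to negative ints.  */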
14617 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14618 value is based on the assumption that signed overflow is undefined,
14619 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14620 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14622 bool
14623 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14624 tree op1, bool *strict_overflow_p,
14625 int depth)
14627 if (TYPE_UNSIGNED (type))
14628 return true;
14630 switch (code)
14632 case POINTER_PLUS_EXPR:
14633 case PLUS_EXPR:
14634 if (FLOAT_TYPE_P (type))
14635 return RECURSE (op0) && RECURSE (op1);
14637 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14638 both unsigned and at least 2 bits shorter than the result. */
14639 if (TREE_CODE (type) == INTEGER_TYPE
14640 && TREE_CODE (op0) == NOP_EXPR
14641 && TREE_CODE (op1) == NOP_EXPR)
14643 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14644 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14645 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14646 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14648 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14649 TYPE_PRECISION (inner2)) + 1;
14650 return prec < TYPE_PRECISION (type);
14653 break;
14655 case MULT_EXPR:
14656 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14658 /* x * x is always non-negative for floating point x
14659 or without overflow. */
14660 if (operand_equal_p (op0, op1, 0)
14661 || (RECURSE (op0) && RECURSE (op1)))
14663 if (ANY_INTEGRAL_TYPE_P (type)
14664 && TYPE_OVERFLOW_UNDEFINED (type))
14665 *strict_overflow_p = true;
14666 return true;
14670 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14671 both unsigned and their total bits is shorter than the result. */
14672 if (TREE_CODE (type) == INTEGER_TYPE
14673 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14674 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14676 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14677 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14678 : TREE_TYPE (op0);
14679 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14680 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14681 : TREE_TYPE (op1);
14683 bool unsigned0 = TYPE_UNSIGNED (inner0);
14684 bool unsigned1 = TYPE_UNSIGNED (inner1);
14686 if (TREE_CODE (op0) == INTEGER_CST)
14687 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14689 if (TREE_CODE (op1) == INTEGER_CST)
14690 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14692 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14693 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14695 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14696 ? tree_int_cst_min_precision (op0, UNSIGNED)
14697 : TYPE_PRECISION (inner0);
14699 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14700 ? tree_int_cst_min_precision (op1, UNSIGNED)
14701 : TYPE_PRECISION (inner1);
14703 return precision0 + precision1 < TYPE_PRECISION (type);
14706 return false;
14708 case BIT_AND_EXPR:
14709 return RECURSE (op0) || RECURSE (op1);
14711 case MAX_EXPR:
14712 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14713 things. */
14714 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14715 return RECURSE (op0) && RECURSE (op1);
14716 return RECURSE (op0) || RECURSE (op1);
14718 case BIT_IOR_EXPR:
14719 case BIT_XOR_EXPR:
14720 case MIN_EXPR:
14721 case RDIV_EXPR:
14722 case TRUNC_DIV_EXPR:
14723 case CEIL_DIV_EXPR:
14724 case FLOOR_DIV_EXPR:
14725 case ROUND_DIV_EXPR:
14726 return RECURSE (op0) && RECURSE (op1);
14728 case TRUNC_MOD_EXPR:
14729 return RECURSE (op0);
14731 case FLOOR_MOD_EXPR:
14732 return RECURSE (op1);
14734 case CEIL_MOD_EXPR:
14735 case ROUND_MOD_EXPR:
14736 default:
14737 return tree_simple_nonnegative_warnv_p (code, type);
14740 /* We don't know the sign of the expression, so be conservative and return false. */
14741 return false;
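/* Two hedged examples for the cases above.  For PLUS_EXPR, with
   unsigned char a and b,

     (int) a + (int) b  is known non-negative, since
     MAX (8, 8) + 1 = 9 bits < the 32 bits of the result.

   For MULT_EXPR, x * x is known non-negative only for float types or
   when signed overflow is undefined: with -fwrapv, 46341 * 46341
   wraps to the negative int -2147479015.  */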
14744 /* Return true if T is known to be non-negative. If the return
14745 value is based on the assumption that signed overflow is undefined,
14746 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14747 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14749 bool
14750 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14752 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14753 return true;
14755 switch (TREE_CODE (t))
14757 case INTEGER_CST:
14758 return tree_int_cst_sgn (t) >= 0;
14760 case REAL_CST:
14761 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14763 case FIXED_CST:
14764 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14766 case COND_EXPR:
14767 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14769 case SSA_NAME:
14770 /* Limit the depth of recursion to avoid quadratic behavior.
14771 This is expected to catch almost all occurrences in practice.
14772 If this code misses important cases that unbounded recursion
14773 would not, passes that need this information could be revised
14774 to provide it through dataflow propagation. */
14775 return (!name_registered_for_update_p (t)
14776 && depth < param_max_ssa_name_query_depth
14777 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14778 strict_overflow_p, depth));
14780 default:
14781 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14785 /* Return true if T is known to be non-negative. If the return
14786 value is based on the assumption that signed overflow is undefined,
14787 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14788 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14790 bool
14791 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14792 bool *strict_overflow_p, int depth)
14794 switch (fn)
14796 CASE_CFN_ACOS:
14797 CASE_CFN_ACOS_FN:
14798 CASE_CFN_ACOSH:
14799 CASE_CFN_ACOSH_FN:
14800 CASE_CFN_CABS:
14801 CASE_CFN_CABS_FN:
14802 CASE_CFN_COSH:
14803 CASE_CFN_COSH_FN:
14804 CASE_CFN_ERFC:
14805 CASE_CFN_ERFC_FN:
14806 CASE_CFN_EXP:
14807 CASE_CFN_EXP_FN:
14808 CASE_CFN_EXP10:
14809 CASE_CFN_EXP2:
14810 CASE_CFN_EXP2_FN:
14811 CASE_CFN_FABS:
14812 CASE_CFN_FABS_FN:
14813 CASE_CFN_FDIM:
14814 CASE_CFN_FDIM_FN:
14815 CASE_CFN_HYPOT:
14816 CASE_CFN_HYPOT_FN:
14817 CASE_CFN_POW10:
14818 CASE_CFN_FFS:
14819 CASE_CFN_PARITY:
14820 CASE_CFN_POPCOUNT:
14821 CASE_CFN_CLZ:
14822 CASE_CFN_CLRSB:
14823 case CFN_BUILT_IN_BSWAP16:
14824 case CFN_BUILT_IN_BSWAP32:
14825 case CFN_BUILT_IN_BSWAP64:
14826 case CFN_BUILT_IN_BSWAP128:
14827 /* Always true. */
14828 return true;
14830 CASE_CFN_SQRT:
14831 CASE_CFN_SQRT_FN:
14832 /* sqrt(-0.0) is -0.0. */
14833 if (!HONOR_SIGNED_ZEROS (type))
14834 return true;
14835 return RECURSE (arg0);
14837 CASE_CFN_ASINH:
14838 CASE_CFN_ASINH_FN:
14839 CASE_CFN_ATAN:
14840 CASE_CFN_ATAN_FN:
14841 CASE_CFN_ATANH:
14842 CASE_CFN_ATANH_FN:
14843 CASE_CFN_CBRT:
14844 CASE_CFN_CBRT_FN:
14845 CASE_CFN_CEIL:
14846 CASE_CFN_CEIL_FN:
14847 CASE_CFN_ERF:
14848 CASE_CFN_ERF_FN:
14849 CASE_CFN_EXPM1:
14850 CASE_CFN_EXPM1_FN:
14851 CASE_CFN_FLOOR:
14852 CASE_CFN_FLOOR_FN:
14853 CASE_CFN_FMOD:
14854 CASE_CFN_FMOD_FN:
14855 CASE_CFN_FREXP:
14856 CASE_CFN_FREXP_FN:
14857 CASE_CFN_ICEIL:
14858 CASE_CFN_IFLOOR:
14859 CASE_CFN_IRINT:
14860 CASE_CFN_IROUND:
14861 CASE_CFN_LCEIL:
14862 CASE_CFN_LDEXP:
14863 CASE_CFN_LFLOOR:
14864 CASE_CFN_LLCEIL:
14865 CASE_CFN_LLFLOOR:
14866 CASE_CFN_LLRINT:
14867 CASE_CFN_LLRINT_FN:
14868 CASE_CFN_LLROUND:
14869 CASE_CFN_LLROUND_FN:
14870 CASE_CFN_LRINT:
14871 CASE_CFN_LRINT_FN:
14872 CASE_CFN_LROUND:
14873 CASE_CFN_LROUND_FN:
14874 CASE_CFN_MODF:
14875 CASE_CFN_MODF_FN:
14876 CASE_CFN_NEARBYINT:
14877 CASE_CFN_NEARBYINT_FN:
14878 CASE_CFN_RINT:
14879 CASE_CFN_RINT_FN:
14880 CASE_CFN_ROUND:
14881 CASE_CFN_ROUND_FN:
14882 CASE_CFN_ROUNDEVEN:
14883 CASE_CFN_ROUNDEVEN_FN:
14884 CASE_CFN_SCALB:
14885 CASE_CFN_SCALBLN:
14886 CASE_CFN_SCALBLN_FN:
14887 CASE_CFN_SCALBN:
14888 CASE_CFN_SCALBN_FN:
14889 CASE_CFN_SIGNBIT:
14890 CASE_CFN_SIGNIFICAND:
14891 CASE_CFN_SINH:
14892 CASE_CFN_SINH_FN:
14893 CASE_CFN_TANH:
14894 CASE_CFN_TANH_FN:
14895 CASE_CFN_TRUNC:
14896 CASE_CFN_TRUNC_FN:
14897 /* True if the 1st argument is nonnegative. */
14898 return RECURSE (arg0);
14900 CASE_CFN_FMAX:
14901 CASE_CFN_FMAX_FN:
14902 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14903 things. In the presence of sNaNs, we're only guaranteed to be
14904 non-negative if both operands are non-negative. In the presence
14905 of qNaNs, we're non-negative if either operand is non-negative
14906 and can't be a qNaN, or if both operands are non-negative. */
14907 if (tree_expr_maybe_signaling_nan_p (arg0)
14908 || tree_expr_maybe_signaling_nan_p (arg1))
14909 return RECURSE (arg0) && RECURSE (arg1);
14910 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14911 || RECURSE (arg1))
14912 : (RECURSE (arg1)
14913 && !tree_expr_maybe_nan_p (arg1));
14915 CASE_CFN_FMIN:
14916 CASE_CFN_FMIN_FN:
14917 /* True if the 1st AND 2nd arguments are nonnegative. */
14918 return RECURSE (arg0) && RECURSE (arg1);
14920 CASE_CFN_COPYSIGN:
14921 CASE_CFN_COPYSIGN_FN:
14922 /* True if the 2nd argument is nonnegative. */
14923 return RECURSE (arg1);
14925 CASE_CFN_POWI:
14926 /* True if the 1st argument is nonnegative or the second
14927 argument is an even integer. */
14928 if (TREE_CODE (arg1) == INTEGER_CST
14929 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14930 return true;
14931 return RECURSE (arg0);
14933 CASE_CFN_POW:
14934 CASE_CFN_POW_FN:
14935 /* True if the 1st argument is nonnegative or the second
14936 argument is an even integer valued real. */
14937 if (TREE_CODE (arg1) == REAL_CST)
14939 REAL_VALUE_TYPE c;
14940 HOST_WIDE_INT n;
14942 c = TREE_REAL_CST (arg1);
14943 n = real_to_integer (&c);
14944 if ((n & 1) == 0)
14946 REAL_VALUE_TYPE cint;
14947 real_from_integer (&cint, VOIDmode, n, SIGNED);
14948 if (real_identical (&c, &cint))
14949 return true;
14952 return RECURSE (arg0);
14954 default:
14955 break;
14957 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
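/* Example for the CASE_CFN_POW logic above (illustrative only):
   pow (x, 2.0) is recognized as non-negative because 2.0 is an even
   integer-valued REAL_CST, whereas pow (x, 2.5) and pow (x, y) fall
   back to requiring that the first argument be non-negative.  */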
14960 /* Return true if T is known to be non-negative. If the return
14961 value is based on the assumption that signed overflow is undefined,
14962 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14963 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14965 static bool
14966 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14968 enum tree_code code = TREE_CODE (t);
14969 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14970 return true;
14972 switch (code)
14974 case TARGET_EXPR:
14976 tree temp = TARGET_EXPR_SLOT (t);
14977 t = TARGET_EXPR_INITIAL (t);
14979 /* If the initializer is non-void, then it's a normal expression
14980 that will be assigned to the slot. */
14981 if (!VOID_TYPE_P (TREE_TYPE (t)))
14982 return RECURSE (t);
14984 /* Otherwise, the initializer sets the slot in some way. One common
14985 way is an assignment statement at the end of the initializer. */
14986 while (1)
14988 if (TREE_CODE (t) == BIND_EXPR)
14989 t = expr_last (BIND_EXPR_BODY (t));
14990 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14991 || TREE_CODE (t) == TRY_CATCH_EXPR)
14992 t = expr_last (TREE_OPERAND (t, 0));
14993 else if (TREE_CODE (t) == STATEMENT_LIST)
14994 t = expr_last (t);
14995 else
14996 break;
14998 if (TREE_CODE (t) == MODIFY_EXPR
14999 && TREE_OPERAND (t, 0) == temp)
15000 return RECURSE (TREE_OPERAND (t, 1));
15002 return false;
15005 case CALL_EXPR:
15007 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15008 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15010 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15011 get_call_combined_fn (t),
15012 arg0,
15013 arg1,
15014 strict_overflow_p, depth);
15016 case COMPOUND_EXPR:
15017 case MODIFY_EXPR:
15018 return RECURSE (TREE_OPERAND (t, 1));
15020 case BIND_EXPR:
15021 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15023 case SAVE_EXPR:
15024 return RECURSE (TREE_OPERAND (t, 0));
15026 default:
15027 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15031 #undef RECURSE
15032 #undef tree_expr_nonnegative_warnv_p
15034 /* Return true if T is known to be non-negative. If the return
15035 value is based on the assumption that signed overflow is undefined,
15036 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15037 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15039 bool
15040 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15042 enum tree_code code;
15043 if (t == error_mark_node)
15044 return false;
15046 code = TREE_CODE (t);
15047 switch (TREE_CODE_CLASS (code))
15049 case tcc_binary:
15050 case tcc_comparison:
15051 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15052 TREE_TYPE (t),
15053 TREE_OPERAND (t, 0),
15054 TREE_OPERAND (t, 1),
15055 strict_overflow_p, depth);
15057 case tcc_unary:
15058 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15059 TREE_TYPE (t),
15060 TREE_OPERAND (t, 0),
15061 strict_overflow_p, depth);
15063 case tcc_constant:
15064 case tcc_declaration:
15065 case tcc_reference:
15066 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15068 default:
15069 break;
15072 switch (code)
15074 case TRUTH_AND_EXPR:
15075 case TRUTH_OR_EXPR:
15076 case TRUTH_XOR_EXPR:
15077 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15078 TREE_TYPE (t),
15079 TREE_OPERAND (t, 0),
15080 TREE_OPERAND (t, 1),
15081 strict_overflow_p, depth);
15082 case TRUTH_NOT_EXPR:
15083 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15084 TREE_TYPE (t),
15085 TREE_OPERAND (t, 0),
15086 strict_overflow_p, depth);
15088 case COND_EXPR:
15089 case CONSTRUCTOR:
15090 case OBJ_TYPE_REF:
15091 case ADDR_EXPR:
15092 case WITH_SIZE_EXPR:
15093 case SSA_NAME:
15094 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15096 default:
15097 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15101 /* Return true if `t' is known to be non-negative. Handle warnings
15102 about undefined signed overflow. */
15104 bool
15105 tree_expr_nonnegative_p (tree t)
15107 bool ret, strict_overflow_p;
15109 strict_overflow_p = false;
15110 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15111 if (strict_overflow_p)
15112 fold_overflow_warning (("assuming signed overflow does not occur when "
15113 "determining that expression is always "
15114 "non-negative"),
15115 WARN_STRICT_OVERFLOW_MISC);
15116 return ret;
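/* Usage sketch (illustrative only):

     bool strict_overflow_p = false;
     if (tree_expr_nonnegative_warnv_p (expr, &strict_overflow_p))
       ...

   Callers that handle STRICT_OVERFLOW_P themselves use the _warnv_p
   variant as above; the tree_expr_nonnegative_p wrapper instead emits
   the -Wstrict-overflow note on their behalf.  */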
15120 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15121 For floating point we further ensure that T is not denormal.
15122 Similar logic is present in nonzero_address in rtlanal.h.
15124 If the return value is based on the assumption that signed overflow
15125 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15126 change *STRICT_OVERFLOW_P. */
15128 bool
15129 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15130 bool *strict_overflow_p)
15132 switch (code)
15134 case ABS_EXPR:
15135 return tree_expr_nonzero_warnv_p (op0,
15136 strict_overflow_p);
15138 case NOP_EXPR:
15140 tree inner_type = TREE_TYPE (op0);
15141 tree outer_type = type;
15143 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15144 && tree_expr_nonzero_warnv_p (op0,
15145 strict_overflow_p));
15147 break;
15149 case NON_LVALUE_EXPR:
15150 return tree_expr_nonzero_warnv_p (op0,
15151 strict_overflow_p);
15153 default:
15154 break;
15157 return false;
15160 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15161 For floating point we further ensure that T is not denormal.
15162 Similar logic is present in nonzero_address in rtlanal.h.
15164 If the return value is based on the assumption that signed overflow
15165 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15166 change *STRICT_OVERFLOW_P. */
15168 bool
15169 tree_binary_nonzero_warnv_p (enum tree_code code,
15170 tree type,
15171 tree op0,
15172 tree op1, bool *strict_overflow_p)
15174 bool sub_strict_overflow_p;
15175 switch (code)
15177 case POINTER_PLUS_EXPR:
15178 case PLUS_EXPR:
15179 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15181 /* In the presence of negative values it is hard
15182 to say anything definite. */
15183 sub_strict_overflow_p = false;
15184 if (!tree_expr_nonnegative_warnv_p (op0,
15185 &sub_strict_overflow_p)
15186 || !tree_expr_nonnegative_warnv_p (op1,
15187 &sub_strict_overflow_p))
15188 return false;
15189 /* One of the operands must be positive and the other non-negative. */
15190 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15191 overflows, on a twos-complement machine the sum of two
15192 nonnegative numbers can never be zero. */
15193 return (tree_expr_nonzero_warnv_p (op0,
15194 strict_overflow_p)
15195 || tree_expr_nonzero_warnv_p (op1,
15196 strict_overflow_p));
15198 break;
15200 case MULT_EXPR:
15201 if (TYPE_OVERFLOW_UNDEFINED (type))
15203 if (tree_expr_nonzero_warnv_p (op0,
15204 strict_overflow_p)
15205 && tree_expr_nonzero_warnv_p (op1,
15206 strict_overflow_p))
15208 *strict_overflow_p = true;
15209 return true;
15212 break;
15214 case MIN_EXPR:
15215 sub_strict_overflow_p = false;
15216 if (tree_expr_nonzero_warnv_p (op0,
15217 &sub_strict_overflow_p)
15218 && tree_expr_nonzero_warnv_p (op1,
15219 &sub_strict_overflow_p))
15221 if (sub_strict_overflow_p)
15222 *strict_overflow_p = true;
15224 break;
15226 case MAX_EXPR:
15227 sub_strict_overflow_p = false;
15228 if (tree_expr_nonzero_warnv_p (op0,
15229 &sub_strict_overflow_p))
15231 if (sub_strict_overflow_p)
15232 *strict_overflow_p = true;
15234 /* When both operands are nonzero, then MAX must be too. */
15235 if (tree_expr_nonzero_warnv_p (op1,
15236 strict_overflow_p))
15237 return true;
15239 /* MAX where operand 0 is positive is positive. */
15240 return tree_expr_nonnegative_warnv_p (op0,
15241 strict_overflow_p);
15243 /* MAX where operand 1 is positive is positive. */
15244 else if (tree_expr_nonzero_warnv_p (op1,
15245 &sub_strict_overflow_p)
15246 && tree_expr_nonnegative_warnv_p (op1,
15247 &sub_strict_overflow_p))
15249 if (sub_strict_overflow_p)
15250 *strict_overflow_p = true;
15251 return true;
15253 break;
15255 case BIT_IOR_EXPR:
15256 return (tree_expr_nonzero_warnv_p (op1,
15257 strict_overflow_p)
15258 || tree_expr_nonzero_warnv_p (op0,
15259 strict_overflow_p));
15261 default:
15262 break;
15265 return false;
15268 /* Return true when T is an address and is known to be nonzero.
15269 For floating point we further ensure that T is not denormal.
15270 Similar logic is present in nonzero_address in rtlanal.h.
15272 If the return value is based on the assumption that signed overflow
15273 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15274 change *STRICT_OVERFLOW_P. */
15276 bool
15277 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15279 bool sub_strict_overflow_p;
15280 switch (TREE_CODE (t))
15282 case INTEGER_CST:
15283 return !integer_zerop (t);
15285 case ADDR_EXPR:
15287 tree base = TREE_OPERAND (t, 0);
15289 if (!DECL_P (base))
15290 base = get_base_address (base);
15292 if (base && TREE_CODE (base) == TARGET_EXPR)
15293 base = TARGET_EXPR_SLOT (base);
15295 if (!base)
15296 return false;
15298 /* For objects in symbol table check if we know they are non-zero.
15299 Don't do anything for variables and functions before symtab is built;
15300 it is quite possible that they will be declared weak later. */
15301 int nonzero_addr = maybe_nonzero_address (base);
15302 if (nonzero_addr >= 0)
15303 return nonzero_addr;
15305 /* Constants are never weak. */
15306 if (CONSTANT_CLASS_P (base))
15307 return true;
15309 return false;
15312 case COND_EXPR:
15313 sub_strict_overflow_p = false;
15314 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15315 &sub_strict_overflow_p)
15316 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15317 &sub_strict_overflow_p))
15319 if (sub_strict_overflow_p)
15320 *strict_overflow_p = true;
15321 return true;
15323 break;
15325 case SSA_NAME:
15326 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15327 break;
15328 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15330 default:
15331 break;
15333 return false;
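/* Hedged examples for the ADDR_EXPR case above (not from the sources):

     &"abc"    => nonzero, since constants are never weak;
     &weak_fn  => unknown, as a weak symbol may resolve to address 0;

   and before the symbol table is built, maybe_nonzero_address answers
   -1 even for ordinary variables, since they may still be declared
   weak later.  */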
15336 #define integer_valued_real_p(X) \
15337 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15339 #define RECURSE(X) \
15340 ((integer_valued_real_p) (X, depth + 1))
15342 /* Return true if the floating point result of (CODE OP0) has an
15343 integer value. We also allow +Inf, -Inf and NaN to be considered
15344 integer values. Return false for signaling NaN.
15346 DEPTH is the current nesting depth of the query. */
15348 bool
15349 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15351 switch (code)
15353 case FLOAT_EXPR:
15354 return true;
15356 case ABS_EXPR:
15357 return RECURSE (op0);
15359 CASE_CONVERT:
15361 tree type = TREE_TYPE (op0);
15362 if (TREE_CODE (type) == INTEGER_TYPE)
15363 return true;
15364 if (SCALAR_FLOAT_TYPE_P (type))
15365 return RECURSE (op0);
15366 break;
15369 default:
15370 break;
15372 return false;
15375 /* Return true if the floating point result of (CODE OP0 OP1) has an
15376 integer value. We also allow +Inf, -Inf and NaN to be considered
15377 integer values. Return false for signaling NaN.
15379 DEPTH is the current nesting depth of the query. */
15381 bool
15382 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15384 switch (code)
15386 case PLUS_EXPR:
15387 case MINUS_EXPR:
15388 case MULT_EXPR:
15389 case MIN_EXPR:
15390 case MAX_EXPR:
15391 return RECURSE (op0) && RECURSE (op1);
15393 default:
15394 break;
15396 return false;
15399 /* Return true if the floating point result of calling FNDECL with arguments
15400 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15401 considered integer values. Return false for signaling NaN. If FNDECL
15402 takes fewer than 2 arguments, the remaining ARGn are null.
15404 DEPTH is the current nesting depth of the query. */
15406 bool
15407 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15409 switch (fn)
15411 CASE_CFN_CEIL:
15412 CASE_CFN_CEIL_FN:
15413 CASE_CFN_FLOOR:
15414 CASE_CFN_FLOOR_FN:
15415 CASE_CFN_NEARBYINT:
15416 CASE_CFN_NEARBYINT_FN:
15417 CASE_CFN_RINT:
15418 CASE_CFN_RINT_FN:
15419 CASE_CFN_ROUND:
15420 CASE_CFN_ROUND_FN:
15421 CASE_CFN_ROUNDEVEN:
15422 CASE_CFN_ROUNDEVEN_FN:
15423 CASE_CFN_TRUNC:
15424 CASE_CFN_TRUNC_FN:
15425 return true;
15427 CASE_CFN_FMIN:
15428 CASE_CFN_FMIN_FN:
15429 CASE_CFN_FMAX:
15430 CASE_CFN_FMAX_FN:
15431 return RECURSE (arg0) && RECURSE (arg1);
15433 default:
15434 break;
15436 return false;
15439 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15440 has an integer value. We also allow +Inf, -Inf and NaN to be
15441 considered integer values. Return false for signaling NaN.
15443 DEPTH is the current nesting depth of the query. */
15445 bool
15446 integer_valued_real_single_p (tree t, int depth)
15448 switch (TREE_CODE (t))
15450 case REAL_CST:
15451 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15453 case COND_EXPR:
15454 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15456 case SSA_NAME:
15457 /* Limit the depth of recursion to avoid quadratic behavior.
15458 This is expected to catch almost all occurrences in practice.
15459 If this code misses important cases that unbounded recursion
15460 would not, passes that need this information could be revised
15461 to provide it through dataflow propagation. */
15462 return (!name_registered_for_update_p (t)
15463 && depth < param_max_ssa_name_query_depth
15464 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15465 depth));
15467 default:
15468 break;
15470 return false;
15473 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15474 has an integer value. We also allow +Inf, -Inf and NaN to be
15475 considered integer values. Return false for signaling NaN.
15477 DEPTH is the current nesting depth of the query. */
15479 static bool
15480 integer_valued_real_invalid_p (tree t, int depth)
15482 switch (TREE_CODE (t))
15484 case COMPOUND_EXPR:
15485 case MODIFY_EXPR:
15486 case BIND_EXPR:
15487 return RECURSE (TREE_OPERAND (t, 1));
15489 case SAVE_EXPR:
15490 return RECURSE (TREE_OPERAND (t, 0));
15492 default:
15493 break;
15495 return false;
15498 #undef RECURSE
15499 #undef integer_valued_real_p
15501 /* Return true if the floating point expression T has an integer value.
15502 We also allow +Inf, -Inf and NaN to be considered integer values.
15503 Return false for signaling NaN.
15505 DEPTH is the current nesting depth of the query. */
15507 bool
15508 integer_valued_real_p (tree t, int depth)
15510 if (t == error_mark_node)
15511 return false;
15513 STRIP_ANY_LOCATION_WRAPPER (t);
15515 tree_code code = TREE_CODE (t);
15516 switch (TREE_CODE_CLASS (code))
15518 case tcc_binary:
15519 case tcc_comparison:
15520 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15521 TREE_OPERAND (t, 1), depth);
15523 case tcc_unary:
15524 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15526 case tcc_constant:
15527 case tcc_declaration:
15528 case tcc_reference:
15529 return integer_valued_real_single_p (t, depth);
15531 default:
15532 break;
15535 switch (code)
15537 case COND_EXPR:
15538 case SSA_NAME:
15539 return integer_valued_real_single_p (t, depth);
15541 case CALL_EXPR:
15543 tree arg0 = (call_expr_nargs (t) > 0
15544 ? CALL_EXPR_ARG (t, 0)
15545 : NULL_TREE);
15546 tree arg1 = (call_expr_nargs (t) > 1
15547 ? CALL_EXPR_ARG (t, 1)
15548 : NULL_TREE);
15549 return integer_valued_real_call_p (get_call_combined_fn (t),
15550 arg0, arg1, depth);
15553 default:
15554 return integer_valued_real_invalid_p (t, depth);
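/* Illustrative examples (not from the sources):

     integer_valued_real_p (trunc (x))            => true
     integer_valued_real_p (floor (x) + ceil (y)) => true
     integer_valued_real_p (x * 2.0)              => false

   The last answer is "false if uncertain".  NaN and +/-Inf count as
   integer valued here, which is what lets a caller drop a redundant
   trunc or round around such an expression.  */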
15558 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15559 attempt to fold the expression to a constant without modifying TYPE,
15560 OP0 or OP1.
15562 If the expression could be simplified to a constant, then return
15563 the constant. If the expression cannot be simplified to a
15564 constant, then return NULL_TREE. */
15566 tree
15567 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15569 tree tem = fold_binary (code, type, op0, op1);
15570 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15573 /* Given the components of a unary expression CODE, TYPE and OP0,
15574 attempt to fold the expression to a constant without modifying
15575 TYPE or OP0.
15577 If the expression could be simplified to a constant, then return
15578 the constant. If the expression cannot be simplified to a
15579 constant, then return NULL_TREE. */
15581 tree
15582 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15584 tree tem = fold_unary (code, type, op0);
15585 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15588 /* If EXP represents referencing an element in a constant string
15589 (either via pointer arithmetic or array indexing), return the
15590 tree representing the value accessed, otherwise return NULL. */
15592 tree
15593 fold_read_from_constant_string (tree exp)
15595 if ((INDIRECT_REF_P (exp)
15596 || TREE_CODE (exp) == ARRAY_REF)
15597 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15599 tree exp1 = TREE_OPERAND (exp, 0);
15600 tree index;
15601 tree string;
15602 location_t loc = EXPR_LOCATION (exp);
15604 if (INDIRECT_REF_P (exp))
15605 string = string_constant (exp1, &index, NULL, NULL);
15606 else
15608 tree low_bound = array_ref_low_bound (exp);
15609 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15611 /* Optimize the special-case of a zero lower bound.
15613 We convert the low_bound to sizetype to avoid some problems
15614 with constant folding. (E.g. suppose the lower bound is 1,
15615 and its mode is QI. Without the conversion, (ARRAY
15616 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15617 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15618 if (! integer_zerop (low_bound))
15619 index = size_diffop_loc (loc, index,
15620 fold_convert_loc (loc, sizetype, low_bound));
15622 string = exp1;
15625 scalar_int_mode char_mode;
15626 if (string
15627 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15628 && TREE_CODE (string) == STRING_CST
15629 && tree_fits_uhwi_p (index)
15630 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15631 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15632 &char_mode)
15633 && GET_MODE_SIZE (char_mode) == 1)
15634 return build_int_cst_type (TREE_TYPE (exp),
15635 (TREE_STRING_POINTER (string)
15636 [TREE_INT_CST_LOW (index)]));
15638 return NULL;
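/* Example of the folding above (a sketch): given

     const char *p = "abc";

   a constant-index read such as p[1] folds to the character constant
   'b', provided the STRING_CST is found, the index lies within the
   string and the element mode is a single byte.  */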
15641 /* Folds a read from vector element at IDX of vector ARG. */
15643 tree
15644 fold_read_from_vector (tree arg, poly_uint64 idx)
15646 unsigned HOST_WIDE_INT i;
15647 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15648 && known_ge (idx, 0u)
15649 && idx.is_constant (&i))
15651 if (TREE_CODE (arg) == VECTOR_CST)
15652 return VECTOR_CST_ELT (arg, i);
15653 else if (TREE_CODE (arg) == CONSTRUCTOR)
15655 if (CONSTRUCTOR_NELTS (arg)
15656 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15657 return NULL_TREE;
15658 if (i >= CONSTRUCTOR_NELTS (arg))
15659 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15660 return CONSTRUCTOR_ELT (arg, i)->value;
15663 return NULL_TREE;
15666 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15667 an integer constant, real, or fixed-point constant.
15669 TYPE is the type of the result. */
15671 static tree
15672 fold_negate_const (tree arg0, tree type)
15674 tree t = NULL_TREE;
15676 switch (TREE_CODE (arg0))
15678 case REAL_CST:
15679 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15680 break;
15682 case FIXED_CST:
15684 FIXED_VALUE_TYPE f;
15685 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15686 &(TREE_FIXED_CST (arg0)), NULL,
15687 TYPE_SATURATING (type));
15688 t = build_fixed (type, f);
15689 /* Propagate overflow flags. */
15690 if (overflow_p | TREE_OVERFLOW (arg0))
15691 TREE_OVERFLOW (t) = 1;
15692 break;
15695 default:
15696 if (poly_int_tree_p (arg0))
15698 wi::overflow_type overflow;
15699 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15700 t = force_fit_type (type, res, 1,
15701 (overflow && ! TYPE_UNSIGNED (type))
15702 || TREE_OVERFLOW (arg0));
15703 break;
15706 gcc_unreachable ();
15709 return t;
15712 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15713 an integer constant or real constant.
15715 TYPE is the type of the result. */
15717 tree
15718 fold_abs_const (tree arg0, tree type)
15720 tree t = NULL_TREE;
15722 switch (TREE_CODE (arg0))
15724 case INTEGER_CST:
15726 /* If the value is unsigned or non-negative, then the absolute value
15727 is the same as the ordinary value. */
15728 wide_int val = wi::to_wide (arg0);
15729 wi::overflow_type overflow = wi::OVF_NONE;
15730 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15733 /* If the value is negative, then the absolute value is
15734 its negation. */
15735 else
15736 val = wi::neg (val, &overflow);
15738 /* Force to the destination type, set TREE_OVERFLOW for signed
15739 TYPE only. */
15740 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15742 break;
15744 case REAL_CST:
15745 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15746 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15747 else
15748 t = arg0;
15749 break;
15751 default:
15752 gcc_unreachable ();
15755 return t;
15758 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15759 constant. TYPE is the type of the result. */
15761 static tree
15762 fold_not_const (const_tree arg0, tree type)
15764 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15766 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15769 /* Given CODE, a relational operator, the target type, TYPE and two
15770 constant operands OP0 and OP1, return the result of the
15771 relational operation. If the result is not a compile time
15772 constant, then return NULL_TREE. */
15774 static tree
15775 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15777 int result, invert;
15779 /* From here on, the only cases we handle are when the result is
15780 known to be a constant. */
15782 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15784 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15785 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15787 /* Handle the cases where either operand is a NaN. */
15788 if (real_isnan (c0) || real_isnan (c1))
15790 switch (code)
15792 case EQ_EXPR:
15793 case ORDERED_EXPR:
15794 result = 0;
15795 break;
15797 case NE_EXPR:
15798 case UNORDERED_EXPR:
15799 case UNLT_EXPR:
15800 case UNLE_EXPR:
15801 case UNGT_EXPR:
15802 case UNGE_EXPR:
15803 case UNEQ_EXPR:
15804 result = 1;
15805 break;
15807 case LT_EXPR:
15808 case LE_EXPR:
15809 case GT_EXPR:
15810 case GE_EXPR:
15811 case LTGT_EXPR:
15812 if (flag_trapping_math)
15813 return NULL_TREE;
15814 result = 0;
15815 break;
15817 default:
15818 gcc_unreachable ();
15821 return constant_boolean_node (result, type);
15824 return constant_boolean_node (real_compare (code, c0, c1), type);
15827 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15829 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15830 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15831 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15834 /* Handle equality/inequality of complex constants. */
15835 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15837 tree rcond = fold_relational_const (code, type,
15838 TREE_REALPART (op0),
15839 TREE_REALPART (op1));
15840 tree icond = fold_relational_const (code, type,
15841 TREE_IMAGPART (op0),
15842 TREE_IMAGPART (op1));
15843 if (code == EQ_EXPR)
15844 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15845 else if (code == NE_EXPR)
15846 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15847 else
15848 return NULL_TREE;
15851 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15853 if (!VECTOR_TYPE_P (type))
15855 /* Have vector comparison with scalar boolean result. */
15856 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15857 && known_eq (VECTOR_CST_NELTS (op0),
15858 VECTOR_CST_NELTS (op1)));
15859 unsigned HOST_WIDE_INT nunits;
15860 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15861 return NULL_TREE;
15862 for (unsigned i = 0; i < nunits; i++)
15864 tree elem0 = VECTOR_CST_ELT (op0, i);
15865 tree elem1 = VECTOR_CST_ELT (op1, i);
15866 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15867 if (tmp == NULL_TREE)
15868 return NULL_TREE;
15869 if (integer_zerop (tmp))
15870 return constant_boolean_node (code == NE_EXPR, type);
15872 return constant_boolean_node (code == EQ_EXPR, type);
15874 tree_vector_builder elts;
15875 if (!elts.new_binary_operation (type, op0, op1, false))
15876 return NULL_TREE;
15877 unsigned int count = elts.encoded_nelts ();
15878 for (unsigned i = 0; i < count; i++)
15880 tree elem_type = TREE_TYPE (type);
15881 tree elem0 = VECTOR_CST_ELT (op0, i);
15882 tree elem1 = VECTOR_CST_ELT (op1, i);
15884 tree tem = fold_relational_const (code, elem_type,
15885 elem0, elem1);
15887 if (tem == NULL_TREE)
15888 return NULL_TREE;
15890 elts.quick_push (build_int_cst (elem_type,
15891 integer_zerop (tem) ? 0 : -1));
15894 return elts.build ();
15897 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15899 To compute GT, swap the arguments and do LT.
15900 To compute GE, do LT and invert the result.
15901 To compute LE, swap the arguments, do LT and invert the result.
15902 To compute NE, do EQ and invert the result.
15904 Therefore, the code below must handle only EQ and LT. */
15906 if (code == LE_EXPR || code == GT_EXPR)
15908 std::swap (op0, op1);
15909 code = swap_tree_comparison (code);
15912 /* Note that it is safe to invert for real values here because we
15913 have already handled the one case where it matters. */
15915 invert = 0;
15916 if (code == NE_EXPR || code == GE_EXPR)
15918 invert = 1;
15919 code = invert_tree_comparison (code, false);
15922 /* Compute a result for LT or EQ if args permit;
15923 Otherwise return T. */
15924 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15926 if (code == EQ_EXPR)
15927 result = tree_int_cst_equal (op0, op1);
15928 else
15929 result = tree_int_cst_lt (op0, op1);
15931 else
15932 return NULL_TREE;
15934 if (invert)
15935 result ^= 1;
15936 return constant_boolean_node (result, type);
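/* Worked NaN examples for the code above (illustrative):

     NaN == NaN  folds to 0,   NaN != NaN  folds to 1,
     NaN <  1.0  folds to 0 only with -fno-trapping-math, since the
     ordered comparison would raise an invalid-operation exception.  */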
15939 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15940 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15941 itself. */
15943 tree
15944 fold_build_cleanup_point_expr (tree type, tree expr)
15946 /* If the expression does not have side effects then we don't have to wrap
15947 it with a cleanup point expression. */
15948 if (!TREE_SIDE_EFFECTS (expr))
15949 return expr;
15951 /* If the expression is a return, check whether the expression inside the
15952 return, or the right-hand side of the modify expression inside the
15953 return, has side effects. If either of them has none, we don't need to
15954 wrap the expression in a cleanup point expression. Note we don't check the
15955 left-hand side of the modify because it should always be the return decl. */
15956 if (TREE_CODE (expr) == RETURN_EXPR)
15958 tree op = TREE_OPERAND (expr, 0);
15959 if (!op || !TREE_SIDE_EFFECTS (op))
15960 return expr;
15961 op = TREE_OPERAND (op, 1);
15962 if (!TREE_SIDE_EFFECTS (op))
15963 return expr;
15966 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15969 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15970 of an indirection through OP0, or NULL_TREE if no simplification is
15971 possible. */
15973 tree
15974 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15976 tree sub = op0;
15977 tree subtype;
15978 poly_uint64 const_op01;
15980 STRIP_NOPS (sub);
15981 subtype = TREE_TYPE (sub);
15982 if (!POINTER_TYPE_P (subtype)
15983 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15984 return NULL_TREE;
15986 if (TREE_CODE (sub) == ADDR_EXPR)
15988 tree op = TREE_OPERAND (sub, 0);
15989 tree optype = TREE_TYPE (op);
15991 /* *&CONST_DECL -> to the value of the const decl. */
15992 if (TREE_CODE (op) == CONST_DECL)
15993 return DECL_INITIAL (op);
15994 /* *&p => p; make sure to handle *&"str"[cst] here. */
15995 if (type == optype)
15997 tree fop = fold_read_from_constant_string (op);
15998 if (fop)
15999 return fop;
16000 else
16001 return op;
16003 /* *(foo *)&fooarray => fooarray[0] */
16004 else if (TREE_CODE (optype) == ARRAY_TYPE
16005 && type == TREE_TYPE (optype)
16006 && (!in_gimple_form
16007 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16009 tree type_domain = TYPE_DOMAIN (optype);
16010 tree min_val = size_zero_node;
16011 if (type_domain && TYPE_MIN_VALUE (type_domain))
16012 min_val = TYPE_MIN_VALUE (type_domain);
16013 if (in_gimple_form
16014 && TREE_CODE (min_val) != INTEGER_CST)
16015 return NULL_TREE;
16016 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16017 NULL_TREE, NULL_TREE);
16019 /* *(foo *)&complexfoo => __real__ complexfoo */
16020 else if (TREE_CODE (optype) == COMPLEX_TYPE
16021 && type == TREE_TYPE (optype))
16022 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16023 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16024 else if (VECTOR_TYPE_P (optype)
16025 && type == TREE_TYPE (optype))
16027 tree part_width = TYPE_SIZE (type);
16028 tree index = bitsize_int (0);
16029 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16030 index);
16034 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16035 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16037 tree op00 = TREE_OPERAND (sub, 0);
16038 tree op01 = TREE_OPERAND (sub, 1);
16040 STRIP_NOPS (op00);
16041 if (TREE_CODE (op00) == ADDR_EXPR)
16043 tree op00type;
16044 op00 = TREE_OPERAND (op00, 0);
16045 op00type = TREE_TYPE (op00);
16047 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16048 if (VECTOR_TYPE_P (op00type)
16049 && type == TREE_TYPE (op00type)
16050 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16051 but we want to treat offsets with MSB set as negative.
16052 For the code below negative offsets are invalid and
16053 TYPE_SIZE of the element is something unsigned, so
16054 check whether op01 fits into poly_int64, which implies
16055 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16056 then just use poly_uint64 because we want to treat the
16057 value as unsigned. */
16058 && tree_fits_poly_int64_p (op01))
16060 tree part_width = TYPE_SIZE (type);
16061 poly_uint64 max_offset
16062 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16063 * TYPE_VECTOR_SUBPARTS (op00type));
16064 if (known_lt (const_op01, max_offset))
16066 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16067 return fold_build3_loc (loc,
16068 BIT_FIELD_REF, type, op00,
16069 part_width, index);
16072 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16073 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16074 && type == TREE_TYPE (op00type))
16076 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16077 const_op01))
16078 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16080 /* ((foo *)&fooarray)[1] => fooarray[1] */
16081 else if (TREE_CODE (op00type) == ARRAY_TYPE
16082 && type == TREE_TYPE (op00type))
16084 tree type_domain = TYPE_DOMAIN (op00type);
16085 tree min_val = size_zero_node;
16086 if (type_domain && TYPE_MIN_VALUE (type_domain))
16087 min_val = TYPE_MIN_VALUE (type_domain);
16088 poly_uint64 type_size, index;
16089 if (poly_int_tree_p (min_val)
16090 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16091 && multiple_p (const_op01, type_size, &index))
16093 poly_offset_int off = index + wi::to_poly_offset (min_val);
16094 op01 = wide_int_to_tree (sizetype, off);
16095 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16096 NULL_TREE, NULL_TREE);
16102 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16103 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16104 && type == TREE_TYPE (TREE_TYPE (subtype))
16105 && (!in_gimple_form
16106 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16108 tree type_domain;
16109 tree min_val = size_zero_node;
16110 sub = build_fold_indirect_ref_loc (loc, sub);
16111 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16112 if (type_domain && TYPE_MIN_VALUE (type_domain))
16113 min_val = TYPE_MIN_VALUE (type_domain);
16114 if (in_gimple_form
16115 && TREE_CODE (min_val) != INTEGER_CST)
16116 return NULL_TREE;
16117 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16118 NULL_TREE);
16121 return NULL_TREE;
16124 /* Builds an expression for an indirection through T, simplifying some
16125 cases. */
16127 tree
16128 build_fold_indirect_ref_loc (location_t loc, tree t)
16130 tree type = TREE_TYPE (TREE_TYPE (t));
16131 tree sub = fold_indirect_ref_1 (loc, type, t);
16133 if (sub)
16134 return sub;
16136 return build1_loc (loc, INDIRECT_REF, type, t);
16139 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16141 tree
16142 fold_indirect_ref_loc (location_t loc, tree t)
16144 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16146 if (sub)
16147 return sub;
16148 else
16149 return t;
16152 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16153 whose result is ignored. The type of the returned tree need not be
16154 the same as that of the original expression. */
16156 tree
16157 fold_ignored_result (tree t)
16159 if (!TREE_SIDE_EFFECTS (t))
16160 return integer_zero_node;
16162 for (;;)
16163 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16165 case tcc_unary:
16166 t = TREE_OPERAND (t, 0);
16167 break;
16169 case tcc_binary:
16170 case tcc_comparison:
16171 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16172 t = TREE_OPERAND (t, 0);
16173 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16174 t = TREE_OPERAND (t, 1);
16175 else
16176 return t;
16177 break;
16179 case tcc_expression:
16180 switch (TREE_CODE (t))
16182 case COMPOUND_EXPR:
16183 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16184 return t;
16185 t = TREE_OPERAND (t, 0);
16186 break;
16188 case COND_EXPR:
16189 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16190 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16191 return t;
16192 t = TREE_OPERAND (t, 0);
16193 break;
16195 default:
16196 return t;
16198 break;
16200 default:
16201 return t;
16205 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16207 tree
16208 round_up_loc (location_t loc, tree value, unsigned int divisor)
16210 tree div = NULL_TREE;
16212 if (divisor == 1)
16213 return value;
16215 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16216 have to do anything. Only do this check when VALUE is not a
16217 constant; for a constant it is more expensive than simply
16218 doing the rounding. */
16219 if (TREE_CODE (value) != INTEGER_CST)
16221 div = build_int_cst (TREE_TYPE (value), divisor);
16223 if (multiple_of_p (TREE_TYPE (value), value, div))
16224 return value;
16227 /* If divisor is a power of two, simplify this to bit manipulation. */
16228 if (pow2_or_zerop (divisor))
16230 if (TREE_CODE (value) == INTEGER_CST)
16232 wide_int val = wi::to_wide (value);
16233 bool overflow_p;
16235 if ((val & (divisor - 1)) == 0)
16236 return value;
16238 overflow_p = TREE_OVERFLOW (value);
16239 val += divisor - 1;
16240 val &= (int) -divisor;
16241 if (val == 0)
16242 overflow_p = true;
16244 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16246 else
16248 tree t;
16250 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16251 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16252 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16253 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16256 else
16258 if (!div)
16259 div = build_int_cst (TREE_TYPE (value), divisor);
16260 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16261 value = size_binop_loc (loc, MULT_EXPR, value, div);
16264 return value;
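/* Worked example for the power-of-two path above: rounding up to
   DIVISOR = 8 computes (VALUE + 7) & -8, so for VALUE = 13:

     13 + 7 = 20,  20 & -8 = 16.

   The overflow check catches a VALUE so large that adding
   DIVISOR - 1 wraps the masked result to 0; non-powers of two take
   the CEIL_DIV_EXPR / MULT_EXPR path instead.  */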
16267 /* Likewise, but round down. */
16269 tree
16270 round_down_loc (location_t loc, tree value, int divisor)
16272 tree div = NULL_TREE;
16274 gcc_assert (divisor > 0);
16275 if (divisor == 1)
16276 return value;
16278 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16279 have to do anything. Only do this check when VALUE is not a
16280 constant; for a constant it is more expensive than simply
16281 doing the rounding. */
16282 if (TREE_CODE (value) != INTEGER_CST)
16284 div = build_int_cst (TREE_TYPE (value), divisor);
16286 if (multiple_of_p (TREE_TYPE (value), value, div))
16287 return value;
16290 /* If divisor is a power of two, simplify this to bit manipulation. */
16291 if (pow2_or_zerop (divisor))
16293 tree t;
16295 t = build_int_cst (TREE_TYPE (value), -divisor);
16296 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16298 else
16300 if (!div)
16301 div = build_int_cst (TREE_TYPE (value), divisor);
16302 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16303 value = size_binop_loc (loc, MULT_EXPR, value, div);
16306 return value;
16309 /* Returns the pointer to the base of the object addressed by EXP and
16310 extracts the information about the offset of the access, storing it
16311 to PBITPOS and POFFSET. */
16313 static tree
16314 split_address_to_core_and_offset (tree exp,
16315 poly_int64_pod *pbitpos, tree *poffset)
16317 tree core;
16318 machine_mode mode;
16319 int unsignedp, reversep, volatilep;
16320 poly_int64 bitsize;
16321 location_t loc = EXPR_LOCATION (exp);
16323 if (TREE_CODE (exp) == SSA_NAME)
16324 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16325 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16326 exp = gimple_assign_rhs1 (def);
16328 if (TREE_CODE (exp) == ADDR_EXPR)
16330 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16331 poffset, &mode, &unsignedp, &reversep,
16332 &volatilep);
16333 core = build_fold_addr_expr_loc (loc, core);
16335 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16337 core = TREE_OPERAND (exp, 0);
16338 STRIP_NOPS (core);
16339 *pbitpos = 0;
16340 *poffset = TREE_OPERAND (exp, 1);
16341 if (poly_int_tree_p (*poffset))
16343 poly_offset_int tem
16344 = wi::sext (wi::to_poly_offset (*poffset),
16345 TYPE_PRECISION (TREE_TYPE (*poffset)));
16346 tem <<= LOG2_BITS_PER_UNIT;
16347 if (tem.to_shwi (pbitpos))
16348 *poffset = NULL_TREE;
16351 else
16353 core = exp;
16354 *pbitpos = 0;
16355 *poffset = NULL_TREE;
16358 return core;
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
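/* Usage sketch, for exposition: with E1 == &a[3] and E2 == &a[1] over
   4-byte elements, both cores are &a and *DIFF is set to 8.  */
#if 0
static bool
example_ptr_diff (tree e1, tree e2, poly_int64 *res)
{
  poly_int64 diff;
  if (!ptr_difference_const (e1, e2, &diff))
    return false;
  /* E1 - E2 folds to the constant DIFF.  */
  *res = diff;
  return true;
}
#endif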
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
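/* Usage sketch, for exposition: tree.h provides fold_build_pointer_plus
   and fold_build_pointer_plus_hwi wrappers that pass UNKNOWN_LOCATION to
   the _loc variants above.  Assuming PTR is a pointer-valued tree:  */
#if 0
static void
example_offset_by_4 (tree ptr)
{
  /* Both build PTR p+ 4, with the offset converted to sizetype.  */
  tree p1 = fold_build_pointer_plus_hwi (ptr, 4);
  tree p2 = fold_build_pointer_plus (ptr, size_int (4));
  (void) p1;
  (void) p2;
}
#endif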
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).

   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., NUL-terminated) string with no embedded NULs, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 NUL.  Offsets past the initial length refer to the empty string.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single-byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}
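/* Usage sketch, reusing the example from the comment above; DECL_A is a
   hypothetical VAR_DECL for "const char a[7]" initialized with "abc\0d":  */
#if 0
static void
example_getbyterep (tree decl_a)
{
  unsigned HOST_WIDE_INT n;
  const char *rep = getbyterep (build_fold_addr_expr (decl_a), &n);
  /* REP points at the constant bytes "abc\0d" plus the literal's
     terminating NUL; N counts the bytes from the offset (here 0)
     through that NUL.  */
  (void) rep;
}
#endif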
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
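/* Sketch, for exposition (not the actual builtin folder): a caller
   folding strlen of a constant argument ARG might use c_getstr thus:  */
#if 0
static tree
example_fold_strlen (tree arg)
{
  if (const char *s = c_getstr (arg))
    return build_int_cst (size_type_node, strlen (s));
  return NULL_TREE;
}
#endif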
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
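/* Example, for exposition: for T == (x & 12) | 1 the recursion above
   returns a mask with only bits 0, 2 and 3 set, so T is known to be
   less than 16.  A hypothetical query:  */
#if 0
static bool
example_fits_in_4_bits (const_tree t)
{
  wide_int nz = tree_nonzero_bits (t);
  return wi::ltu_p (nz, 16);
}
#endif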
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  if (TREE_CODE (op0) == SSA_NAME)
    op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
  if (TREE_CODE (op1) == SSA_NAME)
    op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
		 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
	equal = 2;
      else
	size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
	equal = 2;
      else
	size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
	equal = 2;
      else
	size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
	equal = 2;
      else
	size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are nonzero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to the start of its object and
     the other points to the end of its object.  This is unspecified
     behavior e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the
     same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
	  || ioff1 < 0
	  || ioff0 > TREE_STRING_LENGTH (base0)
	  || ioff1 > TREE_STRING_LENGTH (base1))
	return 2;

      /* If the bytes in the string literals starting at the pointers
	 differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
		  TREE_STRING_POINTER (base1) + ioff1,
		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
	{
	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
		      ioffmin) == 0)
	    /* If even the bytes in the string literal before the
	       pointers are the same, the string literals could be
	       tail merged.  */
	    return 2;
	}
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other is one past the end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}
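/* Sketch of how the tri-state result could be consumed, simplified from
   the match.pd patterns and for exposition only (OP0 and OP1 are the
   ADDR_EXPRs being compared):  */
#if 0
static tree
example_fold_addr_eq (tree op0, tree op1)
{
  tree base0, base1;
  poly_int64 off0, off1;
  int eq = address_compare (EQ_EXPR, boolean_type_node, op0, op1,
			    base0, base1, off0, off1, true);
  if (eq == 0)
    return boolean_false_node;	/* Known unequal.  */
  if (eq == 1)
    {
      /* Same base: the comparison reduces to comparing the offsets.  */
      if (known_eq (off0, off1))
	return boolean_true_node;
      if (known_ne (off0, off1))
	return boolean_false_node;
    }
  return NULL_TREE;		/* Unknown; leave the comparison alone.  */
}
#endif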
/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */

tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
	if (elt)
	  return NULL_TREE;
	elt = ce->value;
      }
  return elt;
}
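/* Example, for exposition: for a CONSTRUCTOR representing { 0, 0, 5, 0 }
   this returns the element 5; for { 0, 1, 5, 0 } or an all-zero
   initializer it returns NULL_TREE.  A hypothetical caller:  */
#if 0
static bool
example_is_single_nonzero (const_tree ctor, tree *elt_out)
{
  tree elt = ctor_single_nonzero_element (ctor);
  if (!elt)
    return false;
  *elt_out = elt;
  return true;
}
#endif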
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}
/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */