/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding in a C++ manifestly-constant-evaluated
   context; zero otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
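/* Each bit in the encoding stands for one primitive outcome:
   bit 0 = "less than", bit 1 = "equal", bit 2 = "greater than" and
   bit 3 = "unordered".  For example, COMPCODE_LE (3) is
   COMPCODE_LT | COMPCODE_EQ, and COMPCODE_NE (13) is
   COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD, so the AND or OR of two
   comparisons of the same operands reduces to a bitwise AND or OR of
   their codes.  */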
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* This is a helper function to detect min/max for some operands of COND_EXPR.
   The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3".  */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
{
  enum tree_code code = ERROR_MARK;

  if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
    return ERROR_MARK;

  if (!operand_equal_p (exp0, exp2))
    return ERROR_MARK;

  if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
    {
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
	{
	  /* X <= Y - 1 is equivalent to X < Y.  */
	  if (cmp == LE_EXPR)
	    code = LT_EXPR;
	  /* X > Y - 1 is equivalent to X >= Y.  */
	  if (cmp == GT_EXPR)
	    code = GE_EXPR;
	  /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a>  */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int min = widest_int::from (r.lower_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (min == wi::to_widest (exp1))
		code = MAX_EXPR;
	    }
	}
      if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
	{
	  /* X < Y + 1 is equivalent to X <= Y.  */
	  if (cmp == LT_EXPR)
	    code = LE_EXPR;
	  /* X >= Y + 1 is equivalent to X > Y.  */
	  if (cmp == GE_EXPR)
	    code = GT_EXPR;
	  /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a>  */
	  if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	    {
	      value_range r;
	      get_range_query (cfun)->range_of_expr (r, exp0);
	      if (r.undefined_p ())
		r.set_varying (TREE_TYPE (exp0));

	      widest_int max = widest_int::from (r.upper_bound (),
						 TYPE_SIGN (TREE_TYPE (exp0)));
	      if (max == wi::to_widest (exp1))
		code = MIN_EXPR;
	    }
	}
    }
  if (code != ERROR_MARK
      || operand_equal_p (exp1, exp3))
    {
      if (cmp == LT_EXPR || cmp == LE_EXPR)
	code = MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	code = MAX_EXPR;
    }
  return code;
}
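/* For example, "(x < 10) ? x : 10" matches with CMP == LT_EXPR and
   EXP1 == EXP3 == 10, yielding MIN_EXPR, while "(x <= 9) ? x : 10"
   is recognized through the EXP3 - 1 adjustment above and yields
   MIN_EXPR as well.  */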
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modifies X in place;
   if the location can and needs to be set, X is unshared first.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
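/* For instance, calling this on the integer constants 12 and 4 yields
   the constant 3, whereas dividing 12 by 5 leaves a remainder and
   therefore yields NULL_TREE.  */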
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
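/* A minimal usage sketch of the machinery above (the predicate name
   below is hypothetical): callers bracket speculative folding like

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = result_is_actually_used_p (folded);
     fold_undefer_overflow_warnings (used, stmt, 0);

   so that a -Wstrict-overflow diagnostic is only emitted when the
   folded result is really used.  */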
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
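/* For example, sin is odd (-sin(x) == sin(-x)), so CFN_SIN is listed
   above, while cos is even (cos(-x) == cos(x)) and is deliberately
   absent.  The rint family is only odd under a symmetric rounding
   mode; with -frounding-math a directed rounding mode can make
   rint(-x) differ from -rint(x), hence the !flag_rounding_math
   guard.  */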
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
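/* The only signed value this rejects is the minimum of the type: for
   32-bit int that is INT_MIN (0x80000000, just the sign bit set),
   whose negation is not representable in the same type.  */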
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* (INT_MIN/n) * n doesn't overflow, but negating one operand makes
	 it overflow if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
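/* As an example of the MINUS_EXPR case: for signed int x and y under
   -fwrapv (so TYPE_OVERFLOW_WRAPS holds), negate_expr_p (x - y) is
   true, and fold_negate_expr_1 below rewrites -(x - y) as y - x.  */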
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
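/* For example, splitting IN = "x - 4" with CODE == PLUS_EXPR and
   NEGATE_P false decomposes the MINUS_EXPR: the returned variable part
   is x, the literal 4 goes to *MINUS_LITP because it was subtracted,
   and *CONP, *MINUS_CONP and *MINUS_VARP stay null.  */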
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
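/* Note how the rotate cases above canonicalize a negative count:
   rotating right by -3 is performed as rotating left by 3, which is
   the same bit permutation, so the sign flip plus direction swap
   preserves the result.  */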
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
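/* E.g. a left shift distributes over addition in operand 1:
   (a + b) << c == (a << c) + (b << c) under wrapping arithmetic,
   whereas it does not distribute in operand 2, since
   a << (b + c) is (a << b) << c rather than (a << b) + (a << c).  */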
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = bi/br;
		     div = (bi * ratio) + br;
		     tr = (ai * ratio) + ar;
		     ti = ai - (ar * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
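/* A small worked instance of the straightforward complex division
   above: with a = 1 + 2i and b = 3 + 4i we get t = 3*3 + 4*4 = 25,
   tr = (1*3 + 2*4)/25 = 11/25 and ti = (2*3 - 1*4)/25 = 2/25, i.e.
   (1 + 2i) / (3 + 4i) == 0.44 + 0.08i.  */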
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return NULL_TREE if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
1995 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1996 indicates which particular sizetype to create. */
1998 tree
1999 size_int_kind (poly_int64 number, enum size_type_kind kind)
2001 return build_int_cst (sizetype_tab[(int) kind], number);
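/* Illustrative usage (editor's sketch, not part of the original source).
   Callers normally reach size_int_kind through the convenience macros in
   tree.h; the set assumed here follows their standard definitions:

     tree a = size_int (16);      // stk_sizetype, unsigned
     tree b = ssize_int (-1);     // stk_ssizetype, sign-extended
     tree c = bitsize_int (128);  // stk_bitsizetype, for bit offsets

   Each expands to size_int_kind (N, stk_...), so NUMBER is interpreted
   in the selected sizetype.  */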
2004 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2005 is a tree code. The type of the result is taken from the operands.
2006 Both must be equivalent integer types, a la int_binop_types_match_p.
2007 If the operands are constant, so is the result. */
2009 tree
2010 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2012 tree type = TREE_TYPE (arg0);
2014 if (arg0 == error_mark_node || arg1 == error_mark_node)
2015 return error_mark_node;
2017 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2018 TREE_TYPE (arg1)));
2020 /* Handle the special case of two poly_int constants faster. */
2021 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2023 /* And some specific cases even faster than that. */
2024 if (code == PLUS_EXPR)
2026 if (integer_zerop (arg0)
2027 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2028 return arg1;
2029 if (integer_zerop (arg1)
2030 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2031 return arg0;
2033 else if (code == MINUS_EXPR)
2035 if (integer_zerop (arg1)
2036 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2037 return arg0;
2039 else if (code == MULT_EXPR)
2041 if (integer_onep (arg0)
2042 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2043 return arg1;
2046 /* Handle general case of two integer constants. For sizetype
2047 constant calculations we always want to know about overflow,
2048 even in the unsigned case. */
2049 tree res = int_const_binop (code, arg0, arg1, -1);
2050 if (res != NULL_TREE)
2051 return res;
2054 return fold_build2_loc (loc, code, type, arg0, arg1);
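/* Illustrative usage (editor's sketch).  size_binop is the usual entry
   point, a wrapper that supplies UNKNOWN_LOCATION.  Folding a constant
   byte-offset computation:

     tree off = size_binop (MULT_EXPR, size_int (4), size_int (10));
     // off is a sizetype INTEGER_CST with value 40.  The fast paths
     // above short-circuit x + 0, x - 0 and 1 * x without building
     // anything new.

   Both operands must agree per int_binop_types_match_p; mixing sizetype
   with bitsizetype here would trip the assert.  */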
2057 /* Given two values, either both of sizetype or both of bitsizetype,
2058 compute the difference between the two values. Return the value
2059 in signed type corresponding to the type of the operands. */
2061 tree
2062 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2064 tree type = TREE_TYPE (arg0);
2065 tree ctype;
2067 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2068 TREE_TYPE (arg1)));
2070 /* If the type is already signed, just do the simple thing. */
2071 if (!TYPE_UNSIGNED (type))
2072 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2074 if (type == sizetype)
2075 ctype = ssizetype;
2076 else if (type == bitsizetype)
2077 ctype = sbitsizetype;
2078 else
2079 ctype = signed_type_for (type);
2081 /* If either operand is not a constant, do the conversions to the signed
2082 type and subtract. The hardware will do the right thing with any
2083 overflow in the subtraction. */
2084 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2085 return size_binop_loc (loc, MINUS_EXPR,
2086 fold_convert_loc (loc, ctype, arg0),
2087 fold_convert_loc (loc, ctype, arg1));
2089 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2090 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2091 overflow) and negate (which can't either). Special-case a result
2092 of zero while we're here. */
2093 if (tree_int_cst_equal (arg0, arg1))
2094 return build_int_cst (ctype, 0);
2095 else if (tree_int_cst_lt (arg1, arg0))
2096 return fold_convert_loc (loc, ctype,
2097 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2098 else
2099 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2100 fold_convert_loc (loc, ctype,
2101 size_binop_loc (loc,
2102 MINUS_EXPR,
2103 arg1, arg0)));
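/* Worked example (editor's sketch).  The operands are unsigned, so a
   plain MINUS_EXPR of 2 - 6 would wrap to a huge sizetype value;
   size_diffop instead yields a signed result:

     tree d = size_diffop (size_int (2), size_int (6));
     // d is an ssizetype INTEGER_CST with value -4, computed as the
     // negation of (6 - 2) so the subtraction itself never overflows.  */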
2106 /* A subroutine of fold_convert_const handling conversions of an
2107 INTEGER_CST to another integer type. */
2109 static tree
2110 fold_convert_const_int_from_int (tree type, const_tree arg1)
2112 /* Given an integer constant, make new constant with new type,
2113 appropriately sign-extended or truncated. Use widest_int
2114 so that any extension is done according to ARG1's type. */
2115 return force_fit_type (type, wi::to_widest (arg1),
2116 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2117 TREE_OVERFLOW (arg1));
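/* Worked example (editor's sketch).  Converting the 8-bit signed
   constant -1 to a 32-bit unsigned type: wi::to_widest sign-extends -1
   according to the source type, and force_fit_type then reduces the
   result to the target precision, giving the INTEGER_CST 0xffffffff --
   the usual modular conversion result.  */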
2120 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2121 to an integer type. */
2123 static tree
2124 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2126 bool overflow = false;
2127 tree t;
2129 /* The following code implements the floating point to integer
2130 conversion rules required by the Java Language Specification,
2131 that IEEE NaNs are mapped to zero and values that overflow
2132 the target precision saturate, i.e. values greater than
2133 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2134 are mapped to INT_MIN. These semantics are allowed by the
2135 C and C++ standards that simply state that the behavior of
2136 FP-to-integer conversion is unspecified upon overflow. */
2138 wide_int val;
2139 REAL_VALUE_TYPE r;
2140 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2142 switch (code)
2144 case FIX_TRUNC_EXPR:
2145 real_trunc (&r, VOIDmode, &x);
2146 break;
2148 default:
2149 gcc_unreachable ();
2152 /* If R is NaN, return zero and show we have an overflow. */
2153 if (REAL_VALUE_ISNAN (r))
2155 overflow = true;
2156 val = wi::zero (TYPE_PRECISION (type));
2159 /* See if R is less than the lower bound or greater than the
2160 upper bound. */
2162 if (! overflow)
2164 tree lt = TYPE_MIN_VALUE (type);
2165 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2166 if (real_less (&r, &l))
2168 overflow = true;
2169 val = wi::to_wide (lt);
2173 if (! overflow)
2175 tree ut = TYPE_MAX_VALUE (type);
2176 if (ut)
2178 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2179 if (real_less (&u, &r))
2181 overflow = true;
2182 val = wi::to_wide (ut);
2187 if (! overflow)
2188 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2190 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2191 return t;
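/* Worked examples (editor's sketch) of the saturating semantics
   described above, for a 32-bit signed target:

     (int) 1.0e30   -> INT_MAX, TREE_OVERFLOW set
     (int) -1.0e30  -> INT_MIN, TREE_OVERFLOW set
     (int) NaN      -> 0,       TREE_OVERFLOW set
     (int) 3.9      -> 3        (real_trunc rounds toward zero)

   The overflow bit lets front ends diagnose the out-of-range conversion
   while still folding to a well-defined value.  */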
2194 /* A subroutine of fold_convert_const handling conversions of a
2195 FIXED_CST to an integer type. */
2197 static tree
2198 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2200 tree t;
2201 double_int temp, temp_trunc;
2202 scalar_mode mode;
2204 /* Right shift FIXED_CST to temp by fbit. */
2205 temp = TREE_FIXED_CST (arg1).data;
2206 mode = TREE_FIXED_CST (arg1).mode;
2207 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2209 temp = temp.rshift (GET_MODE_FBIT (mode),
2210 HOST_BITS_PER_DOUBLE_INT,
2211 SIGNED_FIXED_POINT_MODE_P (mode));
2213 /* Left shift temp to temp_trunc by fbit. */
2214 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2215 HOST_BITS_PER_DOUBLE_INT,
2216 SIGNED_FIXED_POINT_MODE_P (mode));
2218 else
2220 temp = double_int_zero;
2221 temp_trunc = double_int_zero;
2224 /* If FIXED_CST is negative, we need to round the value toward 0.
2225 We do this by adding 1 to temp when the fractional bits are not zero. */
2226 if (SIGNED_FIXED_POINT_MODE_P (mode)
2227 && temp_trunc.is_negative ()
2228 && TREE_FIXED_CST (arg1).data != temp_trunc)
2229 temp += double_int_one;
2231 /* Given a fixed-point constant, make new constant with new type,
2232 appropriately sign-extended or truncated. */
2233 t = force_fit_type (type, temp, -1,
2234 (temp.is_negative ()
2235 && (TYPE_UNSIGNED (type)
2236 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2237 | TREE_OVERFLOW (arg1));
2239 return t;
2242 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2243 to another floating point type. */
2245 static tree
2246 fold_convert_const_real_from_real (tree type, const_tree arg1)
2248 REAL_VALUE_TYPE value;
2249 tree t;
2251 /* If the underlying modes are the same, simply treat it as
2252 a copy and rebuild with the TREE_REAL_CST information and the
2253 given type. */
2254 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2256 t = build_real (type, TREE_REAL_CST (arg1));
2257 return t;
2260 /* Don't perform the operation if flag_signaling_nans is on
2261 and the operand is a signaling NaN. */
2262 if (HONOR_SNANS (arg1)
2263 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2264 return NULL_TREE;
2266 /* With flag_rounding_math we should respect the current rounding mode
2267 unless the conversion is exact. */
2268 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2269 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2270 return NULL_TREE;
2272 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2273 t = build_real (type, value);
2275 /* If converting an infinity or NAN to a representation that doesn't
2276 have one, set the overflow bit so that we can produce some kind of
2277 error message at the appropriate point if necessary. It's not the
2278 most user-friendly message, but it's better than nothing. */
2279 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2280 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2281 TREE_OVERFLOW (t) = 1;
2282 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2283 && !MODE_HAS_NANS (TYPE_MODE (type)))
2284 TREE_OVERFLOW (t) = 1;
2285 /* Regular overflow: the conversion produced an infinity in a mode that
2286 can't represent infinities. */
2287 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2288 && REAL_VALUE_ISINF (value)
2289 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2290 TREE_OVERFLOW (t) = 1;
2291 else
2292 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2293 return t;
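/* Worked example (editor's sketch).  Narrowing a double constant to
   float rounds it to the nearest representable value:

     (float) 0.1  -> 0x1.99999ap-4f  (inexact, but still folded)
     (float) 1.5  -> 1.5f            (exact)

   Under -frounding-math the inexact case is instead left unfolded
   (NULL_TREE) so the runtime rounding mode is respected, and a
   signaling NaN operand is likewise left alone under -fsignaling-nans,
   matching the two early-out checks above.  */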
2296 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2297 to a floating point type. */
2299 static tree
2300 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2302 REAL_VALUE_TYPE value;
2303 tree t;
2305 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2306 &TREE_FIXED_CST (arg1));
2307 t = build_real (type, value);
2309 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2310 return t;
2313 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2314 to another fixed-point type. */
2316 static tree
2317 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2319 FIXED_VALUE_TYPE value;
2320 tree t;
2321 bool overflow_p;
2323 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2324 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2325 t = build_fixed (type, value);
2327 /* Propagate overflow flags. */
2328 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 return t;
2333 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2334 to a fixed-point type. */
2336 static tree
2337 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2339 FIXED_VALUE_TYPE value;
2340 tree t;
2341 bool overflow_p;
2342 double_int di;
2344 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2346 di.low = TREE_INT_CST_ELT (arg1, 0);
2347 if (TREE_INT_CST_NUNITS (arg1) == 1)
2348 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2349 else
2350 di.high = TREE_INT_CST_ELT (arg1, 1);
2352 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2353 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2354 TYPE_SATURATING (type));
2355 t = build_fixed (type, value);
2357 /* Propagate overflow flags. */
2358 if (overflow_p | TREE_OVERFLOW (arg1))
2359 TREE_OVERFLOW (t) = 1;
2360 return t;
2363 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2364 to a fixed-point type. */
2366 static tree
2367 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2369 FIXED_VALUE_TYPE value;
2370 tree t;
2371 bool overflow_p;
2373 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2374 &TREE_REAL_CST (arg1),
2375 TYPE_SATURATING (type));
2376 t = build_fixed (type, value);
2378 /* Propagate overflow flags. */
2379 if (overflow_p | TREE_OVERFLOW (arg1))
2380 TREE_OVERFLOW (t) = 1;
2381 return t;
2384 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2385 type TYPE. If no simplification can be done return NULL_TREE. */
2387 static tree
2388 fold_convert_const (enum tree_code code, tree type, tree arg1)
2390 tree arg_type = TREE_TYPE (arg1);
2391 if (arg_type == type)
2392 return arg1;
2394 /* We can't widen types, since the runtime value could overflow the
2395 original type before being extended to the new type. */
2396 if (POLY_INT_CST_P (arg1)
2397 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2398 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2399 return build_poly_int_cst (type,
2400 poly_wide_int::from (poly_int_cst_value (arg1),
2401 TYPE_PRECISION (type),
2402 TYPE_SIGN (arg_type)));
2404 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2405 || TREE_CODE (type) == OFFSET_TYPE)
2407 if (TREE_CODE (arg1) == INTEGER_CST)
2408 return fold_convert_const_int_from_int (type, arg1);
2409 else if (TREE_CODE (arg1) == REAL_CST)
2410 return fold_convert_const_int_from_real (code, type, arg1);
2411 else if (TREE_CODE (arg1) == FIXED_CST)
2412 return fold_convert_const_int_from_fixed (type, arg1);
2414 else if (SCALAR_FLOAT_TYPE_P (type))
2416 if (TREE_CODE (arg1) == INTEGER_CST)
2418 tree res = build_real_from_int_cst (type, arg1);
2419 /* Avoid the folding if flag_rounding_math is on and the
2420 conversion is not exact. */
2421 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2423 bool fail = false;
2424 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2425 TYPE_PRECISION (TREE_TYPE (arg1)));
2426 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2427 return NULL_TREE;
2429 return res;
2431 else if (TREE_CODE (arg1) == REAL_CST)
2432 return fold_convert_const_real_from_real (type, arg1);
2433 else if (TREE_CODE (arg1) == FIXED_CST)
2434 return fold_convert_const_real_from_fixed (type, arg1);
2436 else if (FIXED_POINT_TYPE_P (type))
2438 if (TREE_CODE (arg1) == FIXED_CST)
2439 return fold_convert_const_fixed_from_fixed (type, arg1);
2440 else if (TREE_CODE (arg1) == INTEGER_CST)
2441 return fold_convert_const_fixed_from_int (type, arg1);
2442 else if (TREE_CODE (arg1) == REAL_CST)
2443 return fold_convert_const_fixed_from_real (type, arg1);
2445 else if (VECTOR_TYPE_P (type))
2447 if (TREE_CODE (arg1) == VECTOR_CST
2448 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2450 tree elttype = TREE_TYPE (type);
2451 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2452 /* We can't handle steps directly when extending, since the
2453 values need to wrap at the original precision first. */
2454 bool step_ok_p
2455 = (INTEGRAL_TYPE_P (elttype)
2456 && INTEGRAL_TYPE_P (arg1_elttype)
2457 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2458 tree_vector_builder v;
2459 if (!v.new_unary_operation (type, arg1, step_ok_p))
2460 return NULL_TREE;
2461 unsigned int len = v.encoded_nelts ();
2462 for (unsigned int i = 0; i < len; ++i)
2464 tree elt = VECTOR_CST_ELT (arg1, i);
2465 tree cvt = fold_convert_const (code, elttype, elt);
2466 if (cvt == NULL_TREE)
2467 return NULL_TREE;
2468 v.quick_push (cvt);
2470 return v.build ();
2473 return NULL_TREE;
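/* Summary sketch (editor's addition).  fold_convert_const dispatches
   purely on the target type class and the source constant's code, and
   every leg returns either a new constant or NULL_TREE -- never a
   non-constant tree.  For example:

     fold_convert_const (NOP_EXPR, an unsigned type, INTEGER_CST -1)
       -> all-ones INTEGER_CST via fold_convert_const_int_from_int
     fold_convert_const (FIX_TRUNC_EXPR, an int type, REAL_CST 2.5)
       -> INTEGER_CST 2 via fold_convert_const_int_from_real

   A NULL_TREE result tells callers, such as the conversion cases
   earlier in this file, to keep the expression unfolded.  */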
2476 /* Construct a vector of zero elements of vector type TYPE. */
2478 static tree
2479 build_zero_vector (tree type)
2481 tree t;
2483 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2484 return build_vector_from_val (type, t);
2487 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2489 bool
2490 fold_convertible_p (const_tree type, const_tree arg)
2492 const_tree orig = TREE_TYPE (arg);
2494 if (type == orig)
2495 return true;
2497 if (TREE_CODE (arg) == ERROR_MARK
2498 || TREE_CODE (type) == ERROR_MARK
2499 || TREE_CODE (orig) == ERROR_MARK)
2500 return false;
2502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2503 return true;
2505 switch (TREE_CODE (type))
2507 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2508 case POINTER_TYPE: case REFERENCE_TYPE:
2509 case OFFSET_TYPE:
2510 return (INTEGRAL_TYPE_P (orig)
2511 || (POINTER_TYPE_P (orig)
2512 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2513 || TREE_CODE (orig) == OFFSET_TYPE);
2515 case REAL_TYPE:
2516 case FIXED_POINT_TYPE:
2517 case VOID_TYPE:
2518 return TREE_CODE (type) == TREE_CODE (orig);
2520 case VECTOR_TYPE:
2521 return (VECTOR_TYPE_P (orig)
2522 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2523 TYPE_VECTOR_SUBPARTS (orig))
2524 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2526 default:
2527 return false;
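/* Illustrative cases (editor's sketch) for the predicate above:

     target INTEGER_TYPE, source INTEGER_TYPE  -> true
     target INTEGER_TYPE, source POINTER_TYPE  -> true only if the
       integer is no wider than the pointer (TYPE_PRECISION check)
     target REAL_TYPE, source INTEGER_TYPE     -> false; REAL_TYPE,
       FIXED_POINT_TYPE and VOID_TYPE demand an exact TREE_CODE match

   This mirrors the cases fold_convert_loc below can lower to a plain
   NOP_EXPR without calling back into the front end.  */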
2531 /* Convert expression ARG to type TYPE. Used by the middle-end for
2532 simple conversions in preference to calling the front-end's convert. */
2534 tree
2535 fold_convert_loc (location_t loc, tree type, tree arg)
2537 tree orig = TREE_TYPE (arg);
2538 tree tem;
2540 if (type == orig)
2541 return arg;
2543 if (TREE_CODE (arg) == ERROR_MARK
2544 || TREE_CODE (type) == ERROR_MARK
2545 || TREE_CODE (orig) == ERROR_MARK)
2546 return error_mark_node;
2548 switch (TREE_CODE (type))
2550 case POINTER_TYPE:
2551 case REFERENCE_TYPE:
2552 /* Handle conversions between pointers to different address spaces. */
2553 if (POINTER_TYPE_P (orig)
2554 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2555 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2556 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2557 /* fall through */
2559 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2560 case OFFSET_TYPE:
2561 if (TREE_CODE (arg) == INTEGER_CST)
2563 tem = fold_convert_const (NOP_EXPR, type, arg);
2564 if (tem != NULL_TREE)
2565 return tem;
2567 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2568 || TREE_CODE (orig) == OFFSET_TYPE)
2569 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2570 if (TREE_CODE (orig) == COMPLEX_TYPE)
2571 return fold_convert_loc (loc, type,
2572 fold_build1_loc (loc, REALPART_EXPR,
2573 TREE_TYPE (orig), arg));
2574 gcc_assert (VECTOR_TYPE_P (orig)
2575 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2576 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2578 case REAL_TYPE:
2579 if (TREE_CODE (arg) == INTEGER_CST)
2581 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2582 if (tem != NULL_TREE)
2583 return tem;
2585 else if (TREE_CODE (arg) == REAL_CST)
2587 tem = fold_convert_const (NOP_EXPR, type, arg);
2588 if (tem != NULL_TREE)
2589 return tem;
2591 else if (TREE_CODE (arg) == FIXED_CST)
2593 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2594 if (tem != NULL_TREE)
2595 return tem;
2598 switch (TREE_CODE (orig))
2600 case INTEGER_TYPE:
2601 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2602 case POINTER_TYPE: case REFERENCE_TYPE:
2603 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2605 case REAL_TYPE:
2606 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2608 case FIXED_POINT_TYPE:
2609 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2611 case COMPLEX_TYPE:
2612 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2613 return fold_convert_loc (loc, type, tem);
2615 default:
2616 gcc_unreachable ();
2619 case FIXED_POINT_TYPE:
2620 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2621 || TREE_CODE (arg) == REAL_CST)
2623 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2624 if (tem != NULL_TREE)
2625 goto fold_convert_exit;
2628 switch (TREE_CODE (orig))
2630 case FIXED_POINT_TYPE:
2631 case INTEGER_TYPE:
2632 case ENUMERAL_TYPE:
2633 case BOOLEAN_TYPE:
2634 case REAL_TYPE:
2635 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2637 case COMPLEX_TYPE:
2638 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2639 return fold_convert_loc (loc, type, tem);
2641 default:
2642 gcc_unreachable ();
2645 case COMPLEX_TYPE:
2646 switch (TREE_CODE (orig))
2648 case INTEGER_TYPE:
2649 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2650 case POINTER_TYPE: case REFERENCE_TYPE:
2651 case REAL_TYPE:
2652 case FIXED_POINT_TYPE:
2653 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2654 fold_convert_loc (loc, TREE_TYPE (type), arg),
2655 fold_convert_loc (loc, TREE_TYPE (type),
2656 integer_zero_node));
2657 case COMPLEX_TYPE:
2659 tree rpart, ipart;
2661 if (TREE_CODE (arg) == COMPLEX_EXPR)
2663 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2664 TREE_OPERAND (arg, 0));
2665 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2666 TREE_OPERAND (arg, 1));
2667 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2670 arg = save_expr (arg);
2671 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2673 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2674 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2675 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2678 default:
2679 gcc_unreachable ();
2682 case VECTOR_TYPE:
2683 if (integer_zerop (arg))
2684 return build_zero_vector (type);
2685 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2686 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2687 || VECTOR_TYPE_P (orig));
2688 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2690 case VOID_TYPE:
2691 tem = fold_ignored_result (arg);
2692 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2694 default:
2695 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2696 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2697 gcc_unreachable ();
2699 fold_convert_exit:
2700 tem = protected_set_expr_location_unshare (tem, loc);
2701 return tem;
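/* Illustrative behavior (editor's sketch) of fold_convert, the
   UNKNOWN_LOCATION wrapper for this function:

     fold_convert (double_type_node, integer constant 3)
       -> REAL_CST 3.0, folded via fold_convert_const (FLOAT_EXPR, ...)
     fold_convert (a complex type, REAL_CST 2.0)
       -> COMPLEX_EXPR <2.0, 0.0>
     fold_convert (pointer in another address space, pointer ARG)
       -> ADDR_SPACE_CONVERT_EXPR, per the POINTER_TYPE case above

   Non-constant operands come back wrapped in the appropriate NOP_EXPR,
   FLOAT_EXPR or VIEW_CONVERT_EXPR rather than disappearing.  */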
2704 /* Return false if expr can be assumed not to be an lvalue, true
2705 otherwise. */
2707 static bool
2708 maybe_lvalue_p (const_tree x)
2710 /* We only need to wrap lvalue tree codes. */
2711 switch (TREE_CODE (x))
2713 case VAR_DECL:
2714 case PARM_DECL:
2715 case RESULT_DECL:
2716 case LABEL_DECL:
2717 case FUNCTION_DECL:
2718 case SSA_NAME:
2719 case COMPOUND_LITERAL_EXPR:
2721 case COMPONENT_REF:
2722 case MEM_REF:
2723 case INDIRECT_REF:
2724 case ARRAY_REF:
2725 case ARRAY_RANGE_REF:
2726 case BIT_FIELD_REF:
2727 case OBJ_TYPE_REF:
2729 case REALPART_EXPR:
2730 case IMAGPART_EXPR:
2731 case PREINCREMENT_EXPR:
2732 case PREDECREMENT_EXPR:
2733 case SAVE_EXPR:
2734 case TRY_CATCH_EXPR:
2735 case WITH_CLEANUP_EXPR:
2736 case COMPOUND_EXPR:
2737 case MODIFY_EXPR:
2738 case TARGET_EXPR:
2739 case COND_EXPR:
2740 case BIND_EXPR:
2741 case VIEW_CONVERT_EXPR:
2742 break;
2744 default:
2745 /* Assume the worst for front-end tree codes. */
2746 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2747 break;
2748 return false;
2751 return true;
2754 /* Return an expr equal to X but certainly not valid as an lvalue. */
2756 tree
2757 non_lvalue_loc (location_t loc, tree x)
2759 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2760 us. */
2761 if (in_gimple_form)
2762 return x;
2764 if (! maybe_lvalue_p (x))
2765 return x;
2766 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2769 /* Given a tree comparison code, return the code that is the logical inverse.
2770 It is generally not safe to do this for floating-point comparisons, except
2771 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2772 ERROR_MARK in this case. */
2774 enum tree_code
2775 invert_tree_comparison (enum tree_code code, bool honor_nans)
2777 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2778 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2779 return ERROR_MARK;
2781 switch (code)
2783 case EQ_EXPR:
2784 return NE_EXPR;
2785 case NE_EXPR:
2786 return EQ_EXPR;
2787 case GT_EXPR:
2788 return honor_nans ? UNLE_EXPR : LE_EXPR;
2789 case GE_EXPR:
2790 return honor_nans ? UNLT_EXPR : LT_EXPR;
2791 case LT_EXPR:
2792 return honor_nans ? UNGE_EXPR : GE_EXPR;
2793 case LE_EXPR:
2794 return honor_nans ? UNGT_EXPR : GT_EXPR;
2795 case LTGT_EXPR:
2796 return UNEQ_EXPR;
2797 case UNEQ_EXPR:
2798 return LTGT_EXPR;
2799 case UNGT_EXPR:
2800 return LE_EXPR;
2801 case UNGE_EXPR:
2802 return LT_EXPR;
2803 case UNLT_EXPR:
2804 return GE_EXPR;
2805 case UNLE_EXPR:
2806 return GT_EXPR;
2807 case ORDERED_EXPR:
2808 return UNORDERED_EXPR;
2809 case UNORDERED_EXPR:
2810 return ORDERED_EXPR;
2811 default:
2812 gcc_unreachable ();
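/* Worked examples (editor's sketch).  For integer operands
   (honor_nans == false) the inverse is the familiar one:

     invert_tree_comparison (LT_EXPR, false)  -> GE_EXPR

   When NaNs matter, !(x < y) also holds for unordered operands, so the
   inverse of < is "unordered or >=":

     invert_tree_comparison (LT_EXPR, true)   -> UNGE_EXPR

   and with -ftrapping-math as well, the function returns ERROR_MARK,
   because UNGE would not raise the invalid exception that < raises on
   a NaN.  */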
2816 /* Similar, but return the comparison that results if the operands are
2817 swapped. This is safe for floating-point. */
2819 enum tree_code
2820 swap_tree_comparison (enum tree_code code)
2822 switch (code)
2824 case EQ_EXPR:
2825 case NE_EXPR:
2826 case ORDERED_EXPR:
2827 case UNORDERED_EXPR:
2828 case LTGT_EXPR:
2829 case UNEQ_EXPR:
2830 return code;
2831 case GT_EXPR:
2832 return LT_EXPR;
2833 case GE_EXPR:
2834 return LE_EXPR;
2835 case LT_EXPR:
2836 return GT_EXPR;
2837 case LE_EXPR:
2838 return GE_EXPR;
2839 case UNGT_EXPR:
2840 return UNLT_EXPR;
2841 case UNGE_EXPR:
2842 return UNLE_EXPR;
2843 case UNLT_EXPR:
2844 return UNGT_EXPR;
2845 case UNLE_EXPR:
2846 return UNGE_EXPR;
2847 default:
2848 gcc_unreachable ();
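/* Worked example (editor's sketch): swapping changes the operand
   order, not the truth value, so LT_EXPR maps to GT_EXPR because
   a < b and b > a are the same test.  EQ/NE and the (un)ordered
   predicates are symmetric and map to themselves, which is why this
   transformation is safe even with NaNs.  */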
2853 /* Convert a comparison tree code from an enum tree_code representation
2854 into a compcode bit-based encoding. This function is the inverse of
2855 compcode_to_comparison. */
2857 static enum comparison_code
2858 comparison_to_compcode (enum tree_code code)
2860 switch (code)
2862 case LT_EXPR:
2863 return COMPCODE_LT;
2864 case EQ_EXPR:
2865 return COMPCODE_EQ;
2866 case LE_EXPR:
2867 return COMPCODE_LE;
2868 case GT_EXPR:
2869 return COMPCODE_GT;
2870 case NE_EXPR:
2871 return COMPCODE_NE;
2872 case GE_EXPR:
2873 return COMPCODE_GE;
2874 case ORDERED_EXPR:
2875 return COMPCODE_ORD;
2876 case UNORDERED_EXPR:
2877 return COMPCODE_UNORD;
2878 case UNLT_EXPR:
2879 return COMPCODE_UNLT;
2880 case UNEQ_EXPR:
2881 return COMPCODE_UNEQ;
2882 case UNLE_EXPR:
2883 return COMPCODE_UNLE;
2884 case UNGT_EXPR:
2885 return COMPCODE_UNGT;
2886 case LTGT_EXPR:
2887 return COMPCODE_LTGT;
2888 case UNGE_EXPR:
2889 return COMPCODE_UNGE;
2890 default:
2891 gcc_unreachable ();
2895 /* Convert a compcode bit-based encoding of a comparison operator back
2896 to GCC's enum tree_code representation. This function is the
2897 inverse of comparison_to_compcode. */
2899 static enum tree_code
2900 compcode_to_comparison (enum comparison_code code)
2902 switch (code)
2904 case COMPCODE_LT:
2905 return LT_EXPR;
2906 case COMPCODE_EQ:
2907 return EQ_EXPR;
2908 case COMPCODE_LE:
2909 return LE_EXPR;
2910 case COMPCODE_GT:
2911 return GT_EXPR;
2912 case COMPCODE_NE:
2913 return NE_EXPR;
2914 case COMPCODE_GE:
2915 return GE_EXPR;
2916 case COMPCODE_ORD:
2917 return ORDERED_EXPR;
2918 case COMPCODE_UNORD:
2919 return UNORDERED_EXPR;
2920 case COMPCODE_UNLT:
2921 return UNLT_EXPR;
2922 case COMPCODE_UNEQ:
2923 return UNEQ_EXPR;
2924 case COMPCODE_UNLE:
2925 return UNLE_EXPR;
2926 case COMPCODE_UNGT:
2927 return UNGT_EXPR;
2928 case COMPCODE_LTGT:
2929 return LTGT_EXPR;
2930 case COMPCODE_UNGE:
2931 return UNGE_EXPR;
2932 default:
2933 gcc_unreachable ();
2937 /* Return true if COND1 tests the opposite condition of COND2. */
2939 bool
2940 inverse_conditions_p (const_tree cond1, const_tree cond2)
2942 return (COMPARISON_CLASS_P (cond1)
2943 && COMPARISON_CLASS_P (cond2)
2944 && (invert_tree_comparison
2945 (TREE_CODE (cond1),
2946 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2947 && operand_equal_p (TREE_OPERAND (cond1, 0),
2948 TREE_OPERAND (cond2, 0), 0)
2949 && operand_equal_p (TREE_OPERAND (cond1, 1),
2950 TREE_OPERAND (cond2, 1), 0));
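/* Illustrative use (editor's sketch), assuming integer operands:

     inverse_conditions_p (a < b, a >= b)  -> true
     inverse_conditions_p (a < b, b >= a)  -> false; the operands must
       match positionally, not just semantically

   For floats that honor NaNs, the recognized inverse of a < b is
   a UNGE b, per invert_tree_comparison above.  */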
2953 /* Return a tree for the comparison which is the combination of
2954 doing the AND or OR (depending on CODE) of the two operations LCODE
2955 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2956 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2957 if this makes the transformation invalid. */
2959 tree
2960 combine_comparisons (location_t loc,
2961 enum tree_code code, enum tree_code lcode,
2962 enum tree_code rcode, tree truth_type,
2963 tree ll_arg, tree lr_arg)
2965 bool honor_nans = HONOR_NANS (ll_arg);
2966 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2967 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2968 int compcode;
2970 switch (code)
2972 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2973 compcode = lcompcode & rcompcode;
2974 break;
2976 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2977 compcode = lcompcode | rcompcode;
2978 break;
2980 default:
2981 return NULL_TREE;
2984 if (!honor_nans)
2986 /* Eliminate unordered comparisons, as well as LTGT and ORD
2987 which are not used unless the mode has NaNs. */
2988 compcode &= ~COMPCODE_UNORD;
2989 if (compcode == COMPCODE_LTGT)
2990 compcode = COMPCODE_NE;
2991 else if (compcode == COMPCODE_ORD)
2992 compcode = COMPCODE_TRUE;
2994 else if (flag_trapping_math)
2996 /* Check that the original operation and the optimized ones will trap
2997 under the same condition. */
2998 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2999 && (lcompcode != COMPCODE_EQ)
3000 && (lcompcode != COMPCODE_ORD);
3001 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3002 && (rcompcode != COMPCODE_EQ)
3003 && (rcompcode != COMPCODE_ORD);
3004 bool trap = (compcode & COMPCODE_UNORD) == 0
3005 && (compcode != COMPCODE_EQ)
3006 && (compcode != COMPCODE_ORD);
3008 /* In a short-circuited boolean expression the LHS might be
3009 such that the RHS, if evaluated, will never trap. For
3010 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3011 if neither x nor y is NaN. (This is a mixed blessing: for
3012 example, the expression above will never trap, hence
3013 optimizing it to x < y would be invalid). */
3014 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3015 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3016 rtrap = false;
3018 /* If the comparison was short-circuited, and only the RHS
3019 trapped, we may now generate a spurious trap. */
3020 if (rtrap && !ltrap
3021 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3022 return NULL_TREE;
3024 /* If we changed the conditions that cause a trap, we lose. */
3025 if ((ltrap || rtrap) != trap)
3026 return NULL_TREE;
3029 if (compcode == COMPCODE_TRUE)
3030 return constant_boolean_node (true, truth_type);
3031 else if (compcode == COMPCODE_FALSE)
3032 return constant_boolean_node (false, truth_type);
3033 else
3035 enum tree_code tcode;
3037 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3038 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
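/* Worked example (editor's sketch).  The compcode encoding gives each
   elementary outcome its own bit -- LT = 1, EQ = 2, GT = 4, UNORD = 8
   is the layout assumed here from the comparison_code enum -- so the
   AND/OR of two comparisons of the same operands is plain bit math:

     (x < y) || (x == y): COMPCODE_LT | COMPCODE_EQ == 3 == COMPCODE_LE
       -> folded to x <= y
     (x < y) && (x > y):  COMPCODE_LT & COMPCODE_GT == 0 == COMPCODE_FALSE
       -> folded to constant false

   For floating point, the NaN and trap checks above can veto the
   result or first rewrite it (e.g. LTGT becomes NE when the mode has
   no NaNs).  */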
3042 /* Return nonzero if two operands (typically of the same tree node)
3043 are necessarily equal. FLAGS modifies behavior as follows:
3045 If OEP_ONLY_CONST is set, only return nonzero for constants.
3046 This function tests whether the operands are indistinguishable;
3047 it does not test whether they are equal using C's == operation.
3048 The distinction is important for IEEE floating point, because
3049 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3050 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3052 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3053 even though it may hold multiple values during a function.
3054 This is because a GCC tree node guarantees that nothing else is
3055 executed between the evaluation of its "operands" (which may often
3056 be evaluated in arbitrary order). Hence if the operands themselves
3057 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3058 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3059 unset means assuming isochronic (or instantaneous) tree equivalence.
3060 Unless comparing arbitrary expression trees, such as from different
3061 statements, this flag can usually be left unset.
3063 If OEP_PURE_SAME is set, then pure functions with identical arguments
3064 are considered the same. It is used when the caller has other ways
3065 to ensure that global memory is unchanged in between.
3067 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3068 not values of expressions.
3070 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3071 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3073 If OEP_BITWISE is set, then require the values to be bitwise identical
3074 rather than simply numerically equal. Do not take advantage of things
3075 like math-related flags or undefined behavior; only return true for
3076 values that are provably bitwise identical in all circumstances.
3078 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3079 any operand with side effects. This is unnecessarily conservative in the
3080 case we know that arg0 and arg1 are in disjoint code paths (such as in
3081 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3082 addresses with TREE_CONSTANT flag set so we know that &var == &var
3083 even if var is volatile. */
3085 bool
3086 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3087 unsigned int flags)
3089 bool r;
3090 if (verify_hash_value (arg0, arg1, flags, &r))
3091 return r;
3093 STRIP_ANY_LOCATION_WRAPPER (arg0);
3094 STRIP_ANY_LOCATION_WRAPPER (arg1);
3096 /* If either is ERROR_MARK, they aren't equal. */
3097 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3098 || TREE_TYPE (arg0) == error_mark_node
3099 || TREE_TYPE (arg1) == error_mark_node)
3100 return false;
3102 /* Similarly, if either does not have a type (like a template id),
3103 they aren't equal. */
3104 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3105 return false;
3107 /* Bitwise identity makes no sense if the values have different layouts. */
3108 if ((flags & OEP_BITWISE)
3109 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3110 return false;
3112 /* We cannot consider pointers to different address spaces equal. */
3113 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3114 && POINTER_TYPE_P (TREE_TYPE (arg1))
3115 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3116 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3117 return false;
3119 /* Check equality of integer constants before bailing out due to
3120 precision differences. */
3121 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3123 /* Address of INTEGER_CST is not defined; check that we did not forget
3124 to drop the OEP_ADDRESS_OF flags. */
3125 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3126 return tree_int_cst_equal (arg0, arg1);
3129 if (!(flags & OEP_ADDRESS_OF))
3131 /* If both types don't have the same signedness, then we can't consider
3132 them equal. We must check this before the STRIP_NOPS calls
3133 because they may change the signedness of the arguments. As pointers
3134 strictly don't have a signedness, require either two pointers or
3135 two non-pointers as well. */
3136 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3137 || POINTER_TYPE_P (TREE_TYPE (arg0))
3138 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3139 return false;
3141 /* If both types don't have the same precision, then it is not safe
3142 to strip NOPs. */
3143 if (element_precision (TREE_TYPE (arg0))
3144 != element_precision (TREE_TYPE (arg1)))
3145 return false;
3147 STRIP_NOPS (arg0);
3148 STRIP_NOPS (arg1);
3150 #if 0
3151 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3152 sanity check once the issue is solved. */
3153 else
3154 /* Addresses of conversions and SSA_NAMEs (and many other things)
3155 are not defined. Check that we did not forget to drop the
3156 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3157 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3158 && TREE_CODE (arg0) != SSA_NAME);
3159 #endif
3161 /* In case both args are comparisons but with different comparison
3162 code, try to swap the comparison operands of one arg to produce
3163 a match and compare that variant. */
3164 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3165 && COMPARISON_CLASS_P (arg0)
3166 && COMPARISON_CLASS_P (arg1))
3168 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3170 if (TREE_CODE (arg0) == swap_code)
3171 return operand_equal_p (TREE_OPERAND (arg0, 0),
3172 TREE_OPERAND (arg1, 1), flags)
3173 && operand_equal_p (TREE_OPERAND (arg0, 1),
3174 TREE_OPERAND (arg1, 0), flags);
3177 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3179 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3180 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3182 else if (flags & OEP_ADDRESS_OF)
3184 /* If we are interested in comparing addresses, ignore
3185 MEM_REF wrappings of the base that can appear just for
3186 TBAA reasons. */
3187 if (TREE_CODE (arg0) == MEM_REF
3188 && DECL_P (arg1)
3189 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3190 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3191 && integer_zerop (TREE_OPERAND (arg0, 1)))
3192 return true;
3193 else if (TREE_CODE (arg1) == MEM_REF
3194 && DECL_P (arg0)
3195 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3196 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3197 && integer_zerop (TREE_OPERAND (arg1, 1)))
3198 return true;
3199 return false;
3201 else
3202 return false;
3205 /* When not checking addresses, this is needed for conversions and for
3206 COMPONENT_REF. Might as well play it safe and always test this. */
3207 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3208 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3209 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3210 && !(flags & OEP_ADDRESS_OF)))
3211 return false;
3213 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3214 We don't care about side effects in that case because the SAVE_EXPR
3215 takes care of that for us. In all other cases, two expressions are
3216 equal if they have no side effects. If we have two identical
3217 expressions with side effects that should be treated the same due
3218 to the only side effects being identical SAVE_EXPR's, that will
3219 be detected in the recursive calls below.
3220 If we are taking an invariant address of two identical objects
3221 they are necessarily equal as well. */
3222 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3223 && (TREE_CODE (arg0) == SAVE_EXPR
3224 || (flags & OEP_MATCH_SIDE_EFFECTS)
3225 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3226 return true;
3228 /* Next handle constant cases, those for which we can return 1 even
3229 if ONLY_CONST is set. */
3230 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3231 switch (TREE_CODE (arg0))
3233 case INTEGER_CST:
3234 return tree_int_cst_equal (arg0, arg1);
3236 case FIXED_CST:
3237 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3238 TREE_FIXED_CST (arg1));
3240 case REAL_CST:
3241 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3242 return true;
3244 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3246 /* If we do not distinguish between signed and unsigned zero,
3247 consider them equal. */
3248 if (real_zerop (arg0) && real_zerop (arg1))
3249 return true;
3251 return false;
3253 case VECTOR_CST:
3255 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3256 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3257 return false;
3259 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3260 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3261 return false;
3263 unsigned int count = vector_cst_encoded_nelts (arg0);
3264 for (unsigned int i = 0; i < count; ++i)
3265 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3266 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3267 return false;
3268 return true;
3271 case COMPLEX_CST:
3272 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3273 flags)
3274 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3275 flags));
3277 case STRING_CST:
3278 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3279 && ! memcmp (TREE_STRING_POINTER (arg0),
3280 TREE_STRING_POINTER (arg1),
3281 TREE_STRING_LENGTH (arg0)));
3283 case ADDR_EXPR:
3284 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3285 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3286 flags | OEP_ADDRESS_OF
3287 | OEP_MATCH_SIDE_EFFECTS);
3288 case CONSTRUCTOR:
3289 /* In GIMPLE empty constructors are allowed in initializers of
3290 aggregates. */
3291 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3292 default:
3293 break;
3296 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3297 two instances of undefined behavior will give identical results. */
3298 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3299 return false;
3301 /* Define macros to test an operand from arg0 and arg1 for equality and a
3302 variant that allows null and views null as being different from any
3303 non-null value. In the latter case, if either is null, then both
3304 must be; otherwise, do the normal comparison. */
3305 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3306 TREE_OPERAND (arg1, N), flags)
3308 #define OP_SAME_WITH_NULL(N) \
3309 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3310 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3312 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3314 case tcc_unary:
3315 /* Two conversions are equal only if signedness and modes match. */
3316 switch (TREE_CODE (arg0))
3318 CASE_CONVERT:
3319 case FIX_TRUNC_EXPR:
3320 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3321 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3322 return false;
3323 break;
3324 default:
3325 break;
3328 return OP_SAME (0);
3331 case tcc_comparison:
3332 case tcc_binary:
3333 if (OP_SAME (0) && OP_SAME (1))
3334 return true;
3336 /* For commutative ops, allow the other order. */
3337 return (commutative_tree_code (TREE_CODE (arg0))
3338 && operand_equal_p (TREE_OPERAND (arg0, 0),
3339 TREE_OPERAND (arg1, 1), flags)
3340 && operand_equal_p (TREE_OPERAND (arg0, 1),
3341 TREE_OPERAND (arg1, 0), flags));
3343 case tcc_reference:
3344 /* If either of the pointer (or reference) expressions we are
3345 dereferencing contains a side effect, these cannot be equal,
3346 but their addresses can be. */
3347 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3348 && (TREE_SIDE_EFFECTS (arg0)
3349 || TREE_SIDE_EFFECTS (arg1)))
3350 return false;
3352 switch (TREE_CODE (arg0))
3354 case INDIRECT_REF:
3355 if (!(flags & OEP_ADDRESS_OF))
3357 if (TYPE_ALIGN (TREE_TYPE (arg0))
3358 != TYPE_ALIGN (TREE_TYPE (arg1)))
3359 return false;
3360 /* Verify that the access types are compatible. */
3361 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3362 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3363 return false;
3365 flags &= ~OEP_ADDRESS_OF;
3366 return OP_SAME (0);
3368 case IMAGPART_EXPR:
3369 /* Require the same offset. */
3370 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3371 TYPE_SIZE (TREE_TYPE (arg1)),
3372 flags & ~OEP_ADDRESS_OF))
3373 return false;
3375 /* Fallthru. */
3376 case REALPART_EXPR:
3377 case VIEW_CONVERT_EXPR:
3378 return OP_SAME (0);
3380 case TARGET_MEM_REF:
3381 case MEM_REF:
3382 if (!(flags & OEP_ADDRESS_OF))
3384 /* Require equal access sizes. */
3385 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3386 && (!TYPE_SIZE (TREE_TYPE (arg0))
3387 || !TYPE_SIZE (TREE_TYPE (arg1))
3388 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3389 TYPE_SIZE (TREE_TYPE (arg1)),
3390 flags)))
3391 return false;
3392 /* Verify that access happens in similar types. */
3393 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3394 return false;
3395 /* Verify that accesses are TBAA compatible. */
3396 if (!alias_ptr_types_compatible_p
3397 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3398 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3399 || (MR_DEPENDENCE_CLIQUE (arg0)
3400 != MR_DEPENDENCE_CLIQUE (arg1))
3401 || (MR_DEPENDENCE_BASE (arg0)
3402 != MR_DEPENDENCE_BASE (arg1)))
3403 return false;
3404 /* Verify that alignment is compatible. */
3405 if (TYPE_ALIGN (TREE_TYPE (arg0))
3406 != TYPE_ALIGN (TREE_TYPE (arg1)))
3407 return false;
3409 flags &= ~OEP_ADDRESS_OF;
3410 return (OP_SAME (0) && OP_SAME (1)
3411 /* TARGET_MEM_REFs require equal extra operands. */
3412 && (TREE_CODE (arg0) != TARGET_MEM_REF
3413 || (OP_SAME_WITH_NULL (2)
3414 && OP_SAME_WITH_NULL (3)
3415 && OP_SAME_WITH_NULL (4))));
3417 case ARRAY_REF:
3418 case ARRAY_RANGE_REF:
3419 if (!OP_SAME (0))
3420 return false;
3421 flags &= ~OEP_ADDRESS_OF;
3422 /* Compare the array index by value first if it is constant, as we
3423 may have different types but the same value here. */
3424 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3425 TREE_OPERAND (arg1, 1))
3426 || OP_SAME (1))
3427 && OP_SAME_WITH_NULL (2)
3428 && OP_SAME_WITH_NULL (3)
3429 /* Compare low bound and element size as with OEP_ADDRESS_OF
3430 we have to account for the offset of the ref. */
3431 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3432 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3433 || (operand_equal_p (array_ref_low_bound
3434 (CONST_CAST_TREE (arg0)),
3435 array_ref_low_bound
3436 (CONST_CAST_TREE (arg1)), flags)
3437 && operand_equal_p (array_ref_element_size
3438 (CONST_CAST_TREE (arg0)),
3439 array_ref_element_size
3440 (CONST_CAST_TREE (arg1)),
3441 flags))));
3443 case COMPONENT_REF:
3444 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3445 may be NULL when we're called to compare MEM_EXPRs. */
3446 if (!OP_SAME_WITH_NULL (0))
3447 return false;
3449 bool compare_address = flags & OEP_ADDRESS_OF;
3451 /* Most of the time we only need to compare FIELD_DECLs for equality.
3452 However, when determining the address, look into the actual offsets.
3453 These may match for unions and unshared record types. */
3454 flags &= ~OEP_ADDRESS_OF;
3455 if (!OP_SAME (1))
3457 if (compare_address
3458 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3460 tree field0 = TREE_OPERAND (arg0, 1);
3461 tree field1 = TREE_OPERAND (arg1, 1);
3463 /* Non-FIELD_DECL operands can appear in C++ templates. */
3464 if (TREE_CODE (field0) != FIELD_DECL
3465 || TREE_CODE (field1) != FIELD_DECL
3466 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3467 DECL_FIELD_OFFSET (field1), flags)
3468 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3469 DECL_FIELD_BIT_OFFSET (field1),
3470 flags))
3471 return false;
3473 else
3474 return false;
3477 return OP_SAME_WITH_NULL (2);
3479 case BIT_FIELD_REF:
3480 if (!OP_SAME (0))
3481 return false;
3482 flags &= ~OEP_ADDRESS_OF;
3483 return OP_SAME (1) && OP_SAME (2);
3485 default:
3486 return false;
3489 case tcc_expression:
3490 switch (TREE_CODE (arg0))
3492 case ADDR_EXPR:
3493 /* Be sure we pass the right ADDRESS_OF flag. */
3494 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3495 return operand_equal_p (TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0),
3497 flags | OEP_ADDRESS_OF);
3499 case TRUTH_NOT_EXPR:
3500 return OP_SAME (0);
3502 case TRUTH_ANDIF_EXPR:
3503 case TRUTH_ORIF_EXPR:
3504 return OP_SAME (0) && OP_SAME (1);
3506 case WIDEN_MULT_PLUS_EXPR:
3507 case WIDEN_MULT_MINUS_EXPR:
3508 if (!OP_SAME (2))
3509 return false;
3510 /* The multiplication operands are commutative. */
3511 /* FALLTHRU */
3513 case TRUTH_AND_EXPR:
3514 case TRUTH_OR_EXPR:
3515 case TRUTH_XOR_EXPR:
3516 if (OP_SAME (0) && OP_SAME (1))
3517 return true;
3519 /* Otherwise take into account that this is a commutative operation. */
3520 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3521 TREE_OPERAND (arg1, 1), flags)
3522 && operand_equal_p (TREE_OPERAND (arg0, 1),
3523 TREE_OPERAND (arg1, 0), flags));
3525 case COND_EXPR:
3526 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3527 return false;
3528 flags &= ~OEP_ADDRESS_OF;
3529 return OP_SAME (0);
3531 case BIT_INSERT_EXPR:
3532 /* BIT_INSERT_EXPR has an implicit operand: the type precision
3533 of op1. We need to check that they are the same. */
3534 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3535 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3536 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3537 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3538 return false;
3539 /* FALLTHRU */
3541 case VEC_COND_EXPR:
3542 case DOT_PROD_EXPR:
3543 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3545 case MODIFY_EXPR:
3546 case INIT_EXPR:
3547 case COMPOUND_EXPR:
3548 case PREDECREMENT_EXPR:
3549 case PREINCREMENT_EXPR:
3550 case POSTDECREMENT_EXPR:
3551 case POSTINCREMENT_EXPR:
3552 if (flags & OEP_LEXICOGRAPHIC)
3553 return OP_SAME (0) && OP_SAME (1);
3554 return false;
3556 case CLEANUP_POINT_EXPR:
3557 case EXPR_STMT:
3558 case SAVE_EXPR:
3559 if (flags & OEP_LEXICOGRAPHIC)
3560 return OP_SAME (0);
3561 return false;
3563 case OBJ_TYPE_REF:
3564 /* Virtual table reference. */
3565 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3566 OBJ_TYPE_REF_EXPR (arg1), flags))
3567 return false;
3568 flags &= ~OEP_ADDRESS_OF;
3569 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3570 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3571 return false;
3572 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3573 OBJ_TYPE_REF_OBJECT (arg1), flags))
3574 return false;
3575 if (virtual_method_call_p (arg0))
3577 if (!virtual_method_call_p (arg1))
3578 return false;
3579 return types_same_for_odr (obj_type_ref_class (arg0),
3580 obj_type_ref_class (arg1));
3582 return false;
3584 default:
3585 return false;
3588 case tcc_vl_exp:
3589 switch (TREE_CODE (arg0))
3591 case CALL_EXPR:
3592 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3593 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3594 /* Unless both CALL_EXPRs are either internal or normal function
3595 calls, they are not equal. */
3596 return false;
3597 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3599 /* If the CALL_EXPRs call different internal functions, then they
3600 are not equal. */
3601 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3602 return false;
3604 else
3606 /* If the CALL_EXPRs call different functions, then they are not
3607 equal. */
3608 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3609 flags))
3610 return false;
3613 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3615 unsigned int cef = call_expr_flags (arg0);
3616 if (flags & OEP_PURE_SAME)
3617 cef &= ECF_CONST | ECF_PURE;
3618 else
3619 cef &= ECF_CONST;
3620 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3621 return false;
3624 /* Now see if all the arguments are the same. */
3626 const_call_expr_arg_iterator iter0, iter1;
3627 const_tree a0, a1;
3628 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3629 a1 = first_const_call_expr_arg (arg1, &iter1);
3630 a0 && a1;
3631 a0 = next_const_call_expr_arg (&iter0),
3632 a1 = next_const_call_expr_arg (&iter1))
3633 if (! operand_equal_p (a0, a1, flags))
3634 return false;
3636 /* If we get here and both argument lists are exhausted
3637 then the CALL_EXPRs are equal. */
3638 return ! (a0 || a1);
3640 default:
3641 return false;
3644 case tcc_declaration:
3645 /* Consider __builtin_sqrt equal to sqrt. */
3646 if (TREE_CODE (arg0) == FUNCTION_DECL)
3647 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3648 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3649 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3650 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3652 if (DECL_P (arg0)
3653 && (flags & OEP_DECL_NAME)
3654 && (flags & OEP_LEXICOGRAPHIC))
3656 /* Consider decls with the same name equal. The caller needs
3657 to make sure they refer to the same entity (such as a function
3658 formal parameter). */
3659 tree a0name = DECL_NAME (arg0);
3660 tree a1name = DECL_NAME (arg1);
3661 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3662 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3663 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3665 return false;
3667 case tcc_exceptional:
3668 if (TREE_CODE (arg0) == CONSTRUCTOR)
3670 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3671 return false;
3673 /* In GIMPLE constructors are used only to build vectors from
3674 elements. Individual elements in the constructor must be
3675 indexed in increasing order and form an initial sequence.
3677 We make no effort to compare constructors in GENERIC.
3678 (see sem_variable::equals in ipa-icf which can do so for
3679 constants). */
3680 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3681 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3682 return false;
3684 /* Be sure that the constructed vectors have the same representation.
3685 So far we have only checked that the element precisions and modes match.
3686 Vectors may be BLKmode, so also check that the numbers of
3687 parts match. */
3688 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3689 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3690 return false;
3692 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3693 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3694 unsigned int len = vec_safe_length (v0);
3696 if (len != vec_safe_length (v1))
3697 return false;
3699 for (unsigned int i = 0; i < len; i++)
3701 constructor_elt *c0 = &(*v0)[i];
3702 constructor_elt *c1 = &(*v1)[i];
3704 if (!operand_equal_p (c0->value, c1->value, flags)
3705 /* In GIMPLE the indexes can be either NULL or matching i.
3706 Double check this so we won't get false
3707 positives for GENERIC. */
3708 || (c0->index
3709 && (TREE_CODE (c0->index) != INTEGER_CST
3710 || compare_tree_int (c0->index, i)))
3711 || (c1->index
3712 && (TREE_CODE (c1->index) != INTEGER_CST
3713 || compare_tree_int (c1->index, i))))
3714 return false;
3716 return true;
3718 else if (TREE_CODE (arg0) == STATEMENT_LIST
3719 && (flags & OEP_LEXICOGRAPHIC))
3721 /* Compare the STATEMENT_LISTs. */
3722 tree_stmt_iterator tsi1, tsi2;
3723 tree body1 = CONST_CAST_TREE (arg0);
3724 tree body2 = CONST_CAST_TREE (arg1);
3725 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3726 tsi_next (&tsi1), tsi_next (&tsi2))
3728 /* The lists don't have the same number of statements. */
3729 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3730 return false;
3731 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3732 return true;
3733 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3734 flags & (OEP_LEXICOGRAPHIC
3735 | OEP_NO_HASH_CHECK)))
3736 return false;
3739 return false;
3741 case tcc_statement:
3742 switch (TREE_CODE (arg0))
3744 case RETURN_EXPR:
3745 if (flags & OEP_LEXICOGRAPHIC)
3746 return OP_SAME_WITH_NULL (0);
3747 return false;
3748 case DEBUG_BEGIN_STMT:
3749 if (flags & OEP_LEXICOGRAPHIC)
3750 return true;
3751 return false;
3752 default:
3753 return false;
3756 default:
3757 return false;
3760 #undef OP_SAME
3761 #undef OP_SAME_WITH_NULL
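/* Illustrative calls (editor's sketch) against the flag semantics
   documented before the function:

     operand_equal_p (a + b, b + a, 0)       -> true; PLUS_EXPR is
       commutative, so the swapped order is tried
     operand_equal_p (x, x, OEP_ONLY_CONST)  -> false for a VAR_DECL x,
       true for constants
     operand_equal_p (f (p), f (p), 0)       -> false unless f is
       ECF_CONST (or ECF_PURE together with OEP_PURE_SAME)
     operand_equal_p (-0.0, 0.0, 0)          -> true only when signed
       zeros are not honored; OEP_BITWISE keeps them distinct.  */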
3764 /* Generate a hash value for an expression. This can be used iteratively
3765 by passing a previous result as the HSTATE argument. */
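/* Editor's usage sketch.  Outside callers typically reach this hook
   through the inchash::add_expr wrapper rather than invoking the
   method directly (an assumption based on the usual pattern):

     inchash::hash hstate;
     inchash::add_expr (t, hstate, 0);
     hashval_t h = hstate.end ();

   The flags should mirror those later passed to operand_equal_p, so
   that trees which compare equal also hash equal -- the hash
   verification mode asserts exactly this.  */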
3767 void
3768 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3769 unsigned int flags)
3771 int i;
3772 enum tree_code code;
3773 enum tree_code_class tclass;
3775 if (t == NULL_TREE || t == error_mark_node)
3777 hstate.merge_hash (0);
3778 return;
3781 STRIP_ANY_LOCATION_WRAPPER (t);
3783 if (!(flags & OEP_ADDRESS_OF))
3784 STRIP_NOPS (t);
3786 code = TREE_CODE (t);
3788 switch (code)
3790 /* Alas, constants aren't shared, so we can't rely on pointer
3791 identity. */
3792 case VOID_CST:
3793 hstate.merge_hash (0);
3794 return;
3795 case INTEGER_CST:
3796 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3797 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3798 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3799 return;
3800 case REAL_CST:
3802 unsigned int val2;
3803 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3804 val2 = rvc_zero;
3805 else
3806 val2 = real_hash (TREE_REAL_CST_PTR (t));
3807 hstate.merge_hash (val2);
3808 return;
3810 case FIXED_CST:
3812 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3813 hstate.merge_hash (val2);
3814 return;
3816 case STRING_CST:
3817 hstate.add ((const void *) TREE_STRING_POINTER (t),
3818 TREE_STRING_LENGTH (t));
3819 return;
3820 case COMPLEX_CST:
3821 hash_operand (TREE_REALPART (t), hstate, flags);
3822 hash_operand (TREE_IMAGPART (t), hstate, flags);
3823 return;
3824 case VECTOR_CST:
3826 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3827 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3828 unsigned int count = vector_cst_encoded_nelts (t);
3829 for (unsigned int i = 0; i < count; ++i)
3830 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3831 return;
3833 case SSA_NAME:
3834 /* We can just compare by pointer. */
3835 hstate.add_hwi (SSA_NAME_VERSION (t));
3836 return;
3837 case PLACEHOLDER_EXPR:
3838 /* The node itself doesn't matter. */
3839 return;
3840 case BLOCK:
3841 case OMP_CLAUSE:
3842 /* Ignore. */
3843 return;
3844 case TREE_LIST:
3845 /* A list of expressions, for a CALL_EXPR or as the elements of a
3846 VECTOR_CST. */
3847 for (; t; t = TREE_CHAIN (t))
3848 hash_operand (TREE_VALUE (t), hstate, flags);
3849 return;
3850 case CONSTRUCTOR:
3852 unsigned HOST_WIDE_INT idx;
3853 tree field, value;
3854 flags &= ~OEP_ADDRESS_OF;
3855 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3858 /* In GIMPLE the indexes can be either NULL or matching i. */
3859 if (field == NULL_TREE)
3860 field = bitsize_int (idx);
3861 hash_operand (field, hstate, flags);
3862 hash_operand (value, hstate, flags);
3864 return;
3866 case STATEMENT_LIST:
3868 tree_stmt_iterator i;
3869 for (i = tsi_start (CONST_CAST_TREE (t));
3870 !tsi_end_p (i); tsi_next (&i))
3871 hash_operand (tsi_stmt (i), hstate, flags);
3872 return;
3874 case TREE_VEC:
3875 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3876 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3877 return;
3878 case IDENTIFIER_NODE:
3879 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3880 return;
3881 case FUNCTION_DECL:
3882 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3883 Otherwise nodes that compare equal according to operand_equal_p might
3884 get different hash codes. However, don't do this for machine specific
3885 or front end builtins, since the function code is overloaded in those
3886 cases. */
3887 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3888 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3890 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3891 code = TREE_CODE (t);
3893 /* FALL THROUGH */
3894 default:
3895 if (POLY_INT_CST_P (t))
3897 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3898 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3899 return;
3901 tclass = TREE_CODE_CLASS (code);
3903 if (tclass == tcc_declaration)
3905 /* DECLs have a unique ID.  */
3906 hstate.add_hwi (DECL_UID (t));
3908 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3910 /* For comparisons that can be swapped, use the lower
3911 tree code. */
3912 enum tree_code ccode = swap_tree_comparison (code);
3913 if (code < ccode)
3914 ccode = code;
3915 hstate.add_object (ccode);
3916 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3917 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3919 else if (CONVERT_EXPR_CODE_P (code))
3921 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3922 operand_equal_p. */
3923 enum tree_code ccode = NOP_EXPR;
3924 hstate.add_object (ccode);
3926 /* Don't hash the type, as that can lead to having nodes which
3927 compare equal according to operand_equal_p, but which
3928 have different hash codes. Make sure to include signedness
3929 in the hash computation. */
3930 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3931 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3933 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3934 else if (code == MEM_REF
3935 && (flags & OEP_ADDRESS_OF) != 0
3936 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3937 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3938 && integer_zerop (TREE_OPERAND (t, 1)))
3939 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3940 hstate, flags);
3941 /* Don't ICE on FE specific trees, or their arguments etc.
3942 during operand_equal_p hash verification. */
3943 else if (!IS_EXPR_CODE_CLASS (tclass))
3944 gcc_assert (flags & OEP_HASH_CHECK);
3945 else
3947 unsigned int sflags = flags;
3949 hstate.add_object (code);
3951 switch (code)
3953 case ADDR_EXPR:
3954 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3955 flags |= OEP_ADDRESS_OF;
3956 sflags = flags;
3957 break;
3959 case INDIRECT_REF:
3960 case MEM_REF:
3961 case TARGET_MEM_REF:
3962 flags &= ~OEP_ADDRESS_OF;
3963 sflags = flags;
3964 break;
3966 case COMPONENT_REF:
3967 if (sflags & OEP_ADDRESS_OF)
3969 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3970 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3971 hstate, flags & ~OEP_ADDRESS_OF);
3972 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3973 hstate, flags & ~OEP_ADDRESS_OF);
3974 return;
3976 break;
3977 case ARRAY_REF:
3978 case ARRAY_RANGE_REF:
3979 case BIT_FIELD_REF:
3980 sflags &= ~OEP_ADDRESS_OF;
3981 break;
3983 case COND_EXPR:
3984 flags &= ~OEP_ADDRESS_OF;
3985 break;
3987 case WIDEN_MULT_PLUS_EXPR:
3988 case WIDEN_MULT_MINUS_EXPR:
3990 /* The multiplication operands are commutative. */
3991 inchash::hash one, two;
3992 hash_operand (TREE_OPERAND (t, 0), one, flags);
3993 hash_operand (TREE_OPERAND (t, 1), two, flags);
3994 hstate.add_commutative (one, two);
3995 hash_operand (TREE_OPERAND (t, 2), two, flags);
3996 return;
3999 case CALL_EXPR:
4000 if (CALL_EXPR_FN (t) == NULL_TREE)
4001 hstate.add_int (CALL_EXPR_IFN (t));
4002 break;
4004 case TARGET_EXPR:
4005 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4006 Usually different TARGET_EXPRs should just use
4007 different temporaries in their slots. */
4008 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4009 return;
4011 case OBJ_TYPE_REF:
4012 /* Virtual table reference. */
4013 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4014 flags &= ~OEP_ADDRESS_OF;
4015 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4016 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4017 if (!virtual_method_call_p (t))
4018 return;
4019 if (tree c = obj_type_ref_class (t))
4021 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4022 /* We compute mangled names only when free_lang_data is run.
4023 In that case we can hash precisely. */
4024 if (TREE_CODE (c) == TYPE_DECL
4025 && DECL_ASSEMBLER_NAME_SET_P (c))
4026 hstate.add_object
4027 (IDENTIFIER_HASH_VALUE
4028 (DECL_ASSEMBLER_NAME (c)));
4030 return;
4031 default:
4032 break;
4035 /* Don't hash the type, as that can lead to having nodes which
4036 compare equal according to operand_equal_p, but which
4037 have different hash codes. */
4038 if (code == NON_LVALUE_EXPR)
4040 /* Make sure to include signedness in the hash computation. */
4041 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4042 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4045 else if (commutative_tree_code (code))
4047 /* It's a commutative expression. We want to hash it the same
4048 however it appears. We do this by first hashing both operands
4049 and then rehashing based on the order of their independent
4050 hashes. */
4051 inchash::hash one, two;
4052 hash_operand (TREE_OPERAND (t, 0), one, flags);
4053 hash_operand (TREE_OPERAND (t, 1), two, flags);
4054 hstate.add_commutative (one, two);
4056 else
4057 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4058 hash_operand (TREE_OPERAND (t, i), hstate,
4059 i == 0 ? flags : sflags);
4061 return;
4065 bool
4066 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4067 unsigned int flags, bool *ret)
4069 /* When checking and unless comparing DECL names, verify that if
4070 the outermost operand_equal_p call returns non-zero then ARG0
4071 and ARG1 have the same hash value. */
4072 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4074 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4076 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4078 inchash::hash hstate0 (0), hstate1 (0);
4079 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4080 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4081 hashval_t h0 = hstate0.end ();
4082 hashval_t h1 = hstate1.end ();
4083 gcc_assert (h0 == h1);
4085 *ret = true;
4087 else
4088 *ret = false;
4090 return true;
4093 return false;
4097 static operand_compare default_compare_instance;
4099 /* Convenience wrapper around the operand_compare class, because usually we do
4100 not need to play with the valueizer. */
4102 bool
4103 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4105 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4108 namespace inchash
4111 /* Generate a hash value for an expression. This can be used iteratively
4112 by passing a previous result as the HSTATE argument.
4114 This function is intended to produce the same hash for expressions which
4115 would compare equal using operand_equal_p. */
4116 void
4117 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4119 default_compare_instance.hash_operand (t, hstate, flags);
4124 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4125 with a different signedness or a narrower precision. */
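/* For example, if ARG0 is a variable X of type int, this also accepts
   ARG1 of the form (unsigned int) X (a same-mode signedness change,
   stripped by STRIP_NOPS) or (long) X (a single widening conversion
   whose operand matches ARG0).  Illustrative shapes only.  */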
4127 static bool
4128 operand_equal_for_comparison_p (tree arg0, tree arg1)
4130 if (operand_equal_p (arg0, arg1, 0))
4131 return true;
4133 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4134 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4135 return false;
4137 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4138 and see if the inner values are the same. This removes any
4139 signedness comparison, which doesn't matter here. */
4140 tree op0 = arg0;
4141 tree op1 = arg1;
4142 STRIP_NOPS (op0);
4143 STRIP_NOPS (op1);
4144 if (operand_equal_p (op0, op1, 0))
4145 return true;
4147 /* Discard a single widening conversion from ARG1 and see if the inner
4148 value is the same as ARG0. */
4149 if (CONVERT_EXPR_P (arg1)
4150 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4151 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4152 < TYPE_PRECISION (TREE_TYPE (arg1))
4153 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4154 return true;
4156 return false;
4159 /* See if ARG is an expression that is either a comparison or is performing
4160 arithmetic on comparisons. The comparisons must only be comparing
4161 two different values, which will be stored in *CVAL1 and *CVAL2; if
4162 they are nonzero it means that some operands have already been found.
4163 No variables may be used anywhere else in the expression except in the
4164 comparisons.
4166 If this is true, return true. Otherwise, return false. */
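/* For example, ARG = (a < b) || (a == b) succeeds with *CVAL1 = a and
   *CVAL2 = b, since each comparison uses only those two values, while
   ARG = (a < b) || (c < d) fails because four distinct values appear.
   Here a through d are arbitrary illustrative operands.  */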
4168 static bool
4169 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4171 enum tree_code code = TREE_CODE (arg);
4172 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4174 /* We can handle some of the tcc_expression cases here. */
4175 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4176 tclass = tcc_unary;
4177 else if (tclass == tcc_expression
4178 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4179 || code == COMPOUND_EXPR))
4180 tclass = tcc_binary;
4182 switch (tclass)
4184 case tcc_unary:
4185 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4187 case tcc_binary:
4188 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4189 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4191 case tcc_constant:
4192 return true;
4194 case tcc_expression:
4195 if (code == COND_EXPR)
4196 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4197 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4198 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4199 return false;
4201 case tcc_comparison:
4202 /* First see if we can handle the first operand, then the second. For
4203 the second operand, we know *CVAL1 can't be zero. It must be that
4204 one side of the comparison is each of the values; test for the
4205 case where this isn't true by failing if the two operands
4206 are the same. */
4208 if (operand_equal_p (TREE_OPERAND (arg, 0),
4209 TREE_OPERAND (arg, 1), 0))
4210 return false;
4212 if (*cval1 == 0)
4213 *cval1 = TREE_OPERAND (arg, 0);
4214 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4216 else if (*cval2 == 0)
4217 *cval2 = TREE_OPERAND (arg, 0);
4218 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4220 else
4221 return false;
4223 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4225 else if (*cval2 == 0)
4226 *cval2 = TREE_OPERAND (arg, 1);
4227 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4229 else
4230 return false;
4232 return true;
4234 default:
4235 return false;
4239 /* ARG is a tree that is known to contain just arithmetic operations and
4240 comparisons. Evaluate the operations in the tree substituting NEW0 for
4241 any occurrence of OLD0 as an operand of a comparison and likewise for
4242 NEW1 and OLD1. */
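/* For example, with OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y,
   eval_subst rewrites (a < b) && (a == b) into (x < y) && (x == y),
   rebuilding each node with the fold_build* routines.  The names are
   illustrative only.  */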
4244 static tree
4245 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4246 tree old1, tree new1)
4248 tree type = TREE_TYPE (arg);
4249 enum tree_code code = TREE_CODE (arg);
4250 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4252 /* We can handle some of the tcc_expression cases here. */
4253 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4254 tclass = tcc_unary;
4255 else if (tclass == tcc_expression
4256 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4257 tclass = tcc_binary;
4259 switch (tclass)
4261 case tcc_unary:
4262 return fold_build1_loc (loc, code, type,
4263 eval_subst (loc, TREE_OPERAND (arg, 0),
4264 old0, new0, old1, new1));
4266 case tcc_binary:
4267 return fold_build2_loc (loc, code, type,
4268 eval_subst (loc, TREE_OPERAND (arg, 0),
4269 old0, new0, old1, new1),
4270 eval_subst (loc, TREE_OPERAND (arg, 1),
4271 old0, new0, old1, new1));
4273 case tcc_expression:
4274 switch (code)
4276 case SAVE_EXPR:
4277 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4278 old1, new1);
4280 case COMPOUND_EXPR:
4281 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4282 old1, new1);
4284 case COND_EXPR:
4285 return fold_build3_loc (loc, code, type,
4286 eval_subst (loc, TREE_OPERAND (arg, 0),
4287 old0, new0, old1, new1),
4288 eval_subst (loc, TREE_OPERAND (arg, 1),
4289 old0, new0, old1, new1),
4290 eval_subst (loc, TREE_OPERAND (arg, 2),
4291 old0, new0, old1, new1));
4292 default:
4293 break;
4295 /* Fall through - ??? */
4297 case tcc_comparison:
4299 tree arg0 = TREE_OPERAND (arg, 0);
4300 tree arg1 = TREE_OPERAND (arg, 1);
4302 /* We need to check both for exact equality and tree equality. The
4303 former will be true if the operand has a side-effect. In that
4304 case, we know the operand occurred exactly once. */
4306 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4307 arg0 = new0;
4308 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4309 arg0 = new1;
4311 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4312 arg1 = new0;
4313 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4314 arg1 = new1;
4316 return fold_build2_loc (loc, code, type, arg0, arg1);
4319 default:
4320 return arg;
4324 /* Return a tree for the case when the result of an expression is RESULT
4325 converted to TYPE and OMITTED was previously an operand of the expression
4326 but is now not needed (e.g., we folded OMITTED * 0).
4328 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4329 the conversion of RESULT to TYPE. */
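/* For example, when folding f () * 0 where the call has side effects,
   a caller can use omit_one_operand_loc (loc, type, zero, f ()) to get
   the COMPOUND_EXPR (f (), 0), so the call is still evaluated.  A
   sketch of the intended use, not a quote of a particular caller.  */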
4331 tree
4332 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4334 tree t = fold_convert_loc (loc, type, result);
4336 /* If the resulting operand is an empty statement, just return the omitted
4337 statement cast to void. */
4338 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4339 return build1_loc (loc, NOP_EXPR, void_type_node,
4340 fold_ignored_result (omitted));
4342 if (TREE_SIDE_EFFECTS (omitted))
4343 return build2_loc (loc, COMPOUND_EXPR, type,
4344 fold_ignored_result (omitted), t);
4346 return non_lvalue_loc (loc, t);
4349 /* Return a tree for the case when the result of an expression is RESULT
4350 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4351 of the expression but are now not needed.
4353 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4354 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4355 evaluated before OMITTED2. Otherwise, if neither has side effects,
4356 just do the conversion of RESULT to TYPE. */
4358 tree
4359 omit_two_operands_loc (location_t loc, tree type, tree result,
4360 tree omitted1, tree omitted2)
4362 tree t = fold_convert_loc (loc, type, result);
4364 if (TREE_SIDE_EFFECTS (omitted2))
4365 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4366 if (TREE_SIDE_EFFECTS (omitted1))
4367 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4369 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4373 /* Return a simplified tree node for the truth-negation of ARG. This
4374 never alters ARG itself. We assume that ARG is an operation that
4375 returns a truth value (0 or 1).
4377 FIXME: one would think we would fold the result, but it causes
4378 problems with the dominator optimizer. */
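/* For example, !(a < b) simply becomes a >= b for integral operands.
   For floating-point operands under -ftrapping-math we give up on
   non-equality comparisons below (the inverse of < is an unordered
   test when NaNs are honored), returning NULL_TREE so the caller
   keeps an explicit TRUTH_NOT_EXPR.  */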
4380 static tree
4381 fold_truth_not_expr (location_t loc, tree arg)
4383 tree type = TREE_TYPE (arg);
4384 enum tree_code code = TREE_CODE (arg);
4385 location_t loc1, loc2;
4387 /* If this is a comparison, we can simply invert it, except for
4388 floating-point non-equality comparisons, in which case we just
4389 enclose a TRUTH_NOT_EXPR around what we have. */
4391 if (TREE_CODE_CLASS (code) == tcc_comparison)
4393 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4394 if (FLOAT_TYPE_P (op_type)
4395 && flag_trapping_math
4396 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4397 && code != NE_EXPR && code != EQ_EXPR)
4398 return NULL_TREE;
4400 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4401 if (code == ERROR_MARK)
4402 return NULL_TREE;
4404 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4405 TREE_OPERAND (arg, 1));
4406 copy_warning (ret, arg);
4407 return ret;
4410 switch (code)
4412 case INTEGER_CST:
4413 return constant_boolean_node (integer_zerop (arg), type);
4415 case TRUTH_AND_EXPR:
4416 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4417 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4418 return build2_loc (loc, TRUTH_OR_EXPR, type,
4419 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4420 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4422 case TRUTH_OR_EXPR:
4423 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4424 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4425 return build2_loc (loc, TRUTH_AND_EXPR, type,
4426 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4427 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4429 case TRUTH_XOR_EXPR:
4430 /* Here we can invert either operand. We invert the first operand
4431 unless the second operand is a TRUTH_NOT_EXPR in which case our
4432 result is the XOR of the first operand with the inside of the
4433 negation of the second operand. */
4435 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4436 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4437 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4438 else
4439 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4440 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4441 TREE_OPERAND (arg, 1));
4443 case TRUTH_ANDIF_EXPR:
4444 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4445 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4446 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4447 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4448 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4450 case TRUTH_ORIF_EXPR:
4451 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4452 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4453 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4454 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4455 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4457 case TRUTH_NOT_EXPR:
4458 return TREE_OPERAND (arg, 0);
4460 case COND_EXPR:
4462 tree arg1 = TREE_OPERAND (arg, 1);
4463 tree arg2 = TREE_OPERAND (arg, 2);
4465 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4466 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4468 /* A COND_EXPR may have a throw as one operand, which
4469 then has void type. Just leave void operands
4470 as they are. */
4471 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4472 VOID_TYPE_P (TREE_TYPE (arg1))
4473 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4474 VOID_TYPE_P (TREE_TYPE (arg2))
4475 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4478 case COMPOUND_EXPR:
4479 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4480 return build2_loc (loc, COMPOUND_EXPR, type,
4481 TREE_OPERAND (arg, 0),
4482 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4484 case NON_LVALUE_EXPR:
4485 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4486 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4488 CASE_CONVERT:
4489 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4490 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4492 /* fall through */
4494 case FLOAT_EXPR:
4495 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4496 return build1_loc (loc, TREE_CODE (arg), type,
4497 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4499 case BIT_AND_EXPR:
4500 if (!integer_onep (TREE_OPERAND (arg, 1)))
4501 return NULL_TREE;
4502 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4504 case SAVE_EXPR:
4505 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4507 case CLEANUP_POINT_EXPR:
4508 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4509 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4510 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4512 default:
4513 return NULL_TREE;
4517 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4518 assume that ARG is an operation that returns a truth value (0 or 1
4519 for scalars, 0 or -1 for vectors). Return the folded expression if
4520 folding is successful. Otherwise, return NULL_TREE. */
4522 static tree
4523 fold_invert_truthvalue (location_t loc, tree arg)
4525 tree type = TREE_TYPE (arg);
4526 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4527 ? BIT_NOT_EXPR
4528 : TRUTH_NOT_EXPR,
4529 type, arg);
4532 /* Return a simplified tree node for the truth-negation of ARG. This
4533 never alters ARG itself. We assume that ARG is an operation that
4534 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4536 tree
4537 invert_truthvalue_loc (location_t loc, tree arg)
4539 if (TREE_CODE (arg) == ERROR_MARK)
4540 return arg;
4542 tree type = TREE_TYPE (arg);
4543 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4544 ? BIT_NOT_EXPR
4545 : TRUTH_NOT_EXPR,
4546 type, arg);
4549 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4550 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4551 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4552 is the original memory reference used to preserve the alias set of
4553 the access. */
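/* For example, a request for 8 bits at bit position 16 within a 32-bit
   word W yields BIT_FIELD_REF <W, 8, 16>, possibly converted to TYPE;
   if the requested bits cover all of W, a plain conversion of W is
   returned instead.  Illustrative values.  */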
4555 static tree
4556 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4557 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4558 int unsignedp, int reversep)
4560 tree result, bftype;
4562 /* Attempt not to lose the access path if possible. */
4563 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4565 tree ninner = TREE_OPERAND (orig_inner, 0);
4566 machine_mode nmode;
4567 poly_int64 nbitsize, nbitpos;
4568 tree noffset;
4569 int nunsignedp, nreversep, nvolatilep = 0;
4570 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4571 &noffset, &nmode, &nunsignedp,
4572 &nreversep, &nvolatilep);
4573 if (base == inner
4574 && noffset == NULL_TREE
4575 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4576 && !reversep
4577 && !nreversep
4578 && !nvolatilep)
4580 inner = ninner;
4581 bitpos -= nbitpos;
4585 alias_set_type iset = get_alias_set (orig_inner);
4586 if (iset == 0 && get_alias_set (inner) != iset)
4587 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4588 build_fold_addr_expr (inner),
4589 build_int_cst (ptr_type_node, 0));
4591 if (known_eq (bitpos, 0) && !reversep)
4593 tree size = TYPE_SIZE (TREE_TYPE (inner));
4594 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4595 || POINTER_TYPE_P (TREE_TYPE (inner)))
4596 && tree_fits_shwi_p (size)
4597 && tree_to_shwi (size) == bitsize)
4598 return fold_convert_loc (loc, type, inner);
4601 bftype = type;
4602 if (TYPE_PRECISION (bftype) != bitsize
4603 || TYPE_UNSIGNED (bftype) == !unsignedp)
4604 bftype = build_nonstandard_integer_type (bitsize, 0);
4606 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4607 bitsize_int (bitsize), bitsize_int (bitpos));
4608 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4610 if (bftype != type)
4611 result = fold_convert_loc (loc, type, result);
4613 return result;
4616 /* Optimize a bit-field compare.
4618 There are two cases: the first is a compare against a constant and the
4619 second is a comparison of two items where the fields are at the same
4620 bit position relative to the start of a chunk (byte, halfword, word)
4621 large enough to contain it. In these cases we can avoid the shift
4622 implicit in bitfield extractions.
4624 For constants, we emit a compare of the shifted constant with the
4625 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4626 compared. For two fields at the same position, we do the ANDs with the
4627 similar mask and compare the result of the ANDs.
4629 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4630 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4631 are the left and right operands of the comparison, respectively.
4633 If the optimization described above can be done, we return the resulting
4634 tree. Otherwise we return zero. */
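/* As a layout-dependent sketch: for

     struct S { unsigned a : 3; unsigned b : 5; } s;

   a test such as s.b == 7 can become (w & M) == C, where w is a
   mode-sized chunk containing the field, M is the mask computed below
   and C is 7 shifted into the field's position, avoiding the shift
   implicit in extracting s.b.  */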
4636 static tree
4637 optimize_bit_field_compare (location_t loc, enum tree_code code,
4638 tree compare_type, tree lhs, tree rhs)
4640 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4641 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4642 tree type = TREE_TYPE (lhs);
4643 tree unsigned_type;
4644 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4645 machine_mode lmode, rmode;
4646 scalar_int_mode nmode;
4647 int lunsignedp, runsignedp;
4648 int lreversep, rreversep;
4649 int lvolatilep = 0, rvolatilep = 0;
4650 tree linner, rinner = NULL_TREE;
4651 tree mask;
4652 tree offset;
4654 /* Get all the information about the extractions being done. If the bit size
4655 is the same as the size of the underlying object, we aren't doing an
4656 extraction at all and so can do nothing. We also don't want to
4657 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4658 then will no longer be able to replace it. */
4659 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4660 &lunsignedp, &lreversep, &lvolatilep);
4661 if (linner == lhs
4662 || !known_size_p (plbitsize)
4663 || !plbitsize.is_constant (&lbitsize)
4664 || !plbitpos.is_constant (&lbitpos)
4665 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4666 || offset != 0
4667 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4668 || lvolatilep)
4669 return 0;
4671 if (const_p)
4672 rreversep = lreversep;
4673 else
4675 /* If this is not a constant, we can only do something if bit positions,
4676 sizes, signedness and storage order are the same. */
4677 rinner
4678 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4679 &runsignedp, &rreversep, &rvolatilep);
4681 if (rinner == rhs
4682 || maybe_ne (lbitpos, rbitpos)
4683 || maybe_ne (lbitsize, rbitsize)
4684 || lunsignedp != runsignedp
4685 || lreversep != rreversep
4686 || offset != 0
4687 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4688 || rvolatilep)
4689 return 0;
4692 /* Honor the C++ memory model and mimic what RTL expansion does. */
4693 poly_uint64 bitstart = 0;
4694 poly_uint64 bitend = 0;
4695 if (TREE_CODE (lhs) == COMPONENT_REF)
4697 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4698 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4699 return 0;
4702 /* See if we can find a mode to refer to this field. We should be able to,
4703 but fail if we can't. */
4704 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4705 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4706 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4707 TYPE_ALIGN (TREE_TYPE (rinner))),
4708 BITS_PER_WORD, false, &nmode))
4709 return 0;
4711 /* Set signed and unsigned types of the precision of this mode for the
4712 shifts below. */
4713 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4715 /* Compute the bit position and size for the new reference and our offset
4716 within it. If the new reference is the same size as the original, we
4717 won't optimize anything, so return zero. */
4718 nbitsize = GET_MODE_BITSIZE (nmode);
4719 nbitpos = lbitpos & ~ (nbitsize - 1);
4720 lbitpos -= nbitpos;
4721 if (nbitsize == lbitsize)
4722 return 0;
4724 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4725 lbitpos = nbitsize - lbitsize - lbitpos;
4727 /* Make the mask to be used against the extracted field. */
4728 mask = build_int_cst_type (unsigned_type, -1);
4729 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4730 mask = const_binop (RSHIFT_EXPR, mask,
4731 size_int (nbitsize - lbitsize - lbitpos));
4733 if (! const_p)
4735 if (nbitpos < 0)
4736 return 0;
4738 /* If not comparing with constant, just rework the comparison
4739 and return. */
4740 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4741 nbitsize, nbitpos, 1, lreversep);
4742 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4743 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4744 nbitsize, nbitpos, 1, rreversep);
4745 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4746 return fold_build2_loc (loc, code, compare_type, t1, t2);
4749 /* Otherwise, we are handling the constant case. See if the constant is too
4750 big for the field. Warn and return a tree for 0 (false) if so. We do
4751 this not only for its own sake, but to avoid having to test for this
4752 error case below. If we didn't, we might generate wrong code.
4754 For unsigned fields, the constant shifted right by the field length should
4755 be all zero. For signed fields, the high-order bits should agree with
4756 the sign bit. */
4758 if (lunsignedp)
4760 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4762 warning (0, "comparison is always %d due to width of bit-field",
4763 code == NE_EXPR);
4764 return constant_boolean_node (code == NE_EXPR, compare_type);
4767 else
4769 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4770 if (tem != 0 && tem != -1)
4772 warning (0, "comparison is always %d due to width of bit-field",
4773 code == NE_EXPR);
4774 return constant_boolean_node (code == NE_EXPR, compare_type);
4778 if (nbitpos < 0)
4779 return 0;
4781 /* Single-bit compares should always be against zero. */
4782 if (lbitsize == 1 && ! integer_zerop (rhs))
4784 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4785 rhs = build_int_cst (type, 0);
4788 /* Make a new bitfield reference, shift the constant over the
4789 appropriate number of bits and mask it with the computed mask
4790 (in case this was a signed field). If we changed it, make a new one. */
4791 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4792 nbitsize, nbitpos, 1, lreversep);
4794 rhs = const_binop (BIT_AND_EXPR,
4795 const_binop (LSHIFT_EXPR,
4796 fold_convert_loc (loc, unsigned_type, rhs),
4797 size_int (lbitpos)),
4798 mask);
4800 lhs = build2_loc (loc, code, compare_type,
4801 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4802 return lhs;
4805 /* Subroutine for fold_truth_andor_1: decode a field reference.
4807 If EXP is a comparison reference, we return the innermost reference.
4809 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4810 set to the starting bit number.
4812 If the innermost field can be completely contained in a mode-sized
4813 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4815 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4816 otherwise it is not changed.
4818 *PUNSIGNEDP is set to the signedness of the field.
4820 *PREVERSEP is set to the storage order of the field.
4822 *PMASK is set to the mask used. This is either contained in a
4823 BIT_AND_EXPR or derived from the width of the field.
4825 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4827 Return 0 if this is not a component reference or is one that we can't
4828 do anything with. */
4830 static tree
4831 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4832 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4833 int *punsignedp, int *preversep, int *pvolatilep,
4834 tree *pmask, tree *pand_mask)
4836 tree exp = *exp_;
4837 tree outer_type = 0;
4838 tree and_mask = 0;
4839 tree mask, inner, offset;
4840 tree unsigned_type;
4841 unsigned int precision;
4843 /* All the optimizations using this function assume integer fields.
4844 There are problems with FP fields since the type_for_size call
4845 below can fail for, e.g., XFmode. */
4846 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4847 return NULL_TREE;
4849 /* We are interested in the bare arrangement of bits, so strip everything
4850 that doesn't affect the machine mode. However, record the type of the
4851 outermost expression if it may matter below. */
4852 if (CONVERT_EXPR_P (exp)
4853 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4854 outer_type = TREE_TYPE (exp);
4855 STRIP_NOPS (exp);
4857 if (TREE_CODE (exp) == BIT_AND_EXPR)
4859 and_mask = TREE_OPERAND (exp, 1);
4860 exp = TREE_OPERAND (exp, 0);
4861 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4862 if (TREE_CODE (and_mask) != INTEGER_CST)
4863 return NULL_TREE;
4866 poly_int64 poly_bitsize, poly_bitpos;
4867 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4868 pmode, punsignedp, preversep, pvolatilep);
4869 if ((inner == exp && and_mask == 0)
4870 || !poly_bitsize.is_constant (pbitsize)
4871 || !poly_bitpos.is_constant (pbitpos)
4872 || *pbitsize < 0
4873 || offset != 0
4874 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4875 /* Reject out-of-bound accesses (PR79731). */
4876 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4877 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4878 *pbitpos + *pbitsize) < 0))
4879 return NULL_TREE;
4881 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4882 if (unsigned_type == NULL_TREE)
4883 return NULL_TREE;
4885 *exp_ = exp;
4887 /* If the number of bits in the reference is the same as the bitsize of
4888 the outer type, then the outer type gives the signedness. Otherwise
4889 (in case of a small bitfield) the signedness is unchanged. */
4890 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4891 *punsignedp = TYPE_UNSIGNED (outer_type);
4893 /* Compute the mask to access the bitfield. */
4894 precision = TYPE_PRECISION (unsigned_type);
4896 mask = build_int_cst_type (unsigned_type, -1);
4898 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4899 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4901 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4902 if (and_mask != 0)
4903 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4904 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4906 *pmask = mask;
4907 *pand_mask = and_mask;
4908 return inner;
4911 /* Return true if MASK represents a mask of SIZE ones in the low-order
4912 bit positions and the type of MASK is signed. */
4914 static bool
4915 all_ones_mask_p (const_tree mask, unsigned int size)
4917 tree type = TREE_TYPE (mask);
4918 unsigned int precision = TYPE_PRECISION (type);
4920 /* If this function returns true when the type of the mask is
4921 UNSIGNED, then there will be errors. In particular see
4922 gcc.c-torture/execute/990326-1.c. There does not appear to be
4923 any documentation paper trail as to why this is so. But the
4924 pre-wide-int code worked with that restriction, and it has been
4925 preserved here. */
4926 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4927 return false;
4929 return wi::mask (size, false, precision) == wi::to_wide (mask);
4932 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4933 represents the sign bit of EXP's type. If EXP represents a sign
4934 or zero extension, also test VAL against the unextended type.
4935 The return value is the (sub)expression whose sign bit is VAL,
4936 or NULL_TREE otherwise. */
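/* For example, for a 32-bit int EXP, VAL = INT_MIN (only the sign bit
   set) makes this return EXP, while any other constant yields
   NULL_TREE unless EXP is an extension from a narrower type whose
   sign bit VAL matches.  */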
4938 tree
4939 sign_bit_p (tree exp, const_tree val)
4941 int width;
4942 tree t;
4944 /* Tree EXP must have an integral type. */
4945 t = TREE_TYPE (exp);
4946 if (! INTEGRAL_TYPE_P (t))
4947 return NULL_TREE;
4949 /* Tree VAL must be an integer constant. */
4950 if (TREE_CODE (val) != INTEGER_CST
4951 || TREE_OVERFLOW (val))
4952 return NULL_TREE;
4954 width = TYPE_PRECISION (t);
4955 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4956 return exp;
4958 /* Handle extension from a narrower type. */
4959 if (TREE_CODE (exp) == NOP_EXPR
4960 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4961 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4963 return NULL_TREE;
4966 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4967 operand is simple enough to be evaluated unconditionally. */
4969 static bool
4970 simple_operand_p (const_tree exp)
4972 /* Strip any conversions that don't change the machine mode. */
4973 STRIP_NOPS (exp);
4975 return (CONSTANT_CLASS_P (exp)
4976 || TREE_CODE (exp) == SSA_NAME
4977 || (DECL_P (exp)
4978 && ! TREE_ADDRESSABLE (exp)
4979 && ! TREE_THIS_VOLATILE (exp)
4980 && ! DECL_NONLOCAL (exp)
4981 /* Don't regard global variables as simple. They may be
4982 allocated in ways unknown to the compiler (shared memory,
4983 #pragma weak, etc). */
4984 && ! TREE_PUBLIC (exp)
4985 && ! DECL_EXTERNAL (exp)
4986 /* Weakrefs are not safe to be read, since they can be NULL.
4987 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4988 have DECL_WEAK flag set. */
4989 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4990 /* Loading a static variable is unduly expensive, but global
4991 registers aren't expensive. */
4992 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4995 /* Determine if an operand is simple enough to be evaluated unconditionally.
4996 In addition to simple_operand_p, we assume that comparisons, conversions,
4997 and logic-not operations are simple, if their operands are simple, too. */
4999 bool
5000 simple_condition_p (tree exp)
5002 enum tree_code code;
5004 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5005 return false;
5007 while (CONVERT_EXPR_P (exp))
5008 exp = TREE_OPERAND (exp, 0);
5010 code = TREE_CODE (exp);
5012 if (TREE_CODE_CLASS (code) == tcc_comparison)
5013 return (simple_operand_p (TREE_OPERAND (exp, 0))
5014 && simple_operand_p (TREE_OPERAND (exp, 1)));
5016 if (code == TRUTH_NOT_EXPR)
5017 return simple_condition_p (TREE_OPERAND (exp, 0));
5019 return simple_operand_p (exp);
5023 /* The following functions are subroutines to fold_range_test and allow it to
5024 try to change a logical combination of comparisons into a range test.
5026 For example, both
5027 X == 2 || X == 3 || X == 4 || X == 5
5028 and
5029 X >= 2 && X <= 5
5030 are converted to
5031 (unsigned) (X - 2) <= 3
5033 We describe each set of comparisons as being either inside or outside
5034 a range, using a variable named like IN_P, and then describe the
5035 range with a lower and upper bound. If one of the bounds is omitted,
5036 it represents either the highest or lowest value of the type.
5038 In the comments below, we represent a range by two numbers in brackets
5039 preceded by a "+" to designate being inside that range, or a "-" to
5040 designate being outside that range, so the condition can be inverted by
5041 flipping the prefix. An omitted bound is represented by a "-". For
5042 example, "- [-, 10]" means being outside the range starting at the lowest
5043 possible value and ending at 10, in other words, being greater than 10.
5044 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5045 always false.
5047 We set up things so that the missing bounds are handled in a consistent
5048 manner so neither a missing bound nor "true" and "false" need to be
5049 handled using a special case. */
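/* Revisiting the example above: X >= 2 && X <= 5 is "+ [2, 5]" and its
   inverse X < 2 || X > 5 is "- [2, 5]".  The final form
   (unsigned) (X - 2) <= 3 is correct because unsigned wraparound sends
   the values below 2 above 3.  */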
5051 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5052 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5053 and UPPER1_P are nonzero if the respective argument is an upper bound
5054 and zero for a lower. TYPE, if nonzero, is the type of the result; it
5055 must be specified for a comparison. ARG1 will be converted to ARG0's
5056 type if both are specified. */
5058 static tree
5059 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5060 tree arg1, int upper1_p)
5062 tree tem;
5063 int result;
5064 int sgn0, sgn1;
5066 /* If neither arg represents infinity, do the normal operation.
5067 Else, if not a comparison, return infinity. Else handle the special
5068 comparison rules. Note that most of the cases below won't occur, but
5069 are handled for consistency. */
5071 if (arg0 != 0 && arg1 != 0)
5073 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5074 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5075 STRIP_NOPS (tem);
5076 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5079 if (TREE_CODE_CLASS (code) != tcc_comparison)
5080 return 0;
5082 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5083 for neither. In real maths, we cannot assume open-ended ranges are
5084 the same. But this is computer arithmetic, where numbers are finite.
5085 We can therefore make the transformation of any unbounded range with
5086 the value Z, Z being greater than any representable number. This permits
5087 us to treat unbounded ranges as equal. */
5088 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5089 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5090 switch (code)
5092 case EQ_EXPR:
5093 result = sgn0 == sgn1;
5094 break;
5095 case NE_EXPR:
5096 result = sgn0 != sgn1;
5097 break;
5098 case LT_EXPR:
5099 result = sgn0 < sgn1;
5100 break;
5101 case LE_EXPR:
5102 result = sgn0 <= sgn1;
5103 break;
5104 case GT_EXPR:
5105 result = sgn0 > sgn1;
5106 break;
5107 case GE_EXPR:
5108 result = sgn0 >= sgn1;
5109 break;
5110 default:
5111 gcc_unreachable ();
5114 return constant_boolean_node (result, type);
5117 /* Helper routine for make_range. Perform one step for it; return the
5118 new expression if the loop should continue or NULL_TREE if it should
5119 stop. */
5121 tree
5122 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5123 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5124 bool *strict_overflow_p)
5126 tree arg0_type = TREE_TYPE (arg0);
5127 tree n_low, n_high, low = *p_low, high = *p_high;
5128 int in_p = *p_in_p, n_in_p;
5130 switch (code)
5132 case TRUTH_NOT_EXPR:
5133 /* We can only do something if the range is testing for zero. */
5134 if (low == NULL_TREE || high == NULL_TREE
5135 || ! integer_zerop (low) || ! integer_zerop (high))
5136 return NULL_TREE;
5137 *p_in_p = ! in_p;
5138 return arg0;
5140 case EQ_EXPR: case NE_EXPR:
5141 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5142 /* We can only do something if the range is testing for zero
5143 and if the second operand is an integer constant. Note that
5144 saying something is "in" the range we make is done by
5145 complementing IN_P since it will set in the initial case of
5146 being not equal to zero; "out" is leaving it alone. */
5147 if (low == NULL_TREE || high == NULL_TREE
5148 || ! integer_zerop (low) || ! integer_zerop (high)
5149 || TREE_CODE (arg1) != INTEGER_CST)
5150 return NULL_TREE;
5152 switch (code)
5154 case NE_EXPR: /* - [c, c] */
5155 low = high = arg1;
5156 break;
5157 case EQ_EXPR: /* + [c, c] */
5158 in_p = ! in_p, low = high = arg1;
5159 break;
5160 case GT_EXPR: /* - [-, c] */
5161 low = 0, high = arg1;
5162 break;
5163 case GE_EXPR: /* + [c, -] */
5164 in_p = ! in_p, low = arg1, high = 0;
5165 break;
5166 case LT_EXPR: /* - [c, -] */
5167 low = arg1, high = 0;
5168 break;
5169 case LE_EXPR: /* + [-, c] */
5170 in_p = ! in_p, low = 0, high = arg1;
5171 break;
5172 default:
5173 gcc_unreachable ();
5176 /* If this is an unsigned comparison, we also know that EXP is
5177 greater than or equal to zero. We base the range tests we make
5178 on that fact, so we record it here so we can parse existing
5179 range tests. We test arg0_type since often the return type
5180 of, e.g. EQ_EXPR, is boolean. */
5181 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5183 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5184 in_p, low, high, 1,
5185 build_int_cst (arg0_type, 0),
5186 NULL_TREE))
5187 return NULL_TREE;
5189 in_p = n_in_p, low = n_low, high = n_high;
5191 /* If the high bound is missing, but we have a nonzero low
5192 bound, reverse the range so it goes from zero to the low bound
5193 minus 1. */
5194 if (high == 0 && low && ! integer_zerop (low))
5196 in_p = ! in_p;
5197 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5198 build_int_cst (TREE_TYPE (low), 1), 0);
5199 low = build_int_cst (arg0_type, 0);
5203 *p_low = low;
5204 *p_high = high;
5205 *p_in_p = in_p;
5206 return arg0;
5208 case NEGATE_EXPR:
5209 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5210 low and high are non-NULL, then normalize will DTRT. */
5211 if (!TYPE_UNSIGNED (arg0_type)
5212 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5214 if (low == NULL_TREE)
5215 low = TYPE_MIN_VALUE (arg0_type);
5216 if (high == NULL_TREE)
5217 high = TYPE_MAX_VALUE (arg0_type);
5220 /* (-x) IN [a,b] -> x in [-b, -a] */
5221 n_low = range_binop (MINUS_EXPR, exp_type,
5222 build_int_cst (exp_type, 0),
5223 0, high, 1);
5224 n_high = range_binop (MINUS_EXPR, exp_type,
5225 build_int_cst (exp_type, 0),
5226 0, low, 0);
5227 if (n_high != 0 && TREE_OVERFLOW (n_high))
5228 return NULL_TREE;
5229 goto normalize;
5231 case BIT_NOT_EXPR:
5232 /* ~ X -> -X - 1 */
5233 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5234 build_int_cst (exp_type, 1));
5236 case PLUS_EXPR:
5237 case MINUS_EXPR:
5238 if (TREE_CODE (arg1) != INTEGER_CST)
5239 return NULL_TREE;
5241 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5242 move a constant to the other side. */
5243 if (!TYPE_UNSIGNED (arg0_type)
5244 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5245 return NULL_TREE;
5247 /* If EXP is signed, any overflow in the computation is undefined,
5248 so we don't worry about it so long as our computations on
5249 the bounds don't overflow. For unsigned, overflow is defined
5250 and this is exactly the right thing. */
5251 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5252 arg0_type, low, 0, arg1, 0);
5253 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5254 arg0_type, high, 1, arg1, 0);
5255 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5256 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5257 return NULL_TREE;
5259 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5260 *strict_overflow_p = true;
5262 normalize:
5263 /* Check for an unsigned range which has wrapped around the maximum
5264 value thus making n_high < n_low, and normalize it. */
5265 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5267 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5268 build_int_cst (TREE_TYPE (n_high), 1), 0);
5269 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5270 build_int_cst (TREE_TYPE (n_low), 1), 0);
5272 /* If the range is of the form +/- [ x+1, x ], we won't
5273 be able to normalize it. But then, it represents the
5274 whole range or the empty set, so make it
5275 +/- [ -, - ]. */
5276 if (tree_int_cst_equal (n_low, low)
5277 && tree_int_cst_equal (n_high, high))
5278 low = high = 0;
5279 else
5280 in_p = ! in_p;
5282 else
5283 low = n_low, high = n_high;
5285 *p_low = low;
5286 *p_high = high;
5287 *p_in_p = in_p;
5288 return arg0;
5290 CASE_CONVERT:
5291 case NON_LVALUE_EXPR:
5292 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5293 return NULL_TREE;
5295 if (! INTEGRAL_TYPE_P (arg0_type)
5296 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5297 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5298 return NULL_TREE;
5300 n_low = low, n_high = high;
5302 if (n_low != 0)
5303 n_low = fold_convert_loc (loc, arg0_type, n_low);
5305 if (n_high != 0)
5306 n_high = fold_convert_loc (loc, arg0_type, n_high);
5308 /* If we're converting arg0 from an unsigned type to exp,
5309 a signed type, we will be doing the comparison as unsigned.
5310 The tests above have already verified that LOW and HIGH
5311 are both positive.
5313 So we have to ensure that we will handle large unsigned
5314 values the same way that the current signed bounds treat
5315 negative values. */
5317 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5319 tree high_positive;
5320 tree equiv_type;
5321 /* For fixed-point modes, we need to pass the saturating flag
5322 as the 2nd parameter. */
5323 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5324 equiv_type
5325 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5326 TYPE_SATURATING (arg0_type));
5327 else
5328 equiv_type
5329 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5331 /* A range without an upper bound is, naturally, unbounded.
5332 Since convert would have cropped a very large value, use
5333 the max value for the destination type. */
5334 high_positive
5335 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5336 : TYPE_MAX_VALUE (arg0_type);
5338 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5339 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5340 fold_convert_loc (loc, arg0_type,
5341 high_positive),
5342 build_int_cst (arg0_type, 1));
5344 /* If the low bound is specified, "and" the range with the
5345 range for which the original unsigned value will be
5346 positive. */
5347 if (low != 0)
5349 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5350 1, fold_convert_loc (loc, arg0_type,
5351 integer_zero_node),
5352 high_positive))
5353 return NULL_TREE;
5355 in_p = (n_in_p == in_p);
5357 else
5359 /* Otherwise, "or" the range with the range of the input
5360 that will be interpreted as negative. */
5361 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5362 1, fold_convert_loc (loc, arg0_type,
5363 integer_zero_node),
5364 high_positive))
5365 return NULL_TREE;
5367 in_p = (in_p != n_in_p);
5371 /* Otherwise, if we are converting arg0 from a signed type to exp,
5372 an unsigned type, we will do the comparison as signed. If
5373 high is non-NULL, we punt above if it doesn't fit in the signed
5374 type, so if we get through here, +[-, high] or +[low, high] are
5375 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5376 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5377 high is not, the +[low, -] range is equivalent to union of
5378 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5379 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5380 low being 0, which should be treated as [-, -]. */
5381 else if (TYPE_UNSIGNED (exp_type)
5382 && !TYPE_UNSIGNED (arg0_type)
5383 && low
5384 && !high)
5386 if (integer_zerop (low))
5387 n_low = NULL_TREE;
5388 else
5390 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5391 n_low, build_int_cst (arg0_type, -1));
5392 n_low = build_zero_cst (arg0_type);
5393 in_p = !in_p;
5397 *p_low = n_low;
5398 *p_high = n_high;
5399 *p_in_p = in_p;
5400 return arg0;
5402 default:
5403 return NULL_TREE;
5407 /* Given EXP, a logical expression, set the range it is testing into
5408 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5409 actually being tested. *PLOW and *PHIGH will be made of the same
5410 type as the returned expression. If EXP is not a comparison, we
5411 will most likely not be returning a useful value and range. Set
5412 *STRICT_OVERFLOW_P to true if the return value is only valid
5413 because signed overflow is undefined; otherwise, do not change
5414 *STRICT_OVERFLOW_P. */
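/* As a sketch, for EXP = X + 1 > 5 with signed X and undefined
   overflow: the first step records "- [-, 5]" for X + 1, the next
   moves the constant across the addition giving "- [-, 4]", so X is
   returned with in_p = 0, low unbounded and high = 4 (i.e. X > 4),
   and *STRICT_OVERFLOW_P is set.  */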
5416 tree
5417 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5418 bool *strict_overflow_p)
5420 enum tree_code code;
5421 tree arg0, arg1 = NULL_TREE;
5422 tree exp_type, nexp;
5423 int in_p;
5424 tree low, high;
5425 location_t loc = EXPR_LOCATION (exp);
5427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5428 and see if we can refine the range. Some of the cases below may not
5429 happen, but it doesn't seem worth worrying about this. We "continue"
5430 the outer loop when we've changed something; otherwise we "break"
5431 the switch, which will "break" the while. */
5433 in_p = 0;
5434 low = high = build_int_cst (TREE_TYPE (exp), 0);
5436 while (1)
5438 code = TREE_CODE (exp);
5439 exp_type = TREE_TYPE (exp);
5440 arg0 = NULL_TREE;
5442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5444 if (TREE_OPERAND_LENGTH (exp) > 0)
5445 arg0 = TREE_OPERAND (exp, 0);
5446 if (TREE_CODE_CLASS (code) == tcc_binary
5447 || TREE_CODE_CLASS (code) == tcc_comparison
5448 || (TREE_CODE_CLASS (code) == tcc_expression
5449 && TREE_OPERAND_LENGTH (exp) > 1))
5450 arg1 = TREE_OPERAND (exp, 1);
5452 if (arg0 == NULL_TREE)
5453 break;
5455 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5456 &high, &in_p, strict_overflow_p);
5457 if (nexp == NULL_TREE)
5458 break;
5459 exp = nexp;
5462 /* If EXP is a constant, we can evaluate whether this is true or false. */
5463 if (TREE_CODE (exp) == INTEGER_CST)
5465 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5466 exp, 0, low, 0))
5467 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5468 exp, 1, high, 1)));
5469 low = high = 0;
5470 exp = 0;
5473 *pin_p = in_p, *plow = low, *phigh = high;
5474 return exp;
5477 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5478 a bitwise check, i.e. when
5479 LOW == 0xXX...X00...0
5480 HIGH == 0xXX...X11...1
5481 Return corresponding mask in MASK and stem in VALUE. */
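/* Worked example: LOW = 0x30 and HIGH = 0x37 match the pattern, since
   lo ^ hi = 0x07 is a low-order run of ones that LOW does not touch;
   the check X in [0x30, 0x37] becomes (X & ~0x07) == 0x30, so
   *MASK = ~0x07 and *VALUE = 0x30.  */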
5483 static bool
5484 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5485 tree *value)
5487 if (TREE_CODE (low) != INTEGER_CST
5488 || TREE_CODE (high) != INTEGER_CST)
5489 return false;
5491 unsigned prec = TYPE_PRECISION (type);
5492 wide_int lo = wi::to_wide (low, prec);
5493 wide_int hi = wi::to_wide (high, prec);
5495 wide_int end_mask = lo ^ hi;
5496 if ((end_mask & (end_mask + 1)) != 0
5497 || (lo & end_mask) != 0)
5498 return false;
5500 wide_int stem_mask = ~end_mask;
5501 wide_int stem = lo & stem_mask;
5502 if (stem != (hi & stem_mask))
5503 return false;
5505 *mask = wide_int_to_tree (type, stem_mask);
5506 *value = wide_int_to_tree (type, stem);
5508 return true;
5511 /* Helper routine for build_range_check and match.pd. Return the type to
5512 perform the check or NULL if it shouldn't be optimized. */
5514 tree
5515 range_check_type (tree etype)
5517 /* First make sure that arithmetic in this type is valid, then make sure
5518 that it wraps around. */
5519 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5520 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5522 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5524 tree utype, minv, maxv;
5526 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5527 for the type in question, as we rely on this here. */
5528 utype = unsigned_type_for (etype);
5529 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5530 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5531 build_int_cst (TREE_TYPE (maxv), 1), 1);
5532 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5534 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5535 minv, 1, maxv, 1)))
5536 etype = utype;
5537 else
5538 return NULL_TREE;
5540 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5541 etype = unsigned_type_for (etype);
5542 return etype;
5545 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5546 type, TYPE, return an expression to test if EXP is in (or out of, depending
5547 on IN_P) the range. Return 0 if the test couldn't be created. */
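/* For example, with IN_P nonzero: LOW == HIGH yields EXP == LOW, a
   missing LOW yields EXP <= HIGH, a missing HIGH yields EXP >= LOW,
   and a general [LOW, HIGH] is reduced by the recursive call below to
   (unsigned) (EXP - LOW) <= HIGH - LOW.  */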
5549 tree
5550 build_range_check (location_t loc, tree type, tree exp, int in_p,
5551 tree low, tree high)
5553 tree etype = TREE_TYPE (exp), mask, value;
5555 /* Disable this optimization for function pointer expressions
5556 on targets that require function pointer canonicalization. */
5557 if (targetm.have_canonicalize_funcptr_for_compare ()
5558 && POINTER_TYPE_P (etype)
5559 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5560 return NULL_TREE;
5562 if (! in_p)
5564 value = build_range_check (loc, type, exp, 1, low, high);
5565 if (value != 0)
5566 return invert_truthvalue_loc (loc, value);
5568 return 0;
5571 if (low == 0 && high == 0)
5572 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5574 if (low == 0)
5575 return fold_build2_loc (loc, LE_EXPR, type, exp,
5576 fold_convert_loc (loc, etype, high));
5578 if (high == 0)
5579 return fold_build2_loc (loc, GE_EXPR, type, exp,
5580 fold_convert_loc (loc, etype, low));
5582 if (operand_equal_p (low, high, 0))
5583 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5584 fold_convert_loc (loc, etype, low));
5586 if (TREE_CODE (exp) == BIT_AND_EXPR
5587 && maskable_range_p (low, high, etype, &mask, &value))
5588 return fold_build2_loc (loc, EQ_EXPR, type,
5589 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5590 exp, mask),
5591 value);
5593 if (integer_zerop (low))
5595 if (! TYPE_UNSIGNED (etype))
5597 etype = unsigned_type_for (etype);
5598 high = fold_convert_loc (loc, etype, high);
5599 exp = fold_convert_loc (loc, etype, exp);
5601 return build_range_check (loc, type, exp, 1, 0, high);
5604 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5605 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5607 int prec = TYPE_PRECISION (etype);
5609 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5611 if (TYPE_UNSIGNED (etype))
5613 tree signed_etype = signed_type_for (etype);
5614 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5615 etype
5616 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5617 else
5618 etype = signed_etype;
5619 exp = fold_convert_loc (loc, etype, exp);
5621 return fold_build2_loc (loc, GT_EXPR, type, exp,
5622 build_int_cst (etype, 0));
5626 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5627 This requires wrap-around arithmetic for the type of the expression. */
5628 etype = range_check_type (etype);
5629 if (etype == NULL_TREE)
5630 return NULL_TREE;
5632 high = fold_convert_loc (loc, etype, high);
5633 low = fold_convert_loc (loc, etype, low);
5634 exp = fold_convert_loc (loc, etype, exp);
5636 value = const_binop (MINUS_EXPR, high, low);
5638 if (value != 0 && !TREE_OVERFLOW (value))
5639 return build_range_check (loc, type,
5640 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5641 1, build_int_cst (etype, 0), value);
5643 return 0;
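/* Worked example (illustrative only, not from the original source): with a
   signed int C, the digit test
     C >= '0' && C <= '9'
   becomes, after the subtraction above in the unsigned range-check type,
     (unsigned) C - 48 <= 9
   collapsing two comparisons into one.  */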
5646 /* Return the predecessor of VAL in its type, handling the infinite case. */
5648 static tree
5649 range_predecessor (tree val)
5651 tree type = TREE_TYPE (val);
5653 if (INTEGRAL_TYPE_P (type)
5654 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5655 return 0;
5656 else
5657 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5658 build_int_cst (TREE_TYPE (val), 1), 0);
5661 /* Return the successor of VAL in its type, handling the infinite case. */
5663 static tree
5664 range_successor (tree val)
5666 tree type = TREE_TYPE (val);
5668 if (INTEGRAL_TYPE_P (type)
5669 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5670 return 0;
5671 else
5672 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5673 build_int_cst (TREE_TYPE (val), 1), 0);
5676 /* Given two ranges, see if we can merge them into one. Return true if we
5677 can, false if we can't. Store the output range in the specified parameters. */
5679 bool
5680 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5681 tree high0, int in1_p, tree low1, tree high1)
5683 bool no_overlap;
5684 int subset;
5685 int temp;
5686 tree tem;
5687 int in_p;
5688 tree low, high;
5689 int lowequal = ((low0 == 0 && low1 == 0)
5690 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5691 low0, 0, low1, 0)));
5692 int highequal = ((high0 == 0 && high1 == 0)
5693 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5694 high0, 1, high1, 1)));
5696 /* Make range 0 be the range that starts first, or that ends last if they
5697 start at the same value. Swap the two ranges if range 0 isn't that one. */
5698 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5699 low0, 0, low1, 0))
5700 || (lowequal
5701 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5702 high1, 1, high0, 1))))
5704 temp = in0_p, in0_p = in1_p, in1_p = temp;
5705 tem = low0, low0 = low1, low1 = tem;
5706 tem = high0, high0 = high1, high1 = tem;
5709 /* If the second range is a != high1 test where high1 is the maximum
5710 value of the type, try first merging with the < high1 range. */
5711 if (low1
5712 && high1
5713 && TREE_CODE (low1) == INTEGER_CST
5714 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5715 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5716 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5717 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5718 && operand_equal_p (low1, high1, 0))
5720 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5721 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5722 !in1_p, NULL_TREE, range_predecessor (low1)))
5723 return true;
5724 /* Similarly, if the second range is a != low1 test where low1 is the
5725 minimum value of the type, try first merging with the > low1 range. */
5726 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5727 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5728 !in1_p, range_successor (low1), NULL_TREE))
5729 return true;
5732 /* Now flag two cases, whether the ranges are disjoint or whether the
5733 second range is totally subsumed in the first. Note that the tests
5734 below are simplified by the ones above. */
5735 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5736 high0, 1, low1, 0));
5737 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5738 high1, 1, high0, 1));
5740 /* We now have four cases, depending on whether we are including or
5741 excluding the two ranges. */
5742 if (in0_p && in1_p)
5744 /* If they don't overlap, the result is false. If the second range
5745 is a subset it is the result. Otherwise, the range is from the start
5746 of the second to the end of the first. */
5747 if (no_overlap)
5748 in_p = 0, low = high = 0;
5749 else if (subset)
5750 in_p = 1, low = low1, high = high1;
5751 else
5752 in_p = 1, low = low1, high = high0;
5755 else if (in0_p && ! in1_p)
5757 /* If they don't overlap, the result is the first range. If they are
5758 equal, the result is false. If the second range is a subset of the
5759 first, and the ranges begin at the same place, we go from just after
5760 the end of the second range to the end of the first. If the second
5761 range is not a subset of the first, or if it is a subset and both
5762 ranges end at the same place, the range starts at the start of the
5763 first range and ends just before the second range.
5764 Otherwise, we can't describe this as a single range. */
5765 if (no_overlap)
5766 in_p = 1, low = low0, high = high0;
5767 else if (lowequal && highequal)
5768 in_p = 0, low = high = 0;
5769 else if (subset && lowequal)
5771 low = range_successor (high1);
5772 high = high0;
5773 in_p = 1;
5774 if (low == 0)
5776 /* We are in the weird situation where high0 > high1 but
5777 high1 has no successor. Punt. */
5778 return 0;
5781 else if (! subset || highequal)
5783 low = low0;
5784 high = range_predecessor (low1);
5785 in_p = 1;
5786 if (high == 0)
5788 /* low0 < low1 but low1 has no predecessor. Punt. */
5789 return 0;
5792 else
5793 return 0;
5796 else if (! in0_p && in1_p)
5798 /* If they don't overlap, the result is the second range. If the second
5799 is a subset of the first, the result is false. Otherwise,
5800 the range starts just after the first range and ends at the
5801 end of the second. */
5802 if (no_overlap)
5803 in_p = 1, low = low1, high = high1;
5804 else if (subset || highequal)
5805 in_p = 0, low = high = 0;
5806 else
5808 low = range_successor (high0);
5809 high = high1;
5810 in_p = 1;
5811 if (low == 0)
5813 /* high1 > high0 but high0 has no successor. Punt. */
5814 return 0;
5819 else
5821 /* The case where we are excluding both ranges. Here the complex case
5822 is if they don't overlap. In that case, the only time we have a
5823 range is if they are adjacent. If the second is a subset of the
5824 first, the result is the first. Otherwise, the range to exclude
5825 starts at the beginning of the first range and ends at the end of the
5826 second. */
5827 if (no_overlap)
5829 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5830 range_successor (high0),
5831 1, low1, 0)))
5832 in_p = 0, low = low0, high = high1;
5833 else
5835 /* Canonicalize - [min, x] into - [-, x]. */
5836 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5837 switch (TREE_CODE (TREE_TYPE (low0)))
5839 case ENUMERAL_TYPE:
5840 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5841 GET_MODE_BITSIZE
5842 (TYPE_MODE (TREE_TYPE (low0)))))
5843 break;
5844 /* FALLTHROUGH */
5845 case INTEGER_TYPE:
5846 if (tree_int_cst_equal (low0,
5847 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5848 low0 = 0;
5849 break;
5850 case POINTER_TYPE:
5851 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5852 && integer_zerop (low0))
5853 low0 = 0;
5854 break;
5855 default:
5856 break;
5859 /* Canonicalize - [x, max] into - [x, -]. */
5860 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5861 switch (TREE_CODE (TREE_TYPE (high1)))
5863 case ENUMERAL_TYPE:
5864 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5865 GET_MODE_BITSIZE
5866 (TYPE_MODE (TREE_TYPE (high1)))))
5867 break;
5868 /* FALLTHROUGH */
5869 case INTEGER_TYPE:
5870 if (tree_int_cst_equal (high1,
5871 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5872 high1 = 0;
5873 break;
5874 case POINTER_TYPE:
5875 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5876 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5877 high1, 1,
5878 build_int_cst (TREE_TYPE (high1), 1),
5879 1)))
5880 high1 = 0;
5881 break;
5882 default:
5883 break;
5886 /* The ranges might also be adjacent between the maximum and
5887 minimum values of the given type. For
5888 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5889 return + [x + 1, y - 1]. */
5890 if (low0 == 0 && high1 == 0)
5892 low = range_successor (high0);
5893 high = range_predecessor (low1);
5894 if (low == 0 || high == 0)
5895 return 0;
5897 in_p = 1;
5899 else
5900 return 0;
5903 else if (subset)
5904 in_p = 0, low = low0, high = high0;
5905 else
5906 in_p = 0, low = low0, high = high1;
5909 *pin_p = in_p, *plow = low, *phigh = high;
5910 return 1;
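/* Worked examples (illustrative only, not from the original source):
     + [5, 10] merged with + [8, 20] gives + [8, 10], the intersection;
     - [-, 9] merged with - [10, -] gives - [-, -], i.e. always false,
   because the two excluded ranges are adjacent and cover the whole type.  */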
5914 /* Subroutine of fold, looking inside expressions of the form
5915 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5916 are the three operands of the COND_EXPR. This function is
5917 also used to optimize A op B ? C : A, by reversing the
5918 comparison first.
5920 Return a folded expression whose code is not a COND_EXPR
5921 anymore, or NULL_TREE if no folding opportunity is found. */
5923 static tree
5924 fold_cond_expr_with_comparison (location_t loc, tree type,
5925 enum tree_code comp_code,
5926 tree arg00, tree arg01, tree arg1, tree arg2)
5928 tree arg1_type = TREE_TYPE (arg1);
5929 tree tem;
5931 STRIP_NOPS (arg1);
5932 STRIP_NOPS (arg2);
5934 /* If we have A op 0 ? A : -A, consider applying the following
5935 transformations:
5937 A == 0? A : -A same as -A
5938 A != 0? A : -A same as A
5939 A >= 0? A : -A same as abs (A)
5940 A > 0? A : -A same as abs (A)
5941 A <= 0? A : -A same as -abs (A)
5942 A < 0? A : -A same as -abs (A)
5944 None of these transformations work for modes with signed
5945 zeros. If A is +/-0, the first two transformations will
5946 change the sign of the result (from +0 to -0, or vice
5947 versa). The last four will fix the sign of the result,
5948 even though the original expressions could be positive or
5949 negative, depending on the sign of A.
5951 Note that all these transformations are correct if A is
5952 NaN, since the two alternatives (A and -A) are also NaNs. */
5953 if (!HONOR_SIGNED_ZEROS (type)
5954 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5955 ? real_zerop (arg01)
5956 : integer_zerop (arg01))
5957 && ((TREE_CODE (arg2) == NEGATE_EXPR
5958 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5959 /* In the case that A is of the form X-Y, '-A' (arg2) may
5960 have already been folded to Y-X, check for that. */
5961 || (TREE_CODE (arg1) == MINUS_EXPR
5962 && TREE_CODE (arg2) == MINUS_EXPR
5963 && operand_equal_p (TREE_OPERAND (arg1, 0),
5964 TREE_OPERAND (arg2, 1), 0)
5965 && operand_equal_p (TREE_OPERAND (arg1, 1),
5966 TREE_OPERAND (arg2, 0), 0))))
5967 switch (comp_code)
5969 case EQ_EXPR:
5970 case UNEQ_EXPR:
5971 tem = fold_convert_loc (loc, arg1_type, arg1);
5972 return fold_convert_loc (loc, type, negate_expr (tem));
5973 case NE_EXPR:
5974 case LTGT_EXPR:
5975 return fold_convert_loc (loc, type, arg1);
5976 case UNGE_EXPR:
5977 case UNGT_EXPR:
5978 if (flag_trapping_math)
5979 break;
5980 /* Fall through. */
5981 case GE_EXPR:
5982 case GT_EXPR:
5983 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5984 break;
5985 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5986 return fold_convert_loc (loc, type, tem);
5987 case UNLE_EXPR:
5988 case UNLT_EXPR:
5989 if (flag_trapping_math)
5990 break;
5991 /* FALLTHRU */
5992 case LE_EXPR:
5993 case LT_EXPR:
5994 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5995 break;
5996 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5997 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5999 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
6000 is not: it invokes undefined behavior both in abs and in the
6001 negation of its result. So, use ABSU_EXPR instead. */
6002 tree utype = unsigned_type_for (TREE_TYPE (arg1));
6003 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6004 tem = negate_expr (tem);
6005 return fold_convert_loc (loc, type, tem);
6007 else
6009 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6010 return negate_expr (fold_convert_loc (loc, type, tem));
6012 default:
6013 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6014 break;
6017 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6018 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6019 both transformations are correct when A is NaN: A != 0
6020 is then true, and A == 0 is false. */
6022 if (!HONOR_SIGNED_ZEROS (type)
6023 && integer_zerop (arg01) && integer_zerop (arg2))
6025 if (comp_code == NE_EXPR)
6026 return fold_convert_loc (loc, type, arg1);
6027 else if (comp_code == EQ_EXPR)
6028 return build_zero_cst (type);
6031 /* Try some transformations of A op B ? A : B.
6033 A == B? A : B same as B
6034 A != B? A : B same as A
6035 A >= B? A : B same as max (A, B)
6036 A > B? A : B same as max (B, A)
6037 A <= B? A : B same as min (A, B)
6038 A < B? A : B same as min (B, A)
6040 As above, these transformations don't work in the presence
6041 of signed zeros. For example, if A and B are zeros of
6042 opposite sign, the first two transformations will change
6043 the sign of the result. In the last four, the original
6044 expressions give different results for (A=+0, B=-0) and
6045 (A=-0, B=+0), but the transformed expressions do not.
6047 The first two transformations are correct if either A or B
6048 is a NaN. In the first transformation, the condition will
6049 be false, and B will indeed be chosen. In the case of the
6050 second transformation, the condition A != B will be true,
6051 and A will be chosen.
6053 The conversions to max() and min() are not correct if B is
6054 a number and A is not. The conditions in the original
6055 expressions will be false, so all four give B. The min()
6056 and max() versions would give a NaN instead. */
6057 if (!HONOR_SIGNED_ZEROS (type)
6058 && operand_equal_for_comparison_p (arg01, arg2)
6059 /* Avoid these transformations if the COND_EXPR may be used
6060 as an lvalue in the C++ front-end. PR c++/19199. */
6061 && (in_gimple_form
6062 || VECTOR_TYPE_P (type)
6063 || (! lang_GNU_CXX ()
6064 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6065 || ! maybe_lvalue_p (arg1)
6066 || ! maybe_lvalue_p (arg2)))
6068 tree comp_op0 = arg00;
6069 tree comp_op1 = arg01;
6070 tree comp_type = TREE_TYPE (comp_op0);
6072 switch (comp_code)
6074 case EQ_EXPR:
6075 return fold_convert_loc (loc, type, arg2);
6076 case NE_EXPR:
6077 return fold_convert_loc (loc, type, arg1);
6078 case LE_EXPR:
6079 case LT_EXPR:
6080 case UNLE_EXPR:
6081 case UNLT_EXPR:
6082 /* In C++ a ?: expression can be an lvalue, so put the
6083 operand which will be used if they are equal first
6084 so that we can convert this back to the
6085 corresponding COND_EXPR. */
6086 if (!HONOR_NANS (arg1))
6088 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6089 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6090 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6091 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6092 : fold_build2_loc (loc, MIN_EXPR, comp_type,
6093 comp_op1, comp_op0);
6094 return fold_convert_loc (loc, type, tem);
6096 break;
6097 case GE_EXPR:
6098 case GT_EXPR:
6099 case UNGE_EXPR:
6100 case UNGT_EXPR:
6101 if (!HONOR_NANS (arg1))
6103 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6104 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6105 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6106 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6107 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6108 comp_op1, comp_op0);
6109 return fold_convert_loc (loc, type, tem);
6111 break;
6112 case UNEQ_EXPR:
6113 if (!HONOR_NANS (arg1))
6114 return fold_convert_loc (loc, type, arg2);
6115 break;
6116 case LTGT_EXPR:
6117 if (!HONOR_NANS (arg1))
6118 return fold_convert_loc (loc, type, arg1);
6119 break;
6120 default:
6121 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6122 break;
6126 return NULL_TREE;
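/* Worked example (illustrative only, not from the original source): when
   signed zeros need not be honored for a float A,
     A >= 0 ? A : -A   folds to   abs (A)
     A <= 0 ? A : -A   folds to   -abs (A)
   while with signed zeros honored neither fold is done: for A == -0.0 the
   first form yields -0.0 but abs (A) yields +0.0.  */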
6131 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6132 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6133 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6134 false) >= 2)
6135 #endif
6137 /* EXP is some logical combination of boolean tests. See if we can
6138 merge it into some range test. Return the new tree if so. */
6140 static tree
6141 fold_range_test (location_t loc, enum tree_code code, tree type,
6142 tree op0, tree op1)
6144 int or_op = (code == TRUTH_ORIF_EXPR
6145 || code == TRUTH_OR_EXPR);
6146 int in0_p, in1_p, in_p;
6147 tree low0, low1, low, high0, high1, high;
6148 bool strict_overflow_p = false;
6149 tree tem, lhs, rhs;
6150 const char * const warnmsg = G_("assuming signed overflow does not occur "
6151 "when simplifying range test");
6153 if (!INTEGRAL_TYPE_P (type))
6154 return 0;
6156 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6157 /* If op0 is known true or false and this is a short-circuiting
6158 operation we must not merge with op1 since that makes side-effects
6159 unconditional. So special-case this. */
6160 if (!lhs
6161 && ((code == TRUTH_ORIF_EXPR && in0_p)
6162 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6163 return op0;
6164 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6166 /* If this is an OR operation, invert both sides; we will invert
6167 again at the end. */
6168 if (or_op)
6169 in0_p = ! in0_p, in1_p = ! in1_p;
6171 /* If both expressions are the same, if we can merge the ranges, and we
6172 can build the range test, return it or its inversion. If one of the
6173 ranges is always true or always false, consider it to be the same
6174 expression as the other. */
6175 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6176 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6177 in1_p, low1, high1)
6178 && (tem = (build_range_check (loc, type,
6179 lhs != 0 ? lhs
6180 : rhs != 0 ? rhs : integer_zero_node,
6181 in_p, low, high))) != 0)
6183 if (strict_overflow_p)
6184 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6185 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6188 /* On machines where branches are expensive, if this is a
6189 short-circuited branch and the underlying object on both sides
6190 is the same, make a non-short-circuit operation. */
6191 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6192 if (param_logical_op_non_short_circuit != -1)
6193 logical_op_non_short_circuit
6194 = param_logical_op_non_short_circuit;
6195 if (logical_op_non_short_circuit
6196 && !sanitize_coverage_p ()
6197 && lhs != 0 && rhs != 0
6198 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6199 && operand_equal_p (lhs, rhs, 0))
6201 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6202 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6203 which cases we can't do this. */
6204 if (simple_operand_p (lhs))
6205 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6206 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6207 type, op0, op1);
6209 else if (!lang_hooks.decls.global_bindings_p ()
6210 && !CONTAINS_PLACEHOLDER_P (lhs))
6212 tree common = save_expr (lhs);
6214 if ((lhs = build_range_check (loc, type, common,
6215 or_op ? ! in0_p : in0_p,
6216 low0, high0)) != 0
6217 && (rhs = build_range_check (loc, type, common,
6218 or_op ? ! in1_p : in1_p,
6219 low1, high1)) != 0)
6221 if (strict_overflow_p)
6222 fold_overflow_warning (warnmsg,
6223 WARN_STRICT_OVERFLOW_COMPARISON);
6224 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6225 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6226 type, lhs, rhs);
6231 return 0;
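/* Worked example (illustrative only, not from the original source): the
   classic character-class test
     ch >= '0' && ch <= '9'
   is recognized as two single-sided ranges, merged by merge_ranges into
   [48, 57], and handed to build_range_check, which emits one unsigned
   comparison instead of two conditional branches.  */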
6234 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6235 bit value. Arrange things so the extra bits will be set to zero if and
6236 only if C is signed-extended to its full width. If MASK is nonzero,
6237 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6239 static tree
6240 unextend (tree c, int p, int unsignedp, tree mask)
6242 tree type = TREE_TYPE (c);
6243 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6244 tree temp;
6246 if (p == modesize || unsignedp)
6247 return c;
6249 /* We work by getting just the sign bit into the low-order bit, then
6250 into the high-order bit, then sign-extend. We then XOR that value
6251 with C. */
6252 temp = build_int_cst (TREE_TYPE (c),
6253 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6255 /* We must use a signed type in order to get an arithmetic right shift.
6256 However, we must also avoid introducing accidental overflows, so that
6257 a subsequent call to integer_zerop will work. Hence we must
6258 do the type conversion here. At this point, the constant is either
6259 zero or one, and the conversion to a signed type can never overflow.
6260 We could get an overflow if this conversion is done anywhere else. */
6261 if (TYPE_UNSIGNED (type))
6262 temp = fold_convert (signed_type_for (type), temp);
6264 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6265 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6266 if (mask != 0)
6267 temp = const_binop (BIT_AND_EXPR, temp,
6268 fold_convert (TREE_TYPE (c), mask));
6269 /* If necessary, convert the type back to match the type of C. */
6270 if (TYPE_UNSIGNED (type))
6271 temp = fold_convert (type, temp);
6273 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
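/* Worked example (illustrative only, not from the original source): for
   P == 4 in an 8-bit mode, the sign-extended constant C == 0xfa gives
     temp = 1                            (bit P-1 of C)
     temp = (temp << 7) >> 3             (arithmetic shift: temp == 0xf0)
     C ^ temp == 0x0a                    (extra bits all zero)
   whereas the zero-extended C == 0x0a yields 0xfa, with the extra bits
   set.  */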
6276 /* For an expression that has the form
6277 (A && B) || ~B
6278 or
6279 (A || B) && ~B,
6280 we can drop one of the inner expressions and simplify to
6281 A || ~B
6282 or
6283 A && ~B
6284 LOC is the location of the resulting expression. OP is the inner
6285 logical operation, the left-hand side in the examples above, while CMPOP
6286 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6287 removing a condition that guards another, as in
6288 (A != NULL && A->...) || A == NULL
6289 which we must not transform. If RHS_ONLY is true, only eliminate the
6290 right-most operand of the inner logical operation. */
6292 static tree
6293 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6294 bool rhs_only)
6296 tree type = TREE_TYPE (cmpop);
6297 enum tree_code code = TREE_CODE (cmpop);
6298 enum tree_code truthop_code = TREE_CODE (op);
6299 tree lhs = TREE_OPERAND (op, 0);
6300 tree rhs = TREE_OPERAND (op, 1);
6301 tree orig_lhs = lhs, orig_rhs = rhs;
6302 enum tree_code rhs_code = TREE_CODE (rhs);
6303 enum tree_code lhs_code = TREE_CODE (lhs);
6304 enum tree_code inv_code;
6306 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6307 return NULL_TREE;
6309 if (TREE_CODE_CLASS (code) != tcc_comparison)
6310 return NULL_TREE;
6312 if (rhs_code == truthop_code)
6314 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6315 if (newrhs != NULL_TREE)
6317 rhs = newrhs;
6318 rhs_code = TREE_CODE (rhs);
6321 if (lhs_code == truthop_code && !rhs_only)
6323 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6324 if (newlhs != NULL_TREE)
6326 lhs = newlhs;
6327 lhs_code = TREE_CODE (lhs);
6331 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6332 if (inv_code == rhs_code
6333 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6334 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6335 return lhs;
6336 if (!rhs_only && inv_code == lhs_code
6337 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6338 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6339 return rhs;
6340 if (rhs != orig_rhs || lhs != orig_lhs)
6341 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6342 lhs, rhs);
6343 return NULL_TREE;
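/* Worked example (illustrative only, not from the original source): with
   integer operands,
     (a > 0 && b > 0) || b <= 0
   matches the (A && B) || ~B shape above, since b <= 0 is the inverted
   comparison of b > 0, and simplifies to
     a > 0 || b <= 0.  */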
6346 /* Find ways of folding logical expressions of LHS and RHS:
6347 Try to merge two comparisons to the same innermost item.
6348 Look for range tests like "ch >= '0' && ch <= '9'".
6349 Look for combinations of simple terms on machines with expensive branches
6350 and evaluate the RHS unconditionally.
6352 For example, if we have p->a == 2 && p->b == 4 and we can make an
6353 object large enough to span both A and B, we can do this with a comparison
6354 against the object ANDed with a mask.
6356 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6357 operations to do this with one comparison.
6359 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6360 function and the one above.
6362 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6363 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6365 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
6366 two operands.
6368 We return the simplified tree or 0 if no optimization is possible. */
6370 static tree
6371 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6372 tree lhs, tree rhs)
6374 /* If this is the "or" of two comparisons, we can do something if
6375 the comparisons are NE_EXPR. If this is the "and", we can do something
6376 if the comparisons are EQ_EXPR. I.e.,
6377 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6379 WANTED_CODE is this operation code. For single bit fields, we can
6380 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6381 comparison for one-bit fields. */
6383 enum tree_code wanted_code;
6384 enum tree_code lcode, rcode;
6385 tree ll_arg, lr_arg, rl_arg, rr_arg;
6386 tree ll_inner, lr_inner, rl_inner, rr_inner;
6387 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6388 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6389 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6390 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6391 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6392 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6393 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6394 scalar_int_mode lnmode, rnmode;
6395 tree ll_mask, lr_mask, rl_mask, rr_mask;
6396 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6397 tree l_const, r_const;
6398 tree lntype, rntype, result;
6399 HOST_WIDE_INT first_bit, end_bit;
6400 int volatilep;
6402 /* Start by getting the comparison codes. Fail if anything is volatile.
6403 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6404 it were surrounded with a NE_EXPR. */
6406 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6407 return 0;
6409 lcode = TREE_CODE (lhs);
6410 rcode = TREE_CODE (rhs);
6412 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6414 lhs = build2 (NE_EXPR, truth_type, lhs,
6415 build_int_cst (TREE_TYPE (lhs), 0));
6416 lcode = NE_EXPR;
6419 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6421 rhs = build2 (NE_EXPR, truth_type, rhs,
6422 build_int_cst (TREE_TYPE (rhs), 0));
6423 rcode = NE_EXPR;
6426 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6427 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6428 return 0;
6430 ll_arg = TREE_OPERAND (lhs, 0);
6431 lr_arg = TREE_OPERAND (lhs, 1);
6432 rl_arg = TREE_OPERAND (rhs, 0);
6433 rr_arg = TREE_OPERAND (rhs, 1);
6435 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6436 if (simple_operand_p (ll_arg)
6437 && simple_operand_p (lr_arg))
6439 if (operand_equal_p (ll_arg, rl_arg, 0)
6440 && operand_equal_p (lr_arg, rr_arg, 0))
6442 result = combine_comparisons (loc, code, lcode, rcode,
6443 truth_type, ll_arg, lr_arg);
6444 if (result)
6445 return result;
6447 else if (operand_equal_p (ll_arg, rr_arg, 0)
6448 && operand_equal_p (lr_arg, rl_arg, 0))
6450 result = combine_comparisons (loc, code, lcode,
6451 swap_tree_comparison (rcode),
6452 truth_type, ll_arg, lr_arg);
6453 if (result)
6454 return result;
6458 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6461 /* If the RHS can be evaluated unconditionally and its operands are
6462 simple, it wins to evaluate the RHS unconditionally on machines
6463 with expensive branches. In this case, this isn't a comparison
6464 that can be merged. */
6466 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6467 false) >= 2
6468 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6469 && simple_operand_p (rl_arg)
6470 && simple_operand_p (rr_arg))
6472 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6473 if (code == TRUTH_OR_EXPR
6474 && lcode == NE_EXPR && integer_zerop (lr_arg)
6475 && rcode == NE_EXPR && integer_zerop (rr_arg)
6476 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6477 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6478 return build2_loc (loc, NE_EXPR, truth_type,
6479 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6480 ll_arg, rl_arg),
6481 build_int_cst (TREE_TYPE (ll_arg), 0));
6483 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6484 if (code == TRUTH_AND_EXPR
6485 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6486 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6487 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6488 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6489 return build2_loc (loc, EQ_EXPR, truth_type,
6490 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6491 ll_arg, rl_arg),
6492 build_int_cst (TREE_TYPE (ll_arg), 0));
6495 /* See if the comparisons can be merged. Then get all the parameters for
6496 each side. */
6498 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6499 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6500 return 0;
6502 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6503 volatilep = 0;
6504 ll_inner = decode_field_reference (loc, &ll_arg,
6505 &ll_bitsize, &ll_bitpos, &ll_mode,
6506 &ll_unsignedp, &ll_reversep, &volatilep,
6507 &ll_mask, &ll_and_mask);
6508 lr_inner = decode_field_reference (loc, &lr_arg,
6509 &lr_bitsize, &lr_bitpos, &lr_mode,
6510 &lr_unsignedp, &lr_reversep, &volatilep,
6511 &lr_mask, &lr_and_mask);
6512 rl_inner = decode_field_reference (loc, &rl_arg,
6513 &rl_bitsize, &rl_bitpos, &rl_mode,
6514 &rl_unsignedp, &rl_reversep, &volatilep,
6515 &rl_mask, &rl_and_mask);
6516 rr_inner = decode_field_reference (loc, &rr_arg,
6517 &rr_bitsize, &rr_bitpos, &rr_mode,
6518 &rr_unsignedp, &rr_reversep, &volatilep,
6519 &rr_mask, &rr_and_mask);
6521 /* The inner operation on the lhs of each comparison must be the
6522 same if we are to be able to do anything.
6523 Then see if we have constants. If not, the same must be true for
6524 the rhs's. */
6525 if (volatilep
6526 || ll_reversep != rl_reversep
6527 || ll_inner == 0 || rl_inner == 0
6528 || ! operand_equal_p (ll_inner, rl_inner, 0))
6529 return 0;
6531 if (TREE_CODE (lr_arg) == INTEGER_CST
6532 && TREE_CODE (rr_arg) == INTEGER_CST)
6534 l_const = lr_arg, r_const = rr_arg;
6535 lr_reversep = ll_reversep;
6537 else if (lr_reversep != rr_reversep
6538 || lr_inner == 0 || rr_inner == 0
6539 || ! operand_equal_p (lr_inner, rr_inner, 0))
6540 return 0;
6541 else
6542 l_const = r_const = 0;
6544 /* If either comparison code is not correct for our logical operation,
6545 fail. However, we can convert a one-bit comparison against zero into
6546 the opposite comparison against that bit being set in the field. */
6548 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6549 if (lcode != wanted_code)
6551 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6553 /* Make the left operand unsigned, since we are only interested
6554 in the value of one bit. Otherwise we are doing the wrong
6555 thing below. */
6556 ll_unsignedp = 1;
6557 l_const = ll_mask;
6559 else
6560 return 0;
6563 /* This is analogous to the code for l_const above. */
6564 if (rcode != wanted_code)
6566 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6568 rl_unsignedp = 1;
6569 r_const = rl_mask;
6571 else
6572 return 0;
6575 /* See if we can find a mode that contains both fields being compared on
6576 the left. If we can't, fail. Otherwise, update all constants and masks
6577 to be relative to a field of that size. */
6578 first_bit = MIN (ll_bitpos, rl_bitpos);
6579 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6580 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6581 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6582 volatilep, &lnmode))
6583 return 0;
6585 lnbitsize = GET_MODE_BITSIZE (lnmode);
6586 lnbitpos = first_bit & ~ (lnbitsize - 1);
6587 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6588 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6590 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6592 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6593 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6596 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6597 size_int (xll_bitpos));
6598 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6599 size_int (xrl_bitpos));
6600 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6601 return 0;
6603 if (l_const)
6605 l_const = fold_convert_loc (loc, lntype, l_const);
6606 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6607 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6608 if (l_const == NULL_TREE)
6609 return 0;
6610 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6611 fold_build1_loc (loc, BIT_NOT_EXPR,
6612 lntype, ll_mask))))
6614 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6616 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6619 if (r_const)
6621 r_const = fold_convert_loc (loc, lntype, r_const);
6622 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6623 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6624 if (r_const == NULL_TREE)
6625 return 0;
6626 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6627 fold_build1_loc (loc, BIT_NOT_EXPR,
6628 lntype, rl_mask))))
6630 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6632 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6636 /* If the right sides are not constant, do the same for them. Also,
6637 disallow this optimization if a size, signedness or storage order
6638 mismatch occurs between the left and right sides. */
6639 if (l_const == 0)
6641 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6642 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6643 || ll_reversep != lr_reversep
6644 /* Make sure the two fields on the right
6645 correspond to the left without being swapped. */
6646 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6647 return 0;
6649 first_bit = MIN (lr_bitpos, rr_bitpos);
6650 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6651 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6652 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6653 volatilep, &rnmode))
6654 return 0;
6656 rnbitsize = GET_MODE_BITSIZE (rnmode);
6657 rnbitpos = first_bit & ~ (rnbitsize - 1);
6658 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6659 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6661 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6663 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6664 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6667 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6668 rntype, lr_mask),
6669 size_int (xlr_bitpos));
6670 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6671 rntype, rr_mask),
6672 size_int (xrr_bitpos));
6673 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6674 return 0;
6676 /* Make a mask that corresponds to both fields being compared.
6677 Do this for both items being compared. If the operands are the
6678 same size and the bits being compared are in the same position
6679 then we can do this by masking both and comparing the masked
6680 results. */
6681 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6682 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6683 if (lnbitsize == rnbitsize
6684 && xll_bitpos == xlr_bitpos
6685 && lnbitpos >= 0
6686 && rnbitpos >= 0)
6688 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6689 lntype, lnbitsize, lnbitpos,
6690 ll_unsignedp || rl_unsignedp, ll_reversep);
6691 if (! all_ones_mask_p (ll_mask, lnbitsize))
6692 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6694 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6695 rntype, rnbitsize, rnbitpos,
6696 lr_unsignedp || rr_unsignedp, lr_reversep);
6697 if (! all_ones_mask_p (lr_mask, rnbitsize))
6698 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6700 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6703 /* There is still another way we can do something: If both pairs of
6704 fields being compared are adjacent, we may be able to make a wider
6705 field containing them both.
6707 Note that we still must mask the lhs/rhs expressions. Furthermore,
6708 the mask must be shifted to account for the shift done by
6709 make_bit_field_ref. */
6710 if (((ll_bitsize + ll_bitpos == rl_bitpos
6711 && lr_bitsize + lr_bitpos == rr_bitpos)
6712 || (ll_bitpos == rl_bitpos + rl_bitsize
6713 && lr_bitpos == rr_bitpos + rr_bitsize))
6714 && ll_bitpos >= 0
6715 && rl_bitpos >= 0
6716 && lr_bitpos >= 0
6717 && rr_bitpos >= 0)
6719 tree type;
6721 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6722 ll_bitsize + rl_bitsize,
6723 MIN (ll_bitpos, rl_bitpos),
6724 ll_unsignedp, ll_reversep);
6725 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6726 lr_bitsize + rr_bitsize,
6727 MIN (lr_bitpos, rr_bitpos),
6728 lr_unsignedp, lr_reversep);
6730 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6731 size_int (MIN (xll_bitpos, xrl_bitpos)));
6732 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6733 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6734 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6735 return 0;
6737 /* Convert to the smaller type before masking out unwanted bits. */
6738 type = lntype;
6739 if (lntype != rntype)
6741 if (lnbitsize > rnbitsize)
6743 lhs = fold_convert_loc (loc, rntype, lhs);
6744 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6745 type = rntype;
6747 else if (lnbitsize < rnbitsize)
6749 rhs = fold_convert_loc (loc, lntype, rhs);
6750 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6751 type = lntype;
6755 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6756 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6758 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6759 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6761 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6764 return 0;
6767 /* Handle the case of comparisons with constants. If there is something in
6768 common between the masks, those bits of the constants must be the same.
6769 If not, the condition is always false. Test for this to avoid generating
6770 incorrect code below. */
6771 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6772 if (! integer_zerop (result)
6773 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6774 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6776 if (wanted_code == NE_EXPR)
6778 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6779 return constant_boolean_node (true, truth_type);
6781 else
6783 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6784 return constant_boolean_node (false, truth_type);
6788 if (lnbitpos < 0)
6789 return 0;
6791 /* Construct the expression we will return. First get the component
6792 reference we will make. Unless the mask is all ones for the width of
6793 that field, perform the mask operation. Then compare with the
6794 merged constant. */
6795 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6796 lntype, lnbitsize, lnbitpos,
6797 ll_unsignedp || rl_unsignedp, ll_reversep);
6799 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6800 if (! all_ones_mask_p (ll_mask, lnbitsize))
6801 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6803 return build2_loc (loc, wanted_code, truth_type, result,
6804 const_binop (BIT_IOR_EXPR, l_const, r_const));
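/* Worked example (illustrative only, not from the original source): given
     struct s { unsigned char a, b; } *p;
   the test p->a == 2 && p->b == 4 can be folded into a single load of the
   16-bit word covering both fields, masked if necessary, and compared
   against the merged constant (0x0402 on a little-endian target).  */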
6807 /* T is an integer expression that is being multiplied or divided by, or
6808 reduced modulo, a constant C (CODE says which operation and what kind
6809 of divide or modulus). See if we can eliminate that operation by folding it with
6810 other operations already in T. WIDE_TYPE, if non-null, is a type that
6811 should be used for the computation if wider than our type.
6813 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6814 (X * 2) + (Y * 4). We must, however, be assured that either the original
6815 expression would not overflow or that overflow is undefined for the type
6816 in the language in question.
6818 If we return a non-null expression, it is an equivalent form of the
6819 original computation, but need not be in the original type.
6821 We set *STRICT_OVERFLOW_P to true if the return value depends on
6822 signed overflow being undefined. Otherwise we do not change
6823 *STRICT_OVERFLOW_P. */
6825 static tree
6826 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6827 bool *strict_overflow_p)
6829 /* To avoid exponential search depth, refuse to allow recursion past
6830 three levels. Beyond that (1) it's highly unlikely that we'll find
6831 something interesting and (2) we've probably processed it before
6832 when we built the inner expression. */
6834 static int depth;
6835 tree ret;
6837 if (depth > 3)
6838 return NULL;
6840 depth++;
6841 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6842 depth--;
6844 return ret;
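/* Worked example (illustrative only, not from the original source):
   dividing (X * 8) + (Y * 16) by 4 distributes the division into both
   multiplications,
     ((X * 8) + (Y * 16)) / 4   ->   (X * 2) + (Y * 4)
   which is valid because each term is divisible by 4, provided the
   original expression cannot overflow or its overflow is undefined.  */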
6847 static tree
6848 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6849 bool *strict_overflow_p)
6851 tree type = TREE_TYPE (t);
6852 enum tree_code tcode = TREE_CODE (t);
6853 tree ctype = (wide_type != 0
6854 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6855 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6856 ? wide_type : type);
6857 tree t1, t2;
6858 bool same_p = tcode == code;
6859 tree op0 = NULL_TREE, op1 = NULL_TREE;
6860 bool sub_strict_overflow_p;
6862 /* Don't deal with constants of zero here; they confuse the code below. */
6863 if (integer_zerop (c))
6864 return NULL_TREE;
6866 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6867 op0 = TREE_OPERAND (t, 0);
6869 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6870 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6872 /* Note that we need not handle conditional operations here since fold
6873 already handles those cases. So just do arithmetic here. */
6874 switch (tcode)
6876 case INTEGER_CST:
6877 /* For a constant, we can always simplify if we are a multiply
6878 or (for divide and modulus) if it is a multiple of our constant. */
6879 if (code == MULT_EXPR
6880 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6881 TYPE_SIGN (type)))
6883 tree tem = const_binop (code, fold_convert (ctype, t),
6884 fold_convert (ctype, c));
6885 /* If the multiplication overflowed, we lost information on it.
6886 See PR68142 and PR69845. */
6887 if (TREE_OVERFLOW (tem))
6888 return NULL_TREE;
6889 return tem;
6891 break;
6893 CASE_CONVERT: case NON_LVALUE_EXPR:
6894 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6895 break;
6896 /* If op0 is an expression ... */
6897 if ((COMPARISON_CLASS_P (op0)
6898 || UNARY_CLASS_P (op0)
6899 || BINARY_CLASS_P (op0)
6900 || VL_EXP_CLASS_P (op0)
6901 || EXPRESSION_CLASS_P (op0))
6902 /* ... and has wrapping overflow, and its type is smaller
6903 than ctype, then we cannot pass through as widening. */
6904 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6905 && (TYPE_PRECISION (ctype)
6906 > TYPE_PRECISION (TREE_TYPE (op0))))
6907 /* ... or this is a truncation (t is narrower than op0),
6908 then we cannot pass through this narrowing. */
6909 || (TYPE_PRECISION (type)
6910 < TYPE_PRECISION (TREE_TYPE (op0)))
6911 /* ... or signedness changes for division or modulus,
6912 then we cannot pass through this conversion. */
6913 || (code != MULT_EXPR
6914 && (TYPE_UNSIGNED (ctype)
6915 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6916 /* ... or has undefined overflow while the converted to
6917 type has not, we cannot do the operation in the inner type
6918 as that would introduce undefined overflow. */
6919 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6920 && !TYPE_OVERFLOW_UNDEFINED (type))))
6921 break;
6923 /* Pass the constant down and see if we can make a simplification. If
6924 we can, replace this expression with the inner simplification for
6925 possible later conversion to our or some other type. */
6926 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6927 && TREE_CODE (t2) == INTEGER_CST
6928 && !TREE_OVERFLOW (t2)
6929 && (t1 = extract_muldiv (op0, t2, code,
6930 code == MULT_EXPR ? ctype : NULL_TREE,
6931 strict_overflow_p)) != 0)
6932 return t1;
6933 break;
6935 case ABS_EXPR:
6936 /* If widening the type changes it from signed to unsigned, then we
6937 must avoid building ABS_EXPR itself as unsigned. */
6938 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6940 tree cstype = (*signed_type_for) (ctype);
6941 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6942 != 0)
6944 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6945 return fold_convert (ctype, t1);
6947 break;
6949 /* If the constant is negative, we cannot simplify this. */
6950 if (tree_int_cst_sgn (c) == -1)
6951 break;
6952 /* FALLTHROUGH */
6953 case NEGATE_EXPR:
6954 /* For division and modulus, type can't be unsigned, as e.g.
6955 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6956 For signed types, even with wrapping overflow, this is fine. */
6957 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6958 break;
6959 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6960 != 0)
6961 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6962 break;
6964 case MIN_EXPR: case MAX_EXPR:
6965 /* If widening the type changes the signedness, then we can't perform
6966 this optimization as that changes the result. */
6967 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6968 break;
6970 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6971 sub_strict_overflow_p = false;
6972 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6973 &sub_strict_overflow_p)) != 0
6974 && (t2 = extract_muldiv (op1, c, code, wide_type,
6975 &sub_strict_overflow_p)) != 0)
6977 if (tree_int_cst_sgn (c) < 0)
6978 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6979 if (sub_strict_overflow_p)
6980 *strict_overflow_p = true;
6981 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6982 fold_convert (ctype, t2));
6984 break;
6986 case LSHIFT_EXPR: case RSHIFT_EXPR:
6987 /* If the second operand is constant, this is a multiplication
6988 or floor division by a power of two, so we can treat it that
6989 way unless the multiplier or divisor overflows. Signed
6990 left-shift overflow is implementation-defined rather than
6991 undefined in C90, so do not convert signed left shift into
6992 multiplication. */
6993 if (TREE_CODE (op1) == INTEGER_CST
6994 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6995 /* const_binop may not detect overflow correctly,
6996 so check for it explicitly here. */
6997 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6998 wi::to_wide (op1))
6999 && (t1 = fold_convert (ctype,
7000 const_binop (LSHIFT_EXPR, size_one_node,
7001 op1))) != 0
7002 && !TREE_OVERFLOW (t1))
7003 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7004 ? MULT_EXPR : FLOOR_DIV_EXPR,
7005 ctype,
7006 fold_convert (ctype, op0),
7007 t1),
7008 c, code, wide_type, strict_overflow_p);
7009 break;
7011 case PLUS_EXPR: case MINUS_EXPR:
7012 /* See if we can eliminate the operation on both sides. If we can, we
7013 can return a new PLUS or MINUS. If we can't, the only remaining
7014 cases where we can do anything are if the second operand is a
7015 constant. */
7016 sub_strict_overflow_p = false;
7017 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7018 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7019 if (t1 != 0 && t2 != 0
7020 && TYPE_OVERFLOW_WRAPS (ctype)
7021 && (code == MULT_EXPR
7022 /* If not multiplication, we can only do this if both operands
7023 are divisible by c. */
7024 || (multiple_of_p (ctype, op0, c)
7025 && multiple_of_p (ctype, op1, c))))
7027 if (sub_strict_overflow_p)
7028 *strict_overflow_p = true;
7029 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7030 fold_convert (ctype, t2));
7033 /* If this was a subtraction, negate OP1 and set it to be an addition.
7034 This simplifies the logic below. */
7035 if (tcode == MINUS_EXPR)
7037 tcode = PLUS_EXPR, op1 = negate_expr (op1);
7038 /* If OP1 was not easily negatable, the constant may be OP0. */
7039 if (TREE_CODE (op0) == INTEGER_CST)
7041 std::swap (op0, op1);
7042 std::swap (t1, t2);
7046 if (TREE_CODE (op1) != INTEGER_CST)
7047 break;
7049 /* If either OP1 or C is negative, this optimization is not safe for
7050 some of the division and remainder types, while for others we need
7051 to change the code. */
7052 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7054 if (code == CEIL_DIV_EXPR)
7055 code = FLOOR_DIV_EXPR;
7056 else if (code == FLOOR_DIV_EXPR)
7057 code = CEIL_DIV_EXPR;
7058 else if (code != MULT_EXPR
7059 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7060 break;
7063 /* If it's a multiply or a division/modulus operation of a multiple
7064 of our constant, do the operation and verify it doesn't overflow. */
7065 if (code == MULT_EXPR
7066 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7067 TYPE_SIGN (type)))
7069 op1 = const_binop (code, fold_convert (ctype, op1),
7070 fold_convert (ctype, c));
7071 /* We allow the constant to overflow with wrapping semantics. */
7072 if (op1 == 0
7073 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7074 break;
7076 else
7077 break;
7079 /* If we have an unsigned type, we cannot widen the operation since it
7080 will change the result if the original computation overflowed. */
7081 if (TYPE_UNSIGNED (ctype) && ctype != type)
7082 break;
7084 /* The last case is when this is a multiply. In that case, we can
7085 apply the distributive law to commute the multiply and addition
7086 if the multiplication of the constants doesn't overflow
7087 and overflow is defined. With undefined overflow
7088 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7089 But fold_plusminus_mult_expr would factor back any power-of-two
7090 value so do not distribute in the first place in this case. */
7091 if (code == MULT_EXPR
7092 && TYPE_OVERFLOW_WRAPS (ctype)
7093 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7094 return fold_build2 (tcode, ctype,
7095 fold_build2 (code, ctype,
7096 fold_convert (ctype, op0),
7097 fold_convert (ctype, c)),
7098 op1);
7100 break;
7102 case MULT_EXPR:
7103 /* We have a special case here if we are doing something like
7104 (C * 8) % 4 since we know that's zero. */
7105 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7106 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7107 /* If the multiplication can overflow we cannot optimize this. */
7108 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7109 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7110 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7111 TYPE_SIGN (type)))
7113 *strict_overflow_p = true;
7114 return omit_one_operand (type, integer_zero_node, op0);
7117 /* ... fall through ... */
7119 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7120 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7121 /* If we can extract our operation from the LHS, do so and return a
7122 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7123 do something only if the second operand is a constant. */
7124 if (same_p
7125 && TYPE_OVERFLOW_WRAPS (ctype)
7126 && (t1 = extract_muldiv (op0, c, code, wide_type,
7127 strict_overflow_p)) != 0)
7128 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7129 fold_convert (ctype, op1));
7130 else if (tcode == MULT_EXPR && code == MULT_EXPR
7131 && TYPE_OVERFLOW_WRAPS (ctype)
7132 && (t1 = extract_muldiv (op1, c, code, wide_type,
7133 strict_overflow_p)) != 0)
7134 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7135 fold_convert (ctype, t1));
7136 else if (TREE_CODE (op1) != INTEGER_CST)
7137 return 0;
7139 /* If these are the same operation types, we can associate them
7140 assuming no overflow. */
7141 if (tcode == code)
7143 bool overflow_p = false;
7144 wi::overflow_type overflow_mul;
7145 signop sign = TYPE_SIGN (ctype);
7146 unsigned prec = TYPE_PRECISION (ctype);
7147 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7148 wi::to_wide (c, prec),
7149 sign, &overflow_mul);
7150 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7151 if (overflow_mul
7152 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7153 overflow_p = true;
7154 if (!overflow_p)
7155 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7156 wide_int_to_tree (ctype, mul));
7159 /* If these operations "cancel" each other, we have the main
7160 optimizations of this pass, which occur when either constant is a
7161 multiple of the other, in which case we replace this with an
7162 operation of either CODE or TCODE.
7164 If we have an unsigned type, we cannot do this since it will change
7165 the result if the original computation overflowed. */
7166 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7167 && !TYPE_OVERFLOW_SANITIZED (ctype)
7168 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7169 || (tcode == MULT_EXPR
7170 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7171 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7172 && code != MULT_EXPR)))
7174 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7175 TYPE_SIGN (type)))
7177 *strict_overflow_p = true;
7178 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7179 fold_convert (ctype,
7180 const_binop (TRUNC_DIV_EXPR,
7181 op1, c)));
7183 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7184 TYPE_SIGN (type)))
7186 *strict_overflow_p = true;
7187 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7188 fold_convert (ctype,
7189 const_binop (TRUNC_DIV_EXPR,
7190 c, op1)));
7193 break;
7195 default:
7196 break;
7199 return 0;
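/* Worked examples (illustrative only, not from the original source) of the
   cancelling case above, for signed X where overflow is undefined:
     (X * 12) / 4   ->   X * 3
     (X * 4) / 12   ->   X / 3
   Both set *STRICT_OVERFLOW_P, since they would be wrong if the inner
   multiplication wrapped.  */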
7202 /* Return a node which has the indicated constant VALUE (either 0 or
7203 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7204 and is of the indicated TYPE. */
7206 tree
7207 constant_boolean_node (bool value, tree type)
7209 if (type == integer_type_node)
7210 return value ? integer_one_node : integer_zero_node;
7211 else if (type == boolean_type_node)
7212 return value ? boolean_true_node : boolean_false_node;
7213 else if (VECTOR_TYPE_P (type))
7214 return build_vector_from_val (type,
7215 build_int_cst (TREE_TYPE (type),
7216 value ? -1 : 0));
7217 else
7218 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7222 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7223 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7224 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7225 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7226 COND is the first argument to CODE; otherwise (as in the example
7227 given here), it is the second argument. TYPE is the type of the
7228 original expression. Return NULL_TREE if no simplification is
7229 possible. */
7231 static tree
7232 fold_binary_op_with_conditional_arg (location_t loc,
7233 enum tree_code code,
7234 tree type, tree op0, tree op1,
7235 tree cond, tree arg, int cond_first_p)
7237 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7238 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7239 tree test, true_value, false_value;
7240 tree lhs = NULL_TREE;
7241 tree rhs = NULL_TREE;
7242 enum tree_code cond_code = COND_EXPR;
7244 /* Do not move possibly trapping operations into the conditional as this
7245 pessimizes code and causes gimplification issues when applied late. */
7246 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7247 ANY_INTEGRAL_TYPE_P (type)
7248 && TYPE_OVERFLOW_TRAPS (type), op1))
7249 return NULL_TREE;
7251 if (TREE_CODE (cond) == COND_EXPR
7252 || TREE_CODE (cond) == VEC_COND_EXPR)
7254 test = TREE_OPERAND (cond, 0);
7255 true_value = TREE_OPERAND (cond, 1);
7256 false_value = TREE_OPERAND (cond, 2);
7257 /* If this operand throws an exception, then it does not make
7258 sense to try to perform a logical or arithmetic operation
7259 involving it. */
7260 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7261 lhs = true_value;
7262 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7263 rhs = false_value;
7265 else if (!(TREE_CODE (type) != VECTOR_TYPE
7266 && VECTOR_TYPE_P (TREE_TYPE (cond))))
7268 tree testtype = TREE_TYPE (cond);
7269 test = cond;
7270 true_value = constant_boolean_node (true, testtype);
7271 false_value = constant_boolean_node (false, testtype);
7273 else
7274 /* Detect the case of mixing vector and scalar types - bail out. */
7275 return NULL_TREE;
7277 if (VECTOR_TYPE_P (TREE_TYPE (test)))
7278 cond_code = VEC_COND_EXPR;
7280 /* This transformation is only worthwhile if we don't have to wrap ARG
7281 in a SAVE_EXPR and the operation can be simplified without recursing
7282 on at least one of the branches once it's pushed inside the COND_EXPR. */
7283 if (!TREE_CONSTANT (arg)
7284 && (TREE_SIDE_EFFECTS (arg)
7285 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7286 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7287 return NULL_TREE;
7289 arg = fold_convert_loc (loc, arg_type, arg);
7290 if (lhs == 0)
7292 true_value = fold_convert_loc (loc, cond_type, true_value);
7293 if (cond_first_p)
7294 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7295 else
7296 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7298 if (rhs == 0)
7300 false_value = fold_convert_loc (loc, cond_type, false_value);
7301 if (cond_first_p)
7302 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7303 else
7304 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7307 /* Check that we have simplified at least one of the branches. */
7308 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7309 return NULL_TREE;
7311 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7315 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7317 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7318 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7319 if ARG - ZERO_ARG is the same as ARG.
7321 If ARG is NULL, check for any value of type TYPE.
7323 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7324 and finite. The problematic cases are when X is zero, and its mode
7325 has signed zeros. In the case of rounding towards -infinity,
7326 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7327 modes, X + 0 is not the same as X because -0 + 0 is 0. */
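/* Worked illustration (assuming IEEE semantics; not part of the checks
   below): under the default round-to-nearest mode, -0.0 + 0.0 == +0.0,
   so X + 0.0 cannot be folded to X when X might be -0.0; under rounding
   towards -infinity, 0.0 - 0.0 == -0.0, so X - 0.0 cannot be folded
   either.  */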
7329 bool
7330 fold_real_zero_addition_p (const_tree type, const_tree arg,
7331 const_tree zero_arg, int negate)
7333 if (!real_zerop (zero_arg))
7334 return false;
7336 /* Don't allow the fold with -fsignaling-nans. */
7337 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7338 return false;
7340 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7341 if (!HONOR_SIGNED_ZEROS (type))
7342 return true;
7344 /* There is no case that is safe for all rounding modes. */
7345 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7346 return false;
7348 /* In a vector or complex, we would need to check the sign of all zeros. */
7349 if (TREE_CODE (zero_arg) == VECTOR_CST)
7350 zero_arg = uniform_vector_p (zero_arg);
7351 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7352 return false;
7354 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7355 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7356 negate = !negate;
7358 /* The mode has signed zeros, and we have to honor their sign.
7359 In this situation, there are only two cases we can return true for.
7360 (i) X - 0 is the same as X with default rounding.
7361 (ii) X + 0 is X when X can't possibly be -0.0. */
7362 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7365 /* Subroutine of match.pd that optimizes comparisons of a division by
7366 a nonzero integer constant against an integer constant, i.e.
7367 X/C1 op C2.
7369 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7370 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
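/* Worked example (illustrative): for unsigned X, the comparison
   X / 3 == 2 holds exactly for X in [6, 8], so this routine sets
   *LO = 6 (= 3 * 2) and *HI = 8 (= *LO + 3 - 1), and the caller can
   conceptually rewrite the division away as 6 <= X && X <= 8.  */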
7372 enum tree_code
7373 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7374 tree *hi, bool *neg_overflow)
7376 tree prod, tmp, type = TREE_TYPE (c1);
7377 signop sign = TYPE_SIGN (type);
7378 wi::overflow_type overflow;
7380 /* We have to do this the hard way to detect unsigned overflow.
7381 prod = int_const_binop (MULT_EXPR, c1, c2); */
7382 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7383 prod = force_fit_type (type, val, -1, overflow);
7384 *neg_overflow = false;
7386 if (sign == UNSIGNED)
7388 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7389 *lo = prod;
7391 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7392 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7393 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7395 else if (tree_int_cst_sgn (c1) >= 0)
7397 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7398 switch (tree_int_cst_sgn (c2))
7400 case -1:
7401 *neg_overflow = true;
7402 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7403 *hi = prod;
7404 break;
7406 case 0:
7407 *lo = fold_negate_const (tmp, type);
7408 *hi = tmp;
7409 break;
7411 case 1:
7412 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7413 *lo = prod;
7414 break;
7416 default:
7417 gcc_unreachable ();
7420 else
7422 /* A negative divisor reverses the relational operators. */
7423 code = swap_tree_comparison (code);
7425 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7426 switch (tree_int_cst_sgn (c2))
7428 case -1:
7429 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7430 *lo = prod;
7431 break;
7433 case 0:
7434 *hi = fold_negate_const (tmp, type);
7435 *lo = tmp;
7436 break;
7438 case 1:
7439 *neg_overflow = true;
7440 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7441 *hi = prod;
7442 break;
7444 default:
7445 gcc_unreachable ();
7449 if (code != EQ_EXPR && code != NE_EXPR)
7450 return code;
7452 if (TREE_OVERFLOW (*lo)
7453 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7454 *lo = NULL_TREE;
7455 if (TREE_OVERFLOW (*hi)
7456 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7457 *hi = NULL_TREE;
7459 return code;
7462 /* Test whether it is preferable to swap two operands, ARG0 and
7463 ARG1, for example because ARG0 is an integer constant and ARG1
7464 isn't. */
7466 bool
7467 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7469 if (CONSTANT_CLASS_P (arg1))
7470 return false;
7471 if (CONSTANT_CLASS_P (arg0))
7472 return true;
7474 STRIP_NOPS (arg0);
7475 STRIP_NOPS (arg1);
7477 if (TREE_CONSTANT (arg1))
7478 return false;
7479 if (TREE_CONSTANT (arg0))
7480 return true;
7482 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7483 for commutative and comparison operators. Ensuring a canonical
7484 form allows the optimizers to find additional redundancies without
7485 having to explicitly check for both orderings. */
7486 if (TREE_CODE (arg0) == SSA_NAME
7487 && TREE_CODE (arg1) == SSA_NAME
7488 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7489 return true;
7491 /* Put SSA_NAMEs last. */
7492 if (TREE_CODE (arg1) == SSA_NAME)
7493 return false;
7494 if (TREE_CODE (arg0) == SSA_NAME)
7495 return true;
7497 /* Put variables last. */
7498 if (DECL_P (arg1))
7499 return false;
7500 if (DECL_P (arg0))
7501 return true;
7503 return false;
7507 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7508 means A >= Y && A != MAX, but in this case we know that
7509 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
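/* Sanity check of the reasoning (illustrative): since BOUND gives
   A < X, A cannot be the maximum value of its type, so A + 1 does not
   wrap, and A + 1 > Y is therefore equivalent to A >= Y.  */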
7511 static tree
7512 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7514 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7516 if (TREE_CODE (bound) == LT_EXPR)
7517 a = TREE_OPERAND (bound, 0);
7518 else if (TREE_CODE (bound) == GT_EXPR)
7519 a = TREE_OPERAND (bound, 1);
7520 else
7521 return NULL_TREE;
7523 typea = TREE_TYPE (a);
7524 if (!INTEGRAL_TYPE_P (typea)
7525 && !POINTER_TYPE_P (typea))
7526 return NULL_TREE;
7528 if (TREE_CODE (ineq) == LT_EXPR)
7530 a1 = TREE_OPERAND (ineq, 1);
7531 y = TREE_OPERAND (ineq, 0);
7533 else if (TREE_CODE (ineq) == GT_EXPR)
7535 a1 = TREE_OPERAND (ineq, 0);
7536 y = TREE_OPERAND (ineq, 1);
7538 else
7539 return NULL_TREE;
7541 if (TREE_TYPE (a1) != typea)
7542 return NULL_TREE;
7544 if (POINTER_TYPE_P (typea))
7546 /* Convert the pointer types to integers before taking the difference. */
7547 tree ta = fold_convert_loc (loc, ssizetype, a);
7548 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7549 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7551 else
7552 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7554 if (!diff || !integer_onep (diff))
7555 return NULL_TREE;
7557 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7560 /* Fold a sum or difference of at least one multiplication.
7561 Returns the folded tree or NULL if no simplification could be made. */
7563 static tree
7564 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7565 tree arg0, tree arg1)
7567 tree arg00, arg01, arg10, arg11;
7568 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7570 /* (A * C) +- (B * C) -> (A+-B) * C.
7571 (A * C) +- A -> A * (C+-1).
7572 We are most concerned about the case where C is a constant,
7573 but other combinations show up during loop reduction. Since
7574 it is not difficult, try all four possibilities. */
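/* Illustrative instances of the shapes handled here (ignoring the
   overflow caveats dealt with below): X*3 + X*5 -> X*8;
   X*4 + X -> X*5, treating the bare X as X*1; and, for the
   power-of-two case further down, I*12 + J*4 -> (I*3 + J)*4.  */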
7576 if (TREE_CODE (arg0) == MULT_EXPR)
7578 arg00 = TREE_OPERAND (arg0, 0);
7579 arg01 = TREE_OPERAND (arg0, 1);
7581 else if (TREE_CODE (arg0) == INTEGER_CST)
7583 arg00 = build_one_cst (type);
7584 arg01 = arg0;
7586 else
7588 /* We cannot generate constant 1 for fract. */
7589 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7590 return NULL_TREE;
7591 arg00 = arg0;
7592 arg01 = build_one_cst (type);
7594 if (TREE_CODE (arg1) == MULT_EXPR)
7596 arg10 = TREE_OPERAND (arg1, 0);
7597 arg11 = TREE_OPERAND (arg1, 1);
7599 else if (TREE_CODE (arg1) == INTEGER_CST)
7601 arg10 = build_one_cst (type);
7602 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7603 the purpose of this canonicalization. */
7604 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7605 && negate_expr_p (arg1)
7606 && code == PLUS_EXPR)
7608 arg11 = negate_expr (arg1);
7609 code = MINUS_EXPR;
7611 else
7612 arg11 = arg1;
7614 else
7616 /* We cannot generate constant 1 for fract. */
7617 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7618 return NULL_TREE;
7619 arg10 = arg1;
7620 arg11 = build_one_cst (type);
7622 same = NULL_TREE;
7624 /* Prefer factoring a common non-constant. */
7625 if (operand_equal_p (arg00, arg10, 0))
7626 same = arg00, alt0 = arg01, alt1 = arg11;
7627 else if (operand_equal_p (arg01, arg11, 0))
7628 same = arg01, alt0 = arg00, alt1 = arg10;
7629 else if (operand_equal_p (arg00, arg11, 0))
7630 same = arg00, alt0 = arg01, alt1 = arg10;
7631 else if (operand_equal_p (arg01, arg10, 0))
7632 same = arg01, alt0 = arg00, alt1 = arg11;
7634 /* No identical multiplicands; see if we can find a common
7635 power-of-two factor in non-power-of-two multiplies. This
7636 can help in multi-dimensional array access. */
7637 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7639 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7640 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7641 HOST_WIDE_INT tmp;
7642 bool swap = false;
7643 tree maybe_same;
7645 /* Move min of absolute values to int11. */
7646 if (absu_hwi (int01) < absu_hwi (int11))
7648 tmp = int01, int01 = int11, int11 = tmp;
7649 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7650 maybe_same = arg01;
7651 swap = true;
7653 else
7654 maybe_same = arg11;
7656 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7657 if (factor > 1
7658 && pow2p_hwi (factor)
7659 && (int01 & (factor - 1)) == 0
7660 /* The remainder should not be a constant, otherwise we
7661 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7662 increase the number of multiplications necessary. */
7663 && TREE_CODE (arg10) != INTEGER_CST)
7665 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7666 build_int_cst (TREE_TYPE (arg00),
7667 int01 / int11));
7668 alt1 = arg10;
7669 same = maybe_same;
7670 if (swap)
7671 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7675 if (!same)
7676 return NULL_TREE;
7678 if (! ANY_INTEGRAL_TYPE_P (type)
7679 || TYPE_OVERFLOW_WRAPS (type)
7680 /* We are neither factoring zero nor minus one. */
7681 || TREE_CODE (same) == INTEGER_CST)
7682 return fold_build2_loc (loc, MULT_EXPR, type,
7683 fold_build2_loc (loc, code, type,
7684 fold_convert_loc (loc, type, alt0),
7685 fold_convert_loc (loc, type, alt1)),
7686 fold_convert_loc (loc, type, same));
7688 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7689 same may be minus one and thus the multiplication may overflow. Perform
7690 the sum operation in an unsigned type. */
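/* E.g. (hypothetical values): for A*0 + B*0 with A = INT_MAX and
   B = 1 the original value is 0, yet A + B would overflow if computed
   in the signed type; computing A + B in the unsigned type sidesteps
   that.  */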
7691 tree utype = unsigned_type_for (type);
7692 tree tem = fold_build2_loc (loc, code, utype,
7693 fold_convert_loc (loc, utype, alt0),
7694 fold_convert_loc (loc, utype, alt1));
7695 /* If the sum evaluated to a constant that is not -INF, the multiplication
7696 cannot overflow. */
7697 if (TREE_CODE (tem) == INTEGER_CST
7698 && (wi::to_wide (tem)
7699 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7700 return fold_build2_loc (loc, MULT_EXPR, type,
7701 fold_convert (type, tem), same);
7703 /* Do not resort to unsigned multiplication because
7704 we lose the no-overflow property of the expression. */
7705 return NULL_TREE;
7708 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7709 specified by EXPR into the buffer PTR of length LEN bytes.
7710 Return the number of bytes placed in the buffer, or zero
7711 upon failure. */
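/* Byte-layout illustration (assuming 8-bit bytes): the 32-bit
   INTEGER_CST 0x11223344 encodes as { 0x44, 0x33, 0x22, 0x11 } on a
   little-endian target and { 0x11, 0x22, 0x33, 0x44 } on a big-endian
   one; with OFF == 2 and LEN == 2 only the two bytes at offsets 2 and
   3 are produced.  */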
7713 static int
7714 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7716 tree type = TREE_TYPE (expr);
7717 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7718 int byte, offset, word, words;
7719 unsigned char value;
7721 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7722 return 0;
7723 if (off == -1)
7724 off = 0;
7726 if (ptr == NULL)
7727 /* Dry run. */
7728 return MIN (len, total_bytes - off);
7730 words = total_bytes / UNITS_PER_WORD;
7732 for (byte = 0; byte < total_bytes; byte++)
7734 int bitpos = byte * BITS_PER_UNIT;
7735 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7736 number of bytes. */
7737 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7739 if (total_bytes > UNITS_PER_WORD)
7741 word = byte / UNITS_PER_WORD;
7742 if (WORDS_BIG_ENDIAN)
7743 word = (words - 1) - word;
7744 offset = word * UNITS_PER_WORD;
7745 if (BYTES_BIG_ENDIAN)
7746 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7747 else
7748 offset += byte % UNITS_PER_WORD;
7750 else
7751 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7752 if (offset >= off && offset - off < len)
7753 ptr[offset - off] = value;
7755 return MIN (len, total_bytes - off);
7759 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7760 specified by EXPR into the buffer PTR of length LEN bytes.
7761 Return the number of bytes placed in the buffer, or zero
7762 upon failure. */
7764 static int
7765 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7767 tree type = TREE_TYPE (expr);
7768 scalar_mode mode = SCALAR_TYPE_MODE (type);
7769 int total_bytes = GET_MODE_SIZE (mode);
7770 FIXED_VALUE_TYPE value;
7771 tree i_value, i_type;
7773 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7774 return 0;
7776 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7778 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7779 return 0;
7781 value = TREE_FIXED_CST (expr);
7782 i_value = double_int_to_tree (i_type, value.data);
7784 return native_encode_int (i_value, ptr, len, off);
7788 /* Subroutine of native_encode_expr. Encode the REAL_CST
7789 specified by EXPR into the buffer PTR of length LEN bytes.
7790 Return the number of bytes placed in the buffer, or zero
7791 upon failure. */
7793 static int
7794 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7796 tree type = TREE_TYPE (expr);
7797 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7798 int byte, offset, word, words, bitpos;
7799 unsigned char value;
7801 /* There are always 32 bits in each long, no matter the size of
7802 the host's long. We handle floating point representations with
7803 up to 192 bits. */
7804 long tmp[6];
7806 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7807 return 0;
7808 if (off == -1)
7809 off = 0;
7811 if (ptr == NULL)
7812 /* Dry run. */
7813 return MIN (len, total_bytes - off);
7815 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7817 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7819 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7820 bitpos += BITS_PER_UNIT)
7822 byte = (bitpos / BITS_PER_UNIT) & 3;
7823 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7825 if (UNITS_PER_WORD < 4)
7827 word = byte / UNITS_PER_WORD;
7828 if (WORDS_BIG_ENDIAN)
7829 word = (words - 1) - word;
7830 offset = word * UNITS_PER_WORD;
7831 if (BYTES_BIG_ENDIAN)
7832 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7833 else
7834 offset += byte % UNITS_PER_WORD;
7836 else
7838 offset = byte;
7839 if (BYTES_BIG_ENDIAN)
7841 /* Reverse bytes within each long, or within the entire float
7842 if it's smaller than a long (for HFmode). */
7843 offset = MIN (3, total_bytes - 1) - offset;
7844 gcc_assert (offset >= 0);
7847 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7848 if (offset >= off
7849 && offset - off < len)
7850 ptr[offset - off] = value;
7852 return MIN (len, total_bytes - off);
7855 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7856 specified by EXPR into the buffer PTR of length LEN bytes.
7857 Return the number of bytes placed in the buffer, or zero
7858 upon failure. */
7860 static int
7861 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7863 int rsize, isize;
7864 tree part;
7866 part = TREE_REALPART (expr);
7867 rsize = native_encode_expr (part, ptr, len, off);
7868 if (off == -1 && rsize == 0)
7869 return 0;
7870 part = TREE_IMAGPART (expr);
7871 if (off != -1)
7872 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7873 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7874 len - rsize, off);
7875 if (off == -1 && isize != rsize)
7876 return 0;
7877 return rsize + isize;
7880 /* Like native_encode_vector, but only encode the first COUNT elements.
7881 The other arguments are as for native_encode_vector. */
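/* For boolean vectors with sub-byte elements the encoding is a bit
   array: e.g. (illustration) a 16-element mask vector with 1-bit
   elements and COUNT == 16 occupies two bytes, element 0 living in
   the least significant bit of the first byte.  */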
7883 static int
7884 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7885 int off, unsigned HOST_WIDE_INT count)
7887 tree itype = TREE_TYPE (TREE_TYPE (expr));
7888 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7889 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7891 /* This is the only case in which elements can be smaller than a byte.
7892 Element 0 is always in the lsb of the containing byte. */
7893 unsigned int elt_bits = TYPE_PRECISION (itype);
7894 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7895 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7896 return 0;
7898 if (off == -1)
7899 off = 0;
7901 /* Zero the buffer and then set bits later where necessary. */
7902 int extract_bytes = MIN (len, total_bytes - off);
7903 if (ptr)
7904 memset (ptr, 0, extract_bytes);
7906 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7907 unsigned int first_elt = off * elts_per_byte;
7908 unsigned int extract_elts = extract_bytes * elts_per_byte;
7909 for (unsigned int i = 0; i < extract_elts; ++i)
7911 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7912 if (TREE_CODE (elt) != INTEGER_CST)
7913 return 0;
7915 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7917 unsigned int bit = i * elt_bits;
7918 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7921 return extract_bytes;
7924 int offset = 0;
7925 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7926 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7928 if (off >= size)
7930 off -= size;
7931 continue;
7933 tree elem = VECTOR_CST_ELT (expr, i);
7934 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7935 len - offset, off);
7936 if ((off == -1 && res != size) || res == 0)
7937 return 0;
7938 offset += res;
7939 if (offset >= len)
7940 return (off == -1 && i < count - 1) ? 0 : offset;
7941 if (off != -1)
7942 off = 0;
7944 return offset;
7947 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7948 specified by EXPR into the buffer PTR of length LEN bytes.
7949 Return the number of bytes placed in the buffer, or zero
7950 upon failure. */
7952 static int
7953 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7955 unsigned HOST_WIDE_INT count;
7956 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7957 return 0;
7958 return native_encode_vector_part (expr, ptr, len, off, count);
7962 /* Subroutine of native_encode_expr. Encode the STRING_CST
7963 specified by EXPR into the buffer PTR of length LEN bytes.
7964 Return the number of bytes placed in the buffer, or zero
7965 upon failure. */
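/* For example (illustrative): encoding the STRING_CST "ab" whose
   array type is 4 bytes wide yields { 'a', 'b', 0, 0 }; bytes past
   TREE_STRING_LENGTH are zero-filled by the code below.  */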
7967 static int
7968 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7970 tree type = TREE_TYPE (expr);
7972 /* Wide-char strings are encoded in target byte order, so encoding
7973 them natively is trivial. */
7974 if (BITS_PER_UNIT != CHAR_BIT
7975 || TREE_CODE (type) != ARRAY_TYPE
7976 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7977 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7978 return 0;
7980 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7981 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7982 return 0;
7983 if (off == -1)
7984 off = 0;
7985 len = MIN (total_bytes - off, len);
7986 if (ptr == NULL)
7987 /* Dry run. */;
7988 else
7990 int written = 0;
7991 if (off < TREE_STRING_LENGTH (expr))
7993 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7994 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7996 memset (ptr + written, 0, len - written);
7998 return len;
8002 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8003 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8004 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8005 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8006 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8007 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8008 Return the number of bytes placed in the buffer, or zero upon failure. */
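/* A minimal usage sketch (hypothetical caller, not from this file):

     unsigned char buf[16];
     tree cst = build_int_cst (integer_type_node, 42);
     int n = native_encode_expr (cst, buf, sizeof buf, -1);

   N is the number of bytes written, or 0 on failure; with OFF == -1
   the whole constant must fit into LEN bytes.  */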
8010 int
8011 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8013 /* We don't support starting at negative offset and -1 is special. */
8014 if (off < -1)
8015 return 0;
8017 switch (TREE_CODE (expr))
8019 case INTEGER_CST:
8020 return native_encode_int (expr, ptr, len, off);
8022 case REAL_CST:
8023 return native_encode_real (expr, ptr, len, off);
8025 case FIXED_CST:
8026 return native_encode_fixed (expr, ptr, len, off);
8028 case COMPLEX_CST:
8029 return native_encode_complex (expr, ptr, len, off);
8031 case VECTOR_CST:
8032 return native_encode_vector (expr, ptr, len, off);
8034 case STRING_CST:
8035 return native_encode_string (expr, ptr, len, off);
8037 default:
8038 return 0;
8042 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8043 and larger than or equal to FIELDSIZE bytes, with underlying mode
8044 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8045 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
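/* E.g. (illustrative, target-dependent): find_bitfield_repr_type (3, 4)
   typically yields the 32-bit integer type, the narrowest mode whose
   size is at least 3 and at most 4 bytes.  */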
8047 tree
8048 find_bitfield_repr_type (int fieldsize, int len)
8050 machine_mode mode;
8051 for (int pass = 0; pass < 2; pass++)
8053 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8054 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8055 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8056 && known_eq (GET_MODE_PRECISION (mode),
8057 GET_MODE_BITSIZE (mode))
8058 && known_le (GET_MODE_SIZE (mode), len))
8060 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8061 if (ret && TYPE_MODE (ret) == mode)
8062 return ret;
8066 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8067 if (int_n_enabled_p[i]
8068 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8069 && int_n_trees[i].unsigned_type)
8071 tree ret = int_n_trees[i].unsigned_type;
8072 mode = TYPE_MODE (ret);
8073 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8074 && known_eq (GET_MODE_PRECISION (mode),
8075 GET_MODE_BITSIZE (mode))
8076 && known_le (GET_MODE_SIZE (mode), len))
8077 return ret;
8080 return NULL_TREE;
8083 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8084 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8085 to be non-NULL and OFF zero), then in addition to filling the
8086 bytes pointed to by PTR with the value, also clear any bits pointed
8087 to by MASK that are known to be initialized; keep them as-is for
8088 e.g. uninitialized padding bits or uninitialized fields. */
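/* Illustration (hypothetical layout, 4-byte aligned int): for
   struct { char c; int i; } s = { 'x', 7 }, the value bytes of C and I
   are filled in and their MASK bits cleared, while the MASK bytes
   covering the three bytes of padding after C remain set, recording
   that those bytes are uninitialized.  */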
8090 int
8091 native_encode_initializer (tree init, unsigned char *ptr, int len,
8092 int off, unsigned char *mask)
8094 int r;
8096 /* We don't support starting at negative offset and -1 is special. */
8097 if (off < -1 || init == NULL_TREE)
8098 return 0;
8100 gcc_assert (mask == NULL || (off == 0 && ptr));
8102 STRIP_NOPS (init);
8103 switch (TREE_CODE (init))
8105 case VIEW_CONVERT_EXPR:
8106 case NON_LVALUE_EXPR:
8107 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8108 mask);
8109 default:
8110 r = native_encode_expr (init, ptr, len, off);
8111 if (mask)
8112 memset (mask, 0, r);
8113 return r;
8114 case CONSTRUCTOR:
8115 tree type = TREE_TYPE (init);
8116 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8117 if (total_bytes < 0)
8118 return 0;
8119 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8120 return 0;
8121 int o = off == -1 ? 0 : off;
8122 if (TREE_CODE (type) == ARRAY_TYPE)
8124 tree min_index;
8125 unsigned HOST_WIDE_INT cnt;
8126 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8127 constructor_elt *ce;
8129 if (!TYPE_DOMAIN (type)
8130 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8131 return 0;
8133 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8134 if (fieldsize <= 0)
8135 return 0;
8137 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8138 if (ptr)
8139 memset (ptr, '\0', MIN (total_bytes - off, len));
8141 for (cnt = 0; ; cnt++)
8143 tree val = NULL_TREE, index = NULL_TREE;
8144 HOST_WIDE_INT pos = curpos, count = 0;
8145 bool full = false;
8146 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8148 val = ce->value;
8149 index = ce->index;
8151 else if (mask == NULL
8152 || CONSTRUCTOR_NO_CLEARING (init)
8153 || curpos >= total_bytes)
8154 break;
8155 else
8156 pos = total_bytes;
8158 if (index && TREE_CODE (index) == RANGE_EXPR)
8160 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8161 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8162 return 0;
8163 offset_int wpos
8164 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8165 - wi::to_offset (min_index),
8166 TYPE_PRECISION (sizetype));
8167 wpos *= fieldsize;
8168 if (!wi::fits_shwi_p (wpos))
8169 return 0;
8170 pos = wpos.to_shwi ();
8171 offset_int wcount
8172 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8173 - wi::to_offset (TREE_OPERAND (index, 0)),
8174 TYPE_PRECISION (sizetype));
8175 if (!wi::fits_shwi_p (wcount))
8176 return 0;
8177 count = wcount.to_shwi ();
8179 else if (index)
8181 if (TREE_CODE (index) != INTEGER_CST)
8182 return 0;
8183 offset_int wpos
8184 = wi::sext (wi::to_offset (index)
8185 - wi::to_offset (min_index),
8186 TYPE_PRECISION (sizetype));
8187 wpos *= fieldsize;
8188 if (!wi::fits_shwi_p (wpos))
8189 return 0;
8190 pos = wpos.to_shwi ();
8193 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8195 if (valueinit == -1)
8197 tree zero = build_zero_cst (TREE_TYPE (type));
8198 r = native_encode_initializer (zero, ptr + curpos,
8199 fieldsize, 0,
8200 mask + curpos);
8201 if (TREE_CODE (zero) == CONSTRUCTOR)
8202 ggc_free (zero);
8203 if (!r)
8204 return 0;
8205 valueinit = curpos;
8206 curpos += fieldsize;
8208 while (curpos != pos)
8210 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8211 memcpy (mask + curpos, mask + valueinit, fieldsize);
8212 curpos += fieldsize;
8216 curpos = pos;
8217 if (val)
8218 do
8220 if (off == -1
8221 || (curpos >= off
8222 && (curpos + fieldsize
8223 <= (HOST_WIDE_INT) off + len)))
8225 if (full)
8227 if (ptr)
8228 memcpy (ptr + (curpos - o), ptr + (pos - o),
8229 fieldsize);
8230 if (mask)
8231 memcpy (mask + curpos, mask + pos, fieldsize);
8233 else if (!native_encode_initializer (val,
8234 ptr
8235 ? ptr + curpos - o
8236 : NULL,
8237 fieldsize,
8238 off == -1 ? -1
8239 : 0,
8240 mask
8241 ? mask + curpos
8242 : NULL))
8243 return 0;
8244 else
8246 full = true;
8247 pos = curpos;
8250 else if (curpos + fieldsize > off
8251 && curpos < (HOST_WIDE_INT) off + len)
8253 /* Partial overlap. */
8254 unsigned char *p = NULL;
8255 int no = 0;
8256 int l;
8257 gcc_assert (mask == NULL);
8258 if (curpos >= off)
8260 if (ptr)
8261 p = ptr + curpos - off;
8262 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8263 fieldsize);
8265 else
8267 p = ptr;
8268 no = off - curpos;
8269 l = len;
8271 if (!native_encode_initializer (val, p, l, no, NULL))
8272 return 0;
8274 curpos += fieldsize;
8276 while (count-- != 0);
8278 return MIN (total_bytes - off, len);
8280 else if (TREE_CODE (type) == RECORD_TYPE
8281 || TREE_CODE (type) == UNION_TYPE)
8283 unsigned HOST_WIDE_INT cnt;
8284 constructor_elt *ce;
8285 tree fld_base = TYPE_FIELDS (type);
8286 tree to_free = NULL_TREE;
8288 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8289 if (ptr != NULL)
8290 memset (ptr, '\0', MIN (total_bytes - o, len));
8291 for (cnt = 0; ; cnt++)
8293 tree val = NULL_TREE, field = NULL_TREE;
8294 HOST_WIDE_INT pos = 0, fieldsize;
8295 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8297 if (to_free)
8299 ggc_free (to_free);
8300 to_free = NULL_TREE;
8303 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8305 val = ce->value;
8306 field = ce->index;
8307 if (field == NULL_TREE)
8308 return 0;
8310 pos = int_byte_position (field);
8311 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8312 continue;
8314 else if (mask == NULL
8315 || CONSTRUCTOR_NO_CLEARING (init))
8316 break;
8317 else
8318 pos = total_bytes;
8320 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8322 tree fld;
8323 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8325 if (TREE_CODE (fld) != FIELD_DECL)
8326 continue;
8327 if (fld == field)
8328 break;
8329 if (DECL_PADDING_P (fld))
8330 continue;
8331 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8332 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8333 return 0;
8334 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8335 continue;
8336 break;
8338 if (fld == NULL_TREE)
8340 if (ce == NULL)
8341 break;
8342 return 0;
8344 fld_base = DECL_CHAIN (fld);
8345 if (fld != field)
8347 cnt--;
8348 field = fld;
8349 pos = int_byte_position (field);
8350 val = build_zero_cst (TREE_TYPE (fld));
8351 if (TREE_CODE (val) == CONSTRUCTOR)
8352 to_free = val;
8356 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8357 && TYPE_DOMAIN (TREE_TYPE (field))
8358 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8360 if (mask || off != -1)
8361 return 0;
8362 if (val == NULL_TREE)
8363 continue;
8364 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8365 return 0;
8366 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8367 if (fieldsize < 0
8368 || (int) fieldsize != fieldsize
8369 || (pos + fieldsize) > INT_MAX)
8370 return 0;
8371 if (pos + fieldsize > total_bytes)
8373 if (ptr != NULL && total_bytes < len)
8374 memset (ptr + total_bytes, '\0',
8375 MIN (pos + fieldsize, len) - total_bytes);
8376 total_bytes = pos + fieldsize;
8379 else
8381 if (DECL_SIZE_UNIT (field) == NULL_TREE
8382 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8383 return 0;
8384 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8386 if (fieldsize == 0)
8387 continue;
8389 /* Prepare to deal with integral bit-fields and filter out other
8390 bit-fields that do not start and end on a byte boundary. */
8391 if (DECL_BIT_FIELD (field))
8393 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8394 return 0;
8395 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8396 if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8398 bpos %= BITS_PER_UNIT;
8399 fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8400 epos = fieldsize % BITS_PER_UNIT;
8401 fieldsize += BITS_PER_UNIT - 1;
8402 fieldsize /= BITS_PER_UNIT;
8404 else if (bpos % BITS_PER_UNIT
8405 || DECL_SIZE (field) == NULL_TREE
8406 || !tree_fits_shwi_p (DECL_SIZE (field))
8407 || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8408 return 0;
8411 if (off != -1 && pos + fieldsize <= off)
8412 continue;
8414 if (val == NULL_TREE)
8415 continue;
8417 if (DECL_BIT_FIELD (field)
8418 && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8420 /* FIXME: Handle PDP endian. */
8421 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8422 return 0;
8424 if (TREE_CODE (val) != INTEGER_CST)
8425 return 0;
8427 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8428 tree repr_type = NULL_TREE;
8429 HOST_WIDE_INT rpos = 0;
8430 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8432 rpos = int_byte_position (repr);
8433 repr_type = TREE_TYPE (repr);
8435 else
8437 repr_type = find_bitfield_repr_type (fieldsize, len);
8438 if (repr_type == NULL_TREE)
8439 return 0;
8440 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8441 gcc_assert (repr_size > 0 && repr_size <= len);
8442 if (pos + repr_size <= o + len)
8443 rpos = pos;
8444 else
8446 rpos = o + len - repr_size;
8447 gcc_assert (rpos <= pos);
8451 if (rpos > pos)
8452 return 0;
8453 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8454 int diff = (TYPE_PRECISION (repr_type)
8455 - TYPE_PRECISION (TREE_TYPE (field)));
8456 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8457 if (!BYTES_BIG_ENDIAN)
8458 w = wi::lshift (w, bitoff);
8459 else
8460 w = wi::lshift (w, diff - bitoff);
8461 val = wide_int_to_tree (repr_type, w);
8463 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8464 / BITS_PER_UNIT + 1];
8465 int l = native_encode_int (val, buf, sizeof buf, 0);
8466 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8467 return 0;
8469 if (ptr == NULL)
8470 continue;
8472 /* If the bitfield does not start at a byte boundary, handle
8473 the partial byte at the start. */
8474 if (bpos
8475 && (off == -1 || (pos >= off && len >= 1)))
8477 if (!BYTES_BIG_ENDIAN)
8479 int msk = (1 << bpos) - 1;
8480 buf[pos - rpos] &= ~msk;
8481 buf[pos - rpos] |= ptr[pos - o] & msk;
8482 if (mask)
8484 if (fieldsize > 1 || epos == 0)
8485 mask[pos] &= msk;
8486 else
8487 mask[pos] &= (msk | ~((1 << epos) - 1));
8490 else
8492 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8493 buf[pos - rpos] &= msk;
8494 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8495 if (mask)
8497 if (fieldsize > 1 || epos == 0)
8498 mask[pos] &= ~msk;
8499 else
8500 mask[pos] &= (~msk
8501 | ((1 << (BITS_PER_UNIT - epos))
8502 - 1));
8506 /* If the bitfield does not end at a byte boundary, handle
8507 the partial byte at the end. */
8508 if (epos
8509 && (off == -1
8510 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8512 if (!BYTES_BIG_ENDIAN)
8514 int msk = (1 << epos) - 1;
8515 buf[pos - rpos + fieldsize - 1] &= msk;
8516 buf[pos - rpos + fieldsize - 1]
8517 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8518 if (mask && (fieldsize > 1 || bpos == 0))
8519 mask[pos + fieldsize - 1] &= ~msk;
8521 else
8523 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8524 buf[pos - rpos + fieldsize - 1] &= ~msk;
8525 buf[pos - rpos + fieldsize - 1]
8526 |= ptr[pos + fieldsize - 1 - o] & msk;
8527 if (mask && (fieldsize > 1 || bpos == 0))
8528 mask[pos + fieldsize - 1] &= msk;
8531 if (off == -1
8532 || (pos >= off
8533 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8535 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8536 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8537 memset (mask + pos + (bpos != 0), 0,
8538 fieldsize - (bpos != 0) - (epos != 0));
8540 else
8542 /* Partial overlap. */
8543 HOST_WIDE_INT fsz = fieldsize;
8544 gcc_assert (mask == NULL);
8545 if (pos < off)
8547 fsz -= (off - pos);
8548 pos = off;
8550 if (pos + fsz > (HOST_WIDE_INT) off + len)
8551 fsz = (HOST_WIDE_INT) off + len - pos;
8552 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8554 continue;
8557 if (off == -1
8558 || (pos >= off
8559 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8561 int fldsize = fieldsize;
8562 if (off == -1)
8564 tree fld = DECL_CHAIN (field);
8565 while (fld)
8567 if (TREE_CODE (fld) == FIELD_DECL)
8568 break;
8569 fld = DECL_CHAIN (fld);
8571 if (fld == NULL_TREE)
8572 fldsize = len - pos;
8574 r = native_encode_initializer (val, ptr ? ptr + pos - o
8575 : NULL,
8576 fldsize,
8577 off == -1 ? -1 : 0,
8578 mask ? mask + pos : NULL);
8579 if (!r)
8580 return 0;
8581 if (off == -1
8582 && fldsize != fieldsize
8583 && r > fieldsize
8584 && pos + r > total_bytes)
8585 total_bytes = pos + r;
8587 else
8589 /* Partial overlap. */
8590 unsigned char *p = NULL;
8591 int no = 0;
8592 int l;
8593 gcc_assert (mask == NULL);
8594 if (pos >= off)
8596 if (ptr)
8597 p = ptr + pos - off;
8598 l = MIN ((HOST_WIDE_INT) off + len - pos,
8599 fieldsize);
8601 else
8603 p = ptr;
8604 no = off - pos;
8605 l = len;
8607 if (!native_encode_initializer (val, p, l, no, NULL))
8608 return 0;
8611 return MIN (total_bytes - off, len);
8613 return 0;
8618 /* Subroutine of native_interpret_expr. Interpret the contents of
8619 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8620 If the buffer cannot be interpreted, return NULL_TREE. */
8622 static tree
8623 native_interpret_int (tree type, const unsigned char *ptr, int len)
8625 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8627 if (total_bytes > len
8628 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8629 return NULL_TREE;
8631 wide_int result = wi::from_buffer (ptr, total_bytes);
8633 return wide_int_to_tree (type, result);
8637 /* Subroutine of native_interpret_expr. Interpret the contents of
8638 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8639 If the buffer cannot be interpreted, return NULL_TREE. */
8641 static tree
8642 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8644 scalar_mode mode = SCALAR_TYPE_MODE (type);
8645 int total_bytes = GET_MODE_SIZE (mode);
8646 double_int result;
8647 FIXED_VALUE_TYPE fixed_value;
8649 if (total_bytes > len
8650 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8651 return NULL_TREE;
8653 result = double_int::from_buffer (ptr, total_bytes);
8654 fixed_value = fixed_from_double_int (result, mode);
8656 return build_fixed (type, fixed_value);
8660 /* Subroutine of native_interpret_expr. Interpret the contents of
8661 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8662 If the buffer cannot be interpreted, return NULL_TREE. */
8664 tree
8665 native_interpret_real (tree type, const unsigned char *ptr, int len)
8667 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8668 int total_bytes = GET_MODE_SIZE (mode);
8669 unsigned char value;
8670 /* There are always 32 bits in each long, no matter the size of
8671 the host's long. We handle floating point representations with
8672 up to 192 bits. */
8673 REAL_VALUE_TYPE r;
8674 long tmp[6];
8676 if (total_bytes > len || total_bytes > 24)
8677 return NULL_TREE;
8678 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8680 memset (tmp, 0, sizeof (tmp));
8681 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8682 bitpos += BITS_PER_UNIT)
8684 /* Both OFFSET and BYTE index within a long;
8685 bitpos indexes the whole float. */
8686 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8687 if (UNITS_PER_WORD < 4)
8689 int word = byte / UNITS_PER_WORD;
8690 if (WORDS_BIG_ENDIAN)
8691 word = (words - 1) - word;
8692 offset = word * UNITS_PER_WORD;
8693 if (BYTES_BIG_ENDIAN)
8694 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8695 else
8696 offset += byte % UNITS_PER_WORD;
8698 else
8700 offset = byte;
8701 if (BYTES_BIG_ENDIAN)
8703 /* Reverse bytes within each long, or within the entire float
8704 if it's smaller than a long (for HFmode). */
8705 offset = MIN (3, total_bytes - 1) - offset;
8706 gcc_assert (offset >= 0);
8709 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8711 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8714 real_from_target (&r, tmp, mode);
8715 return build_real (type, r);
8719 /* Subroutine of native_interpret_expr. Interpret the contents of
8720 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8721 If the buffer cannot be interpreted, return NULL_TREE. */
8723 static tree
8724 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8726 tree etype, rpart, ipart;
8727 int size;
8729 etype = TREE_TYPE (type);
8730 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8731 if (size * 2 > len)
8732 return NULL_TREE;
8733 rpart = native_interpret_expr (etype, ptr, size);
8734 if (!rpart)
8735 return NULL_TREE;
8736 ipart = native_interpret_expr (etype, ptr+size, size);
8737 if (!ipart)
8738 return NULL_TREE;
8739 return build_complex (type, rpart, ipart);
8742 /* Read a vector of type TYPE from the target memory image given by BYTES,
8743 which contains LEN bytes. The vector is known to be encodable using
8744 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8746 Return the vector on success, otherwise return null. */
8748 static tree
8749 native_interpret_vector_part (tree type, const unsigned char *bytes,
8750 unsigned int len, unsigned int npatterns,
8751 unsigned int nelts_per_pattern)
8753 tree elt_type = TREE_TYPE (type);
8754 if (VECTOR_BOOLEAN_TYPE_P (type)
8755 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8757 /* This is the only case in which elements can be smaller than a byte.
8758 Element 0 is always in the lsb of the containing byte. */
8759 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8760 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8761 return NULL_TREE;
8763 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8764 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8766 unsigned int bit_index = i * elt_bits;
8767 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8768 unsigned int lsb = bit_index % BITS_PER_UNIT;
8769 builder.quick_push (bytes[byte_index] & (1 << lsb)
8770 ? build_all_ones_cst (elt_type)
8771 : build_zero_cst (elt_type));
8773 return builder.build ();
8776 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8777 if (elt_bytes * npatterns * nelts_per_pattern > len)
8778 return NULL_TREE;
8780 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8781 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8783 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8784 if (!elt)
8785 return NULL_TREE;
8786 builder.quick_push (elt);
8787 bytes += elt_bytes;
8789 return builder.build ();
8792 /* Subroutine of native_interpret_expr. Interpret the contents of
8793 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8794 If the buffer cannot be interpreted, return NULL_TREE. */
8796 static tree
8797 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8799 unsigned HOST_WIDE_INT size;
8801 if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8802 || size > len)
8803 return NULL_TREE;
8805 unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8806 return native_interpret_vector_part (type, ptr, len, count, 1);
8810 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8811 the buffer PTR of length LEN as a constant of type TYPE. For
8812 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8813 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8814 return NULL_TREE. */
8816 tree
8817 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8819 switch (TREE_CODE (type))
8821 case INTEGER_TYPE:
8822 case ENUMERAL_TYPE:
8823 case BOOLEAN_TYPE:
8824 case POINTER_TYPE:
8825 case REFERENCE_TYPE:
8826 case OFFSET_TYPE:
8827 return native_interpret_int (type, ptr, len);
8829 case REAL_TYPE:
8830 if (tree ret = native_interpret_real (type, ptr, len))
8832 /* For floating point values in composite modes, punt if this
8833 folding doesn't preserve bit representation. As the mode doesn't
8834 have fixed precision while GCC pretends it does, there could be
8835 valid values that GCC can't really represent accurately.
8836 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8837 bit combinations which GCC doesn't preserve. */
8838 unsigned char buf[24 * 2];
8839 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8840 int total_bytes = GET_MODE_SIZE (mode);
8841 memcpy (buf + 24, ptr, total_bytes);
8842 clear_type_padding_in_mask (type, buf + 24);
8843 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8844 || memcmp (buf + 24, buf, total_bytes) != 0)
8845 return NULL_TREE;
8846 return ret;
8848 return NULL_TREE;
8850 case FIXED_POINT_TYPE:
8851 return native_interpret_fixed (type, ptr, len);
8853 case COMPLEX_TYPE:
8854 return native_interpret_complex (type, ptr, len);
8856 case VECTOR_TYPE:
8857 return native_interpret_vector (type, ptr, len);
8859 default:
8860 return NULL_TREE;
8864 /* Returns true if we can interpret the contents of a native encoding
8865 as TYPE. */
8867 bool
8868 can_native_interpret_type_p (tree type)
8870 switch (TREE_CODE (type))
8872 case INTEGER_TYPE:
8873 case ENUMERAL_TYPE:
8874 case BOOLEAN_TYPE:
8875 case POINTER_TYPE:
8876 case REFERENCE_TYPE:
8877 case FIXED_POINT_TYPE:
8878 case REAL_TYPE:
8879 case COMPLEX_TYPE:
8880 case VECTOR_TYPE:
8881 case OFFSET_TYPE:
8882 return true;
8883 default:
8884 return false;
8888 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8889 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8891 tree
8892 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8893 int len)
8895 vec<constructor_elt, va_gc> *elts = NULL;
8896 if (TREE_CODE (type) == ARRAY_TYPE)
8898 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8899 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8900 return NULL_TREE;
8902 HOST_WIDE_INT cnt = 0;
8903 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8905 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8906 return NULL_TREE;
8907 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8909 if (eltsz == 0)
8910 cnt = 0;
8911 HOST_WIDE_INT pos = 0;
8912 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8914 tree v = NULL_TREE;
8915 if (pos >= len || pos + eltsz > len)
8916 return NULL_TREE;
8917 if (can_native_interpret_type_p (TREE_TYPE (type)))
8919 v = native_interpret_expr (TREE_TYPE (type),
8920 ptr + off + pos, eltsz);
8921 if (v == NULL_TREE)
8922 return NULL_TREE;
8924 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8925 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8926 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8927 eltsz);
8928 if (v == NULL_TREE)
8929 return NULL_TREE;
8930 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8932 return build_constructor (type, elts);
8934 if (TREE_CODE (type) != RECORD_TYPE)
8935 return NULL_TREE;
8936 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8938 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
8939 || is_empty_type (TREE_TYPE (field)))
8940 continue;
8941 tree fld = field;
8942 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8943 int diff = 0;
8944 tree v = NULL_TREE;
8945 if (DECL_BIT_FIELD (field))
8947 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8948 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8950 poly_int64 bitoffset;
8951 poly_uint64 field_offset, fld_offset;
8952 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8953 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8954 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8955 else
8956 bitoffset = 0;
8957 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8958 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8959 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8960 - TYPE_PRECISION (TREE_TYPE (field)));
8961 if (!bitoffset.is_constant (&bitoff)
8962 || bitoff < 0
8963 || bitoff > diff)
8964 return NULL_TREE;
8966 else
8968 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8969 return NULL_TREE;
8970 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8971 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8972 bpos %= BITS_PER_UNIT;
8973 fieldsize += bpos;
8974 fieldsize += BITS_PER_UNIT - 1;
8975 fieldsize /= BITS_PER_UNIT;
8976 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8977 if (repr_type == NULL_TREE)
8978 return NULL_TREE;
8979 sz = int_size_in_bytes (repr_type);
8980 if (sz < 0 || sz > len)
8981 return NULL_TREE;
8982 pos = int_byte_position (field);
8983 if (pos < 0 || pos > len || pos + fieldsize > len)
8984 return NULL_TREE;
8985 HOST_WIDE_INT rpos;
8986 if (pos + sz <= len)
8987 rpos = pos;
8988 else
8990 rpos = len - sz;
8991 gcc_assert (rpos <= pos);
8993 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8994 pos = rpos;
8995 diff = (TYPE_PRECISION (repr_type)
8996 - TYPE_PRECISION (TREE_TYPE (field)));
8997 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8998 if (v == NULL_TREE)
8999 return NULL_TREE;
9000 fld = NULL_TREE;
9004 if (fld)
9006 sz = int_size_in_bytes (TREE_TYPE (fld));
9007 if (sz < 0 || sz > len)
9008 return NULL_TREE;
9009 tree byte_pos = byte_position (fld);
9010 if (!tree_fits_shwi_p (byte_pos))
9011 return NULL_TREE;
9012 pos = tree_to_shwi (byte_pos);
9013 if (pos < 0 || pos > len || pos + sz > len)
9014 return NULL_TREE;
9016 if (fld == NULL_TREE)
9017 /* Already handled above. */;
9018 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9020 v = native_interpret_expr (TREE_TYPE (fld),
9021 ptr + off + pos, sz);
9022 if (v == NULL_TREE)
9023 return NULL_TREE;
9025 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9026 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9027 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9028 if (v == NULL_TREE)
9029 return NULL_TREE;
9030 if (fld != field)
9032 if (TREE_CODE (v) != INTEGER_CST)
9033 return NULL_TREE;
9035 /* FIXME: Figure out how to handle PDP endian bitfields. */
9036 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9037 return NULL_TREE;
9038 if (!BYTES_BIG_ENDIAN)
9039 v = wide_int_to_tree (TREE_TYPE (field),
9040 wi::lrshift (wi::to_wide (v), bitoff));
9041 else
9042 v = wide_int_to_tree (TREE_TYPE (field),
9043 wi::lrshift (wi::to_wide (v),
9044 diff - bitoff));
9046 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9048 return build_constructor (type, elts);
9051 /* Routines for manipulating native_encode_expr encoded data when the
9052 encoded or extracted constant positions and/or sizes aren't byte aligned. */
9054 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9055 bits between adjacent elements. AMNT should be within
9056 [0, BITS_PER_UNIT).
9057 Example, AMNT = 2:
9058 00011111|11100000 << 2 = 01111111|10000000
9059 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9061 void
9062 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9063 unsigned int amnt)
9065 if (amnt == 0)
9066 return;
9068 unsigned char carry_over = 0U;
9069 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9070 unsigned char clear_mask = (~0U) << amnt;
9072 for (unsigned int i = 0; i < sz; i++)
9074 unsigned prev_carry_over = carry_over;
9075 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9077 ptr[i] <<= amnt;
9078 if (i != 0)
9080 ptr[i] &= clear_mask;
9081 ptr[i] |= prev_carry_over;
9086 /* Like shift_bytes_in_array_left but for big-endian.
9087 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9088 bits between adjacent elements. AMNT should be within
9089 [0, BITS_PER_UNIT).
9090 Example, AMNT = 2:
9091 00011111|11100000 >> 2 = 00000111|11111000
9092 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9094 void
9095 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9096 unsigned int amnt)
9098 if (amnt == 0)
9099 return;
9101 unsigned char carry_over = 0U;
9102 unsigned char carry_mask = ~(~0U << amnt);
9104 for (unsigned int i = 0; i < sz; i++)
9106 unsigned prev_carry_over = carry_over;
9107 carry_over = ptr[i] & carry_mask;
9109 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9110 ptr[i] >>= amnt;
9111 ptr[i] |= prev_carry_over;
9115 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9116 directly on the VECTOR_CST encoding, in a way that works for variable-
9117 length vectors. Return the resulting VECTOR_CST on success or null
9118 on failure. */
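/* Worked example (illustrative, little-endian): the V4SI duplicate
   { 1, 1, 1, 1 } is encoded as a single pattern; converting it to
   V16QI only needs the four bytes 01 00 00 00 of one SImode element,
   which re-interpret as a four-pattern V16QI duplicate, so no
   full-vector buffer is required.  */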
9120 static tree
9121 fold_view_convert_vector_encoding (tree type, tree expr)
9123 tree expr_type = TREE_TYPE (expr);
9124 poly_uint64 type_bits, expr_bits;
9125 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9126 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9127 return NULL_TREE;
9129 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9130 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9131 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9132 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9134 /* We can only preserve the semantics of a stepped pattern if the new
9135 vector element is an integer of the same size. */
9136 if (VECTOR_CST_STEPPED_P (expr)
9137 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9138 return NULL_TREE;
9140 /* The number of bits needed to encode one element from every pattern
9141 of the original vector. */
9142 unsigned int expr_sequence_bits
9143 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9145 /* The number of bits needed to encode one element from every pattern
9146 of the result. */
9147 unsigned int type_sequence_bits
9148 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9150 /* Don't try to read more bytes than are available, which can happen
9151 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9152 The general VIEW_CONVERT handling can cope with that case, so there's
9153 no point complicating things here. */
9154 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9155 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9156 BITS_PER_UNIT);
9157 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9158 if (known_gt (buffer_bits, expr_bits))
9159 return NULL_TREE;
9161 /* Get enough bytes of EXPR to form the new encoding. */
9162 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9163 buffer.quick_grow (buffer_bytes);
9164 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9165 buffer_bits / expr_elt_bits)
9166 != (int) buffer_bytes)
9167 return NULL_TREE;
9169 /* Reencode the bytes as TYPE. */
9170 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9171 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9172 type_npatterns, nelts_per_pattern);
9175 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9176 TYPE at compile-time. If we're unable to perform the conversion
9177 return NULL_TREE. */
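/* E.g. (illustrative, assuming IEEE single and a 32-bit int on a
   little-endian target): VIEW_CONVERT_EXPR<int>(1.0f) folds to
   0x3f800000 by encoding the REAL_CST into BUFFER and re-interpreting
   those bytes as an integer.  */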
9179 static tree
9180 fold_view_convert_expr (tree type, tree expr)
9182 /* We support up to 512-bit values (for V8DFmode). */
9183 unsigned char buffer[64];
9184 int len;
9186 /* Check that the host and target are sane. */
9187 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9188 return NULL_TREE;
9190 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9191 if (tree res = fold_view_convert_vector_encoding (type, expr))
9192 return res;
9194 len = native_encode_expr (expr, buffer, sizeof (buffer));
9195 if (len == 0)
9196 return NULL_TREE;
9198 return native_interpret_expr (type, buffer, len);
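/* Illustrative example (added, not part of the original source): on a
   typical target with IEEE single-precision floats,
   VIEW_CONVERT_EXPR<int>(1.0f) folds here to the integer constant
   0x3f800000, the bit pattern of 1.0f. */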
9201 /* Build an expression for the address of T. Folds away INDIRECT_REF
9202 to avoid confusing the gimplify process. */
9204 tree
9205 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9207 /* The size of the object is not relevant when talking about its address. */
9208 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9209 t = TREE_OPERAND (t, 0);
9211 if (INDIRECT_REF_P (t))
9213 t = TREE_OPERAND (t, 0);
9215 if (TREE_TYPE (t) != ptrtype)
9216 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9218 else if (TREE_CODE (t) == MEM_REF
9219 && integer_zerop (TREE_OPERAND (t, 1)))
9221 t = TREE_OPERAND (t, 0);
9223 if (TREE_TYPE (t) != ptrtype)
9224 t = fold_convert_loc (loc, ptrtype, t);
9226 else if (TREE_CODE (t) == MEM_REF
9227 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9228 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9229 TREE_OPERAND (t, 0),
9230 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9231 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9233 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9235 if (TREE_TYPE (t) != ptrtype)
9236 t = fold_convert_loc (loc, ptrtype, t);
9238 else
9239 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9241 return t;
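/* Illustrative example (added, not part of the original source): for
   "int *p", building the address of "*p" folds back to "p" (with a
   NOP_EXPR if PTRTYPE differs) instead of constructing
   ADDR_EXPR <INDIRECT_REF <p>>. */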
9244 /* Build an expression for the address of T. */
9246 tree
9247 build_fold_addr_expr_loc (location_t loc, tree t)
9249 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9251 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9254 /* Fold a unary expression of code CODE and type TYPE with operand
9255 OP0. Return the folded expression if folding is successful.
9256 Otherwise, return NULL_TREE. */
9258 tree
9259 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9261 tree tem;
9262 tree arg0;
9263 enum tree_code_class kind = TREE_CODE_CLASS (code);
9265 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9266 && TREE_CODE_LENGTH (code) == 1);
9268 arg0 = op0;
9269 if (arg0)
9271 if (CONVERT_EXPR_CODE_P (code)
9272 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9274 /* Don't use STRIP_NOPS, because signedness of argument type
9275 matters. */
9276 STRIP_SIGN_NOPS (arg0);
9278 else
9280 /* Strip any conversions that don't change the mode. This
9281 is safe for every expression, except for a comparison
9282 expression because its signedness is derived from its
9283 operands.
9285 Note that this is done as an internal manipulation within
9286 the constant folder, in order to find the simplest
9287 representation of the arguments so that their form can be
9288 studied. In any case, the appropriate type conversions
9289 should be put back in the tree that will get out of the
9290 constant folder. */
9291 STRIP_NOPS (arg0);
9294 if (CONSTANT_CLASS_P (arg0))
9296 tree tem = const_unop (code, type, arg0);
9297 if (tem)
9299 if (TREE_TYPE (tem) != type)
9300 tem = fold_convert_loc (loc, type, tem);
9301 return tem;
9306 tem = generic_simplify (loc, code, type, op0);
9307 if (tem)
9308 return tem;
9310 if (TREE_CODE_CLASS (code) == tcc_unary)
9312 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9313 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9314 fold_build1_loc (loc, code, type,
9315 fold_convert_loc (loc, TREE_TYPE (op0),
9316 TREE_OPERAND (arg0, 1))));
9317 else if (TREE_CODE (arg0) == COND_EXPR)
9319 tree arg01 = TREE_OPERAND (arg0, 1);
9320 tree arg02 = TREE_OPERAND (arg0, 2);
9321 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9322 arg01 = fold_build1_loc (loc, code, type,
9323 fold_convert_loc (loc,
9324 TREE_TYPE (op0), arg01));
9325 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9326 arg02 = fold_build1_loc (loc, code, type,
9327 fold_convert_loc (loc,
9328 TREE_TYPE (op0), arg02));
9329 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9330 arg01, arg02);
9332 /* If this was a conversion, and all we did was to move it
9333 inside the COND_EXPR, bring it back out. But leave it if
9334 it is a conversion from integer to integer and the
9335 result precision is no wider than a word since such a
9336 conversion is cheap and may be optimized away by combine,
9337 while it couldn't if it were outside the COND_EXPR. Then return
9338 so we don't get into an infinite recursion loop taking the
9339 conversion out and then back in. */
9341 if ((CONVERT_EXPR_CODE_P (code)
9342 || code == NON_LVALUE_EXPR)
9343 && TREE_CODE (tem) == COND_EXPR
9344 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9345 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9346 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9347 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9348 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9349 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9350 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9351 && (INTEGRAL_TYPE_P
9352 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9353 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9354 || flag_syntax_only))
9355 tem = build1_loc (loc, code, type,
9356 build3 (COND_EXPR,
9357 TREE_TYPE (TREE_OPERAND
9358 (TREE_OPERAND (tem, 1), 0)),
9359 TREE_OPERAND (tem, 0),
9360 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9361 TREE_OPERAND (TREE_OPERAND (tem, 2),
9362 0)));
9363 return tem;
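/* Illustrative example (added, not part of the original source):
   distributing a unary operation into a conditional turns
   (float) (c ? i : j) into c ? (float) i : (float) j, which may
   fold further in each arm. */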
9367 switch (code)
9369 case NON_LVALUE_EXPR:
9370 if (!maybe_lvalue_p (op0))
9371 return fold_convert_loc (loc, type, op0);
9372 return NULL_TREE;
9374 CASE_CONVERT:
9375 case FLOAT_EXPR:
9376 case FIX_TRUNC_EXPR:
9377 if (COMPARISON_CLASS_P (op0))
9379 /* If we have (type) (a CMP b) and type is an integral type, return
9380 new expression involving the new type. Canonicalize
9381 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9382 non-integral type.
9383 Do not fold the result, as that would not simplify further; also,
9384 folding it again would result in infinite recursion. */
9385 if (TREE_CODE (type) == BOOLEAN_TYPE)
9386 return build2_loc (loc, TREE_CODE (op0), type,
9387 TREE_OPERAND (op0, 0),
9388 TREE_OPERAND (op0, 1));
9389 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9390 && TREE_CODE (type) != VECTOR_TYPE)
9391 return build3_loc (loc, COND_EXPR, type, op0,
9392 constant_boolean_node (true, type),
9393 constant_boolean_node (false, type));
9396 /* Handle (T *)&A.B.C for A being of type T and B and C
9397 living at offset zero. This occurs frequently in
9398 C++ upcasting and then accessing the base. */
9399 if (TREE_CODE (op0) == ADDR_EXPR
9400 && POINTER_TYPE_P (type)
9401 && handled_component_p (TREE_OPERAND (op0, 0)))
9403 poly_int64 bitsize, bitpos;
9404 tree offset;
9405 machine_mode mode;
9406 int unsignedp, reversep, volatilep;
9407 tree base
9408 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9409 &offset, &mode, &unsignedp, &reversep,
9410 &volatilep);
9411 /* If the reference was to a (constant) zero offset, we can use
9412 the address of the base if it has the same base type
9413 as the result type and the pointer type is unqualified. */
9414 if (!offset
9415 && known_eq (bitpos, 0)
9416 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9417 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9418 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9419 return fold_convert_loc (loc, type,
9420 build_fold_addr_expr_loc (loc, base));
9423 if (TREE_CODE (op0) == MODIFY_EXPR
9424 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9425 /* Detect assigning a bitfield. */
9426 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9427 && DECL_BIT_FIELD
9428 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9430 /* Don't leave an assignment inside a conversion
9431 unless assigning a bitfield. */
9432 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9433 /* First do the assignment, then return converted constant. */
9434 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9435 suppress_warning (tem /* What warning? */);
9436 TREE_USED (tem) = 1;
9437 return tem;
9440 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9441 constants (if x has signed type, the sign bit cannot be set
9442 in c). This folds extension into the BIT_AND_EXPR.
9443 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9444 very likely don't have maximal range for their precision and this
9445 transformation effectively doesn't preserve non-maximal ranges. */
9446 if (TREE_CODE (type) == INTEGER_TYPE
9447 && TREE_CODE (op0) == BIT_AND_EXPR
9448 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9450 tree and_expr = op0;
9451 tree and0 = TREE_OPERAND (and_expr, 0);
9452 tree and1 = TREE_OPERAND (and_expr, 1);
9453 int change = 0;
9455 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9456 || (TYPE_PRECISION (type)
9457 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9458 change = 1;
9459 else if (TYPE_PRECISION (TREE_TYPE (and1))
9460 <= HOST_BITS_PER_WIDE_INT
9461 && tree_fits_uhwi_p (and1))
9463 unsigned HOST_WIDE_INT cst;
9465 cst = tree_to_uhwi (and1);
9466 cst &= HOST_WIDE_INT_M1U
9467 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9468 change = (cst == 0);
9469 if (change
9470 && !flag_syntax_only
9471 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9472 == ZERO_EXTEND))
9474 tree uns = unsigned_type_for (TREE_TYPE (and0));
9475 and0 = fold_convert_loc (loc, uns, and0);
9476 and1 = fold_convert_loc (loc, uns, and1);
9479 if (change)
9481 tem = force_fit_type (type, wi::to_widest (and1), 0,
9482 TREE_OVERFLOW (and1));
9483 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9484 fold_convert_loc (loc, type, and0), tem);
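/* Illustrative example (added, not part of the original source): at
   the tree level, for X of unsigned char type, (int) (X & 0x3f) is
   rewritten as (int) X & 0x3f, moving the widening conversion past
   the masking. */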
9488 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9489 cast (T1)X will fold away. We assume that this happens when X itself
9490 is a cast. */
9491 if (POINTER_TYPE_P (type)
9492 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9493 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9495 tree arg00 = TREE_OPERAND (arg0, 0);
9496 tree arg01 = TREE_OPERAND (arg0, 1);
9498 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9499 when the pointed type needs higher alignment than
9500 the p+ first operand's pointed type. */
9501 if (!in_gimple_form
9502 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9503 && (min_align_of_type (TREE_TYPE (type))
9504 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9505 return NULL_TREE;
9507 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9508 when type is a reference type and arg00's type is not,
9509 because arg00 could be validly nullptr and if arg01 doesn't return,
9510 we don't want false positive binding of reference to nullptr. */
9511 if (TREE_CODE (type) == REFERENCE_TYPE
9512 && !in_gimple_form
9513 && sanitize_flags_p (SANITIZE_NULL)
9514 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9515 return NULL_TREE;
9517 arg00 = fold_convert_loc (loc, type, arg00);
9518 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9521 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9522 of the same precision, and X is an integer type not narrower than
9523 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9524 if (INTEGRAL_TYPE_P (type)
9525 && TREE_CODE (op0) == BIT_NOT_EXPR
9526 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9527 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9528 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9530 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9531 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9532 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9533 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9534 fold_convert_loc (loc, type, tem));
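/* Illustrative example (added, not part of the original source): with
   T1 == unsigned int and T2 == int, (unsigned int) ~(int) x for an
   unsigned int x becomes ~x, since both types have the same
   precision. */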
9537 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9538 type of X and Y (integer types only). */
9539 if (INTEGRAL_TYPE_P (type)
9540 && TREE_CODE (op0) == MULT_EXPR
9541 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9542 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9543 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9544 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9546 /* Be careful not to introduce new overflows. */
9547 tree mult_type;
9548 if (TYPE_OVERFLOW_WRAPS (type))
9549 mult_type = type;
9550 else
9551 mult_type = unsigned_type_for (type);
9553 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9555 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9556 fold_convert_loc (loc, mult_type,
9557 TREE_OPERAND (op0, 0)),
9558 fold_convert_loc (loc, mult_type,
9559 TREE_OPERAND (op0, 1)));
9560 return fold_convert_loc (loc, type, tem);
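/* Illustrative example (added, not part of the original source): with
   the default (non-wrapping) signed semantics, (short) (X * Y) for
   int X and Y becomes (short) ((unsigned short) X * (unsigned short) Y),
   performing the multiplication in the narrower unsigned type so no
   new overflow is introduced. */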
9564 return NULL_TREE;
9566 case VIEW_CONVERT_EXPR:
9567 if (TREE_CODE (op0) == MEM_REF)
9569 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9570 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9571 tem = fold_build2_loc (loc, MEM_REF, type,
9572 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9573 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9574 return tem;
9577 return NULL_TREE;
9579 case NEGATE_EXPR:
9580 tem = fold_negate_expr (loc, arg0);
9581 if (tem)
9582 return fold_convert_loc (loc, type, tem);
9583 return NULL_TREE;
9585 case ABS_EXPR:
9586 /* Convert fabs((double)float) into (double)fabsf(float). */
9587 if (TREE_CODE (arg0) == NOP_EXPR
9588 && TREE_CODE (type) == REAL_TYPE)
9590 tree targ0 = strip_float_extensions (arg0);
9591 if (targ0 != arg0)
9592 return fold_convert_loc (loc, type,
9593 fold_build1_loc (loc, ABS_EXPR,
9594 TREE_TYPE (targ0),
9595 targ0));
9597 return NULL_TREE;
9599 case BIT_NOT_EXPR:
9600 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9601 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9602 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9603 fold_convert_loc (loc, type,
9604 TREE_OPERAND (arg0, 0)))))
9605 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9606 fold_convert_loc (loc, type,
9607 TREE_OPERAND (arg0, 1)));
9608 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9609 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9610 fold_convert_loc (loc, type,
9611 TREE_OPERAND (arg0, 1)))))
9612 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9613 fold_convert_loc (loc, type,
9614 TREE_OPERAND (arg0, 0)), tem);
9616 return NULL_TREE;
9618 case TRUTH_NOT_EXPR:
9619 /* Note that the operand of this must be an int
9620 and its values must be 0 or 1.
9621 ("true" is a fixed value perhaps depending on the language,
9622 but we don't handle values other than 1 correctly yet.) */
9623 tem = fold_truth_not_expr (loc, arg0);
9624 if (!tem)
9625 return NULL_TREE;
9626 return fold_convert_loc (loc, type, tem);
9628 case INDIRECT_REF:
9629 /* Fold *&X to X if X is an lvalue. */
9630 if (TREE_CODE (op0) == ADDR_EXPR)
9632 tree op00 = TREE_OPERAND (op0, 0);
9633 if ((VAR_P (op00)
9634 || TREE_CODE (op00) == PARM_DECL
9635 || TREE_CODE (op00) == RESULT_DECL)
9636 && !TREE_READONLY (op00))
9637 return op00;
9639 return NULL_TREE;
9641 default:
9642 return NULL_TREE;
9643 } /* switch (code) */
9647 /* If the operation was a conversion do _not_ mark a resulting constant
9648 with TREE_OVERFLOW if the original constant was not. These conversions
9649 have implementation defined behavior and retaining the TREE_OVERFLOW
9650 flag here would confuse later passes such as VRP. */
9651 tree
9652 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9653 tree type, tree op0)
9655 tree res = fold_unary_loc (loc, code, type, op0);
9656 if (res
9657 && TREE_CODE (res) == INTEGER_CST
9658 && TREE_CODE (op0) == INTEGER_CST
9659 && CONVERT_EXPR_CODE_P (code))
9660 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9662 return res;
9665 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9666 operands OP0 and OP1. LOC is the location of the resulting expression.
9667 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9668 Return the folded expression if folding is successful. Otherwise,
9669 return NULL_TREE. */
9670 static tree
9671 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9672 tree arg0, tree arg1, tree op0, tree op1)
9674 tree tem;
9676 /* We only do these simplifications if we are optimizing. */
9677 if (!optimize)
9678 return NULL_TREE;
9680 /* Check for things like (A || B) && (A || C). We can convert this
9681 to A || (B && C). Note that either operator can be any of the four
9682 truth and/or operations and the transformation will still be
9683 valid. Also note that we only care about order for the
9684 ANDIF and ORIF operators. If B contains side effects, this
9685 might change the truth-value of A. */
9686 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9687 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9688 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9689 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9690 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9691 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9693 tree a00 = TREE_OPERAND (arg0, 0);
9694 tree a01 = TREE_OPERAND (arg0, 1);
9695 tree a10 = TREE_OPERAND (arg1, 0);
9696 tree a11 = TREE_OPERAND (arg1, 1);
9697 bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9698 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9699 && (code == TRUTH_AND_EXPR
9700 || code == TRUTH_OR_EXPR));
9702 if (operand_equal_p (a00, a10, 0))
9703 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9704 fold_build2_loc (loc, code, type, a01, a11));
9705 else if (commutative && operand_equal_p (a00, a11, 0))
9706 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9707 fold_build2_loc (loc, code, type, a01, a10));
9708 else if (commutative && operand_equal_p (a01, a10, 0))
9709 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9710 fold_build2_loc (loc, code, type, a00, a11));
9712 /* This case is tricky because we must either have commutative
9713 operators or else A10 must not have side-effects. */
9715 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9716 && operand_equal_p (a01, a11, 0))
9717 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9718 fold_build2_loc (loc, code, type, a00, a10),
9719 a01);
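/* Illustrative example (added, not part of the original source):
   (a || b) && (a || c) is rewritten by the code above as
   a || (b && c), so "a" is evaluated only once. */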
9722 /* See if we can build a range comparison. */
9723 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9724 return tem;
9726 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9727 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9729 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9730 if (tem)
9731 return fold_build2_loc (loc, code, type, tem, arg1);
9734 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9735 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9737 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9738 if (tem)
9739 return fold_build2_loc (loc, code, type, arg0, tem);
9742 /* Check for the possibility of merging component references. If our
9743 lhs is another similar operation, try to merge its rhs with our
9744 rhs. Then try to merge our lhs and rhs. */
9745 if (TREE_CODE (arg0) == code
9746 && (tem = fold_truth_andor_1 (loc, code, type,
9747 TREE_OPERAND (arg0, 1), arg1)) != 0)
9748 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9750 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9751 return tem;
9753 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9754 if (param_logical_op_non_short_circuit != -1)
9755 logical_op_non_short_circuit
9756 = param_logical_op_non_short_circuit;
9757 if (logical_op_non_short_circuit
9758 && !sanitize_coverage_p ()
9759 && (code == TRUTH_AND_EXPR
9760 || code == TRUTH_ANDIF_EXPR
9761 || code == TRUTH_OR_EXPR
9762 || code == TRUTH_ORIF_EXPR))
9764 enum tree_code ncode, icode;
9766 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9767 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9768 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9770 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9771 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9772 We don't want to pack more than two leaves into a non-IF AND/OR
9773 expression.
9774 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9775 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9776 If the inner right-hand side of the left-hand operand has
9777 side effects, or isn't simple, then we can't add to it,
9778 as otherwise we might destroy the if-sequence. */
9779 if (TREE_CODE (arg0) == icode
9780 && simple_condition_p (arg1)
9781 /* Needed for sequence points to handle trapping and
9782 side effects. */
9783 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9785 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9786 arg1);
9787 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9788 tem);
9790 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9791 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9792 else if (TREE_CODE (arg1) == icode
9793 && simple_condition_p (arg0)
9794 /* Needed for sequence points to handle trapping and
9795 side effects. */
9796 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9798 tem = fold_build2_loc (loc, ncode, type,
9799 arg0, TREE_OPERAND (arg1, 0));
9800 return fold_build2_loc (loc, icode, type, tem,
9801 TREE_OPERAND (arg1, 1));
9803 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9804 into (A OR B).
9805 For sequence point consistency, we need to check for trapping,
9806 and side-effects. */
9807 else if (code == icode && simple_condition_p (arg0)
9808 && simple_condition_p (arg1))
9809 return fold_build2_loc (loc, ncode, type, arg0, arg1);
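/* Illustrative example (added, not part of the original source): when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and both conditions are simple,
   a != 0 && b != 0 becomes the non-short-circuit TRUTH_AND_EXPR of
   the two comparisons, which can be emitted without branches. */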
9812 return NULL_TREE;
9815 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9816 by changing CODE to reduce the magnitude of constants involved in
9817 ARG0 of the comparison.
9818 Returns a canonicalized comparison tree if a simplification was
9819 possible, otherwise returns NULL_TREE.
9820 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9821 valid if signed overflow is undefined. */
9823 static tree
9824 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9825 tree arg0, tree arg1,
9826 bool *strict_overflow_p)
9828 enum tree_code code0 = TREE_CODE (arg0);
9829 tree t, cst0 = NULL_TREE;
9830 int sgn0;
9832 /* Match A +- CST code arg1. We can change this only if overflow
9833 is undefined. */
9834 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9835 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9836 /* In principle pointers also have undefined overflow behavior,
9837 but that causes problems elsewhere. */
9838 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9839 && (code0 == MINUS_EXPR
9840 || code0 == PLUS_EXPR)
9841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9842 return NULL_TREE;
9844 /* Identify the constant in arg0 and its sign. */
9845 cst0 = TREE_OPERAND (arg0, 1);
9846 sgn0 = tree_int_cst_sgn (cst0);
9848 /* Overflowed constants and zero will cause problems. */
9849 if (integer_zerop (cst0)
9850 || TREE_OVERFLOW (cst0))
9851 return NULL_TREE;
9853 /* See if we can reduce the magnitude of the constant in
9854 arg0 by changing the comparison code. */
9855 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9856 if (code == LT_EXPR
9857 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9858 code = LE_EXPR;
9859 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9860 else if (code == GT_EXPR
9861 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9862 code = GE_EXPR;
9863 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9864 else if (code == LE_EXPR
9865 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9866 code = LT_EXPR;
9867 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9868 else if (code == GE_EXPR
9869 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9870 code = GT_EXPR;
9871 else
9872 return NULL_TREE;
9873 *strict_overflow_p = true;
9875 /* Now build the constant reduced in magnitude. But not if that
9876 would produce one outside of its type's range. */
9877 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9878 && ((sgn0 == 1
9879 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9880 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9881 || (sgn0 == -1
9882 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9883 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9884 return NULL_TREE;
9886 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9887 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9888 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9889 t = fold_convert (TREE_TYPE (arg1), t);
9891 return fold_build2_loc (loc, code, type, t, arg1);
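/* Illustrative example (added, not part of the original source):
   assuming signed overflow is undefined, X + 2 > Y is canonicalized
   here to X + 1 >= Y, reducing the magnitude of the constant by
   one. */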
9894 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9895 overflow further. Try to decrease the magnitude of constants involved
9896 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9897 and put sole constants at the second argument position.
9898 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9900 static tree
9901 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9902 tree arg0, tree arg1)
9904 tree t;
9905 bool strict_overflow_p;
9906 const char * const warnmsg = G_("assuming signed overflow does not occur "
9907 "when reducing constant in comparison");
9909 /* Try canonicalization by simplifying arg0. */
9910 strict_overflow_p = false;
9911 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9912 &strict_overflow_p);
9913 if (t)
9915 if (strict_overflow_p)
9916 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9917 return t;
9920 /* Try canonicalization by simplifying arg1 using the swapped
9921 comparison. */
9922 code = swap_tree_comparison (code);
9923 strict_overflow_p = false;
9924 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9925 &strict_overflow_p);
9926 if (t && strict_overflow_p)
9927 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9928 return t;
9931 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9932 space. This is used to avoid issuing overflow warnings for
9933 expressions like &p->x which cannot wrap. */
9935 static bool
9936 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9938 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9939 return true;
9941 if (maybe_lt (bitpos, 0))
9942 return true;
9944 poly_wide_int wi_offset;
9945 int precision = TYPE_PRECISION (TREE_TYPE (base));
9946 if (offset == NULL_TREE)
9947 wi_offset = wi::zero (precision);
9948 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9949 return true;
9950 else
9951 wi_offset = wi::to_poly_wide (offset);
9953 wi::overflow_type overflow;
9954 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9955 precision);
9956 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9957 if (overflow)
9958 return true;
9960 poly_uint64 total_hwi, size;
9961 if (!total.to_uhwi (&total_hwi)
9962 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9963 &size)
9964 || known_eq (size, 0U))
9965 return true;
9967 if (known_le (total_hwi, size))
9968 return false;
9970 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9971 array. */
9972 if (TREE_CODE (base) == ADDR_EXPR
9973 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9974 &size)
9975 && maybe_ne (size, 0U)
9976 && known_le (total_hwi, size))
9977 return false;
9979 return true;
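/* Illustrative example (added, not part of the original source): for
   "struct S { int x; } *p", the address &p->x has a zero offset into
   an object of nonzero size, so this returns false and no spurious
   wraparound warning is issued for comparisons involving it. */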
9982 /* Return a positive integer when the symbol DECL is known to have
9983 a nonzero address, zero when it's known not to (e.g., it's a weak
9984 symbol), and a negative integer when the symbol is not yet in the
9985 symbol table and so whether or not its address is zero is unknown.
9986 For function-local objects, always return a positive integer. */
9987 static int
9988 maybe_nonzero_address (tree decl)
9990 /* Normally, don't do anything for variables and functions before symtab is
9991 built; it is quite possible that DECL will be declared weak later.
9992 But if folding_initializer, we need a constant answer now, so create
9993 the symtab entry and prevent later weak declaration. */
9994 if (DECL_P (decl) && decl_in_symtab_p (decl))
9995 if (struct symtab_node *symbol
9996 = (folding_initializer
9997 ? symtab_node::get_create (decl)
9998 : symtab_node::get (decl)))
9999 return symbol->nonzero_address ();
10001 /* Function local objects are never NULL. */
10002 if (DECL_P (decl)
10003 && (DECL_CONTEXT (decl)
10004 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10005 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10006 return 1;
10008 return -1;
10011 /* Subroutine of fold_binary. This routine performs all of the
10012 transformations that are common to the equality/inequality
10013 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10014 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10015 fold_binary should call fold_binary. Fold a comparison with
10016 tree code CODE and type TYPE with operands OP0 and OP1. Return
10017 the folded comparison or NULL_TREE. */
10019 static tree
10020 fold_comparison (location_t loc, enum tree_code code, tree type,
10021 tree op0, tree op1)
10023 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10024 tree arg0, arg1, tem;
10026 arg0 = op0;
10027 arg1 = op1;
10029 STRIP_SIGN_NOPS (arg0);
10030 STRIP_SIGN_NOPS (arg1);
10032 /* For comparisons of pointers we can decompose it to a compile time
10033 comparison of the base objects and the offsets into the object.
10034 This requires at least one operand being an ADDR_EXPR or a
10035 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10036 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10037 && (TREE_CODE (arg0) == ADDR_EXPR
10038 || TREE_CODE (arg1) == ADDR_EXPR
10039 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10040 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10042 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10043 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10044 machine_mode mode;
10045 int volatilep, reversep, unsignedp;
10046 bool indirect_base0 = false, indirect_base1 = false;
10048 /* Get base and offset for the access. Strip ADDR_EXPR for
10049 get_inner_reference, but put it back by stripping INDIRECT_REF
10050 off the base object if possible. indirect_baseN will be true
10051 if baseN is not an address but refers to the object itself. */
10052 base0 = arg0;
10053 if (TREE_CODE (arg0) == ADDR_EXPR)
10055 base0
10056 = get_inner_reference (TREE_OPERAND (arg0, 0),
10057 &bitsize, &bitpos0, &offset0, &mode,
10058 &unsignedp, &reversep, &volatilep);
10059 if (INDIRECT_REF_P (base0))
10060 base0 = TREE_OPERAND (base0, 0);
10061 else
10062 indirect_base0 = true;
10064 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10066 base0 = TREE_OPERAND (arg0, 0);
10067 STRIP_SIGN_NOPS (base0);
10068 if (TREE_CODE (base0) == ADDR_EXPR)
10070 base0
10071 = get_inner_reference (TREE_OPERAND (base0, 0),
10072 &bitsize, &bitpos0, &offset0, &mode,
10073 &unsignedp, &reversep, &volatilep);
10074 if (INDIRECT_REF_P (base0))
10075 base0 = TREE_OPERAND (base0, 0);
10076 else
10077 indirect_base0 = true;
10079 if (offset0 == NULL_TREE || integer_zerop (offset0))
10080 offset0 = TREE_OPERAND (arg0, 1);
10081 else
10082 offset0 = size_binop (PLUS_EXPR, offset0,
10083 TREE_OPERAND (arg0, 1));
10084 if (poly_int_tree_p (offset0))
10086 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10087 TYPE_PRECISION (sizetype));
10088 tem <<= LOG2_BITS_PER_UNIT;
10089 tem += bitpos0;
10090 if (tem.to_shwi (&bitpos0))
10091 offset0 = NULL_TREE;
10095 base1 = arg1;
10096 if (TREE_CODE (arg1) == ADDR_EXPR)
10098 base1
10099 = get_inner_reference (TREE_OPERAND (arg1, 0),
10100 &bitsize, &bitpos1, &offset1, &mode,
10101 &unsignedp, &reversep, &volatilep);
10102 if (INDIRECT_REF_P (base1))
10103 base1 = TREE_OPERAND (base1, 0);
10104 else
10105 indirect_base1 = true;
10107 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10109 base1 = TREE_OPERAND (arg1, 0);
10110 STRIP_SIGN_NOPS (base1);
10111 if (TREE_CODE (base1) == ADDR_EXPR)
10113 base1
10114 = get_inner_reference (TREE_OPERAND (base1, 0),
10115 &bitsize, &bitpos1, &offset1, &mode,
10116 &unsignedp, &reversep, &volatilep);
10117 if (INDIRECT_REF_P (base1))
10118 base1 = TREE_OPERAND (base1, 0);
10119 else
10120 indirect_base1 = true;
10122 if (offset1 == NULL_TREE || integer_zerop (offset1))
10123 offset1 = TREE_OPERAND (arg1, 1);
10124 else
10125 offset1 = size_binop (PLUS_EXPR, offset1,
10126 TREE_OPERAND (arg1, 1));
10127 if (poly_int_tree_p (offset1))
10129 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10130 TYPE_PRECISION (sizetype));
10131 tem <<= LOG2_BITS_PER_UNIT;
10132 tem += bitpos1;
10133 if (tem.to_shwi (&bitpos1))
10134 offset1 = NULL_TREE;
10138 /* If we have equivalent bases we might be able to simplify. */
10139 if (indirect_base0 == indirect_base1
10140 && operand_equal_p (base0, base1,
10141 indirect_base0 ? OEP_ADDRESS_OF : 0))
10143 /* We can fold this expression to a constant if the non-constant
10144 offset parts are equal. */
10145 if ((offset0 == offset1
10146 || (offset0 && offset1
10147 && operand_equal_p (offset0, offset1, 0)))
10148 && (equality_code
10149 || (indirect_base0
10150 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10151 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10153 if (!equality_code
10154 && maybe_ne (bitpos0, bitpos1)
10155 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10156 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10157 fold_overflow_warning (("assuming pointer wraparound does not "
10158 "occur when comparing P +- C1 with "
10159 "P +- C2"),
10160 WARN_STRICT_OVERFLOW_CONDITIONAL);
10162 switch (code)
10164 case EQ_EXPR:
10165 if (known_eq (bitpos0, bitpos1))
10166 return constant_boolean_node (true, type);
10167 if (known_ne (bitpos0, bitpos1))
10168 return constant_boolean_node (false, type);
10169 break;
10170 case NE_EXPR:
10171 if (known_ne (bitpos0, bitpos1))
10172 return constant_boolean_node (true, type);
10173 if (known_eq (bitpos0, bitpos1))
10174 return constant_boolean_node (false, type);
10175 break;
10176 case LT_EXPR:
10177 if (known_lt (bitpos0, bitpos1))
10178 return constant_boolean_node (true, type);
10179 if (known_ge (bitpos0, bitpos1))
10180 return constant_boolean_node (false, type);
10181 break;
10182 case LE_EXPR:
10183 if (known_le (bitpos0, bitpos1))
10184 return constant_boolean_node (true, type);
10185 if (known_gt (bitpos0, bitpos1))
10186 return constant_boolean_node (false, type);
10187 break;
10188 case GE_EXPR:
10189 if (known_ge (bitpos0, bitpos1))
10190 return constant_boolean_node (true, type);
10191 if (known_lt (bitpos0, bitpos1))
10192 return constant_boolean_node (false, type);
10193 break;
10194 case GT_EXPR:
10195 if (known_gt (bitpos0, bitpos1))
10196 return constant_boolean_node (true, type);
10197 if (known_le (bitpos0, bitpos1))
10198 return constant_boolean_node (false, type);
10199 break;
10200 default:;
10203 /* We can simplify the comparison to a comparison of the variable
10204 offset parts if the constant offset parts are equal.
10205 Be careful to use signed sizetype here because otherwise we
10206 mess with array offsets in the wrong way. This is possible
10207 because pointer arithmetic is restricted to remain within an
10208 object and overflow on pointer differences is undefined as of
10209 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10210 else if (known_eq (bitpos0, bitpos1)
10211 && (equality_code
10212 || (indirect_base0
10213 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10214 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10216 /* By converting to signed sizetype we cover middle-end pointer
10217 arithmetic which operates on unsigned pointer types of size
10218 type size and ARRAY_REF offsets which are properly sign or
10219 zero extended from their type in case it is narrower than
10220 sizetype. */
10221 if (offset0 == NULL_TREE)
10222 offset0 = build_int_cst (ssizetype, 0);
10223 else
10224 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10225 if (offset1 == NULL_TREE)
10226 offset1 = build_int_cst (ssizetype, 0);
10227 else
10228 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10230 if (!equality_code
10231 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10232 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10233 fold_overflow_warning (("assuming pointer wraparound does not "
10234 "occur when comparing P +- C1 with "
10235 "P +- C2"),
10236 WARN_STRICT_OVERFLOW_COMPARISON);
10238 return fold_build2_loc (loc, code, type, offset0, offset1);
10241 /* For equal offsets we can simplify to a comparison of the
10242 base addresses. */
10243 else if (known_eq (bitpos0, bitpos1)
10244 && (indirect_base0
10245 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10246 && (indirect_base1
10247 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10248 && ((offset0 == offset1)
10249 || (offset0 && offset1
10250 && operand_equal_p (offset0, offset1, 0))))
10252 if (indirect_base0)
10253 base0 = build_fold_addr_expr_loc (loc, base0);
10254 if (indirect_base1)
10255 base1 = build_fold_addr_expr_loc (loc, base1);
10256 return fold_build2_loc (loc, code, type, base0, base1);
10258 /* Comparison between an ordinary (non-weak) symbol and a null
10259 pointer can be eliminated since such symbols must have a
10260 non-null address. In C, relational expressions between pointers
10261 to objects and null pointers are undefined. The results
10262 below follow the C++ rules with the additional property that
10263 every object pointer compares greater than a null pointer.
10265 else if (((DECL_P (base0)
10266 && maybe_nonzero_address (base0) > 0
10267 /* Avoid folding references to struct members at offset 0 to
10268 prevent tests like '&ptr->firstmember == 0' from getting
10269 eliminated. When ptr is null, although the -> expression
10270 is strictly speaking invalid, GCC retains it as a matter
10271 of QoI. See PR c/44555. */
10272 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10273 || CONSTANT_CLASS_P (base0))
10274 && indirect_base0
10275 /* The caller guarantees that when one of the arguments is
10276 constant (i.e., null in this case) it is second. */
10277 && integer_zerop (arg1))
10279 switch (code)
10281 case EQ_EXPR:
10282 case LE_EXPR:
10283 case LT_EXPR:
10284 return constant_boolean_node (false, type);
10285 case GE_EXPR:
10286 case GT_EXPR:
10287 case NE_EXPR:
10288 return constant_boolean_node (true, type);
10289 default:
10290 gcc_unreachable ();
10295 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10296 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10297 the resulting offset is smaller in absolute value than the
10298 original one and has the same sign. */
10299 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10300 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10301 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10302 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10303 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10304 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10305 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10306 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10308 tree const1 = TREE_OPERAND (arg0, 1);
10309 tree const2 = TREE_OPERAND (arg1, 1);
10310 tree variable1 = TREE_OPERAND (arg0, 0);
10311 tree variable2 = TREE_OPERAND (arg1, 0);
10312 tree cst;
10313 const char * const warnmsg = G_("assuming signed overflow does not "
10314 "occur when combining constants around "
10315 "a comparison");
10317 /* Put the constant on the side where it doesn't overflow and is
10318 of lower absolute value and of the same sign as before. */
10319 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10320 ? MINUS_EXPR : PLUS_EXPR,
10321 const2, const1);
10322 if (!TREE_OVERFLOW (cst)
10323 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10324 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10326 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10327 return fold_build2_loc (loc, code, type,
10328 variable1,
10329 fold_build2_loc (loc, TREE_CODE (arg1),
10330 TREE_TYPE (arg1),
10331 variable2, cst));
10334 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10335 ? MINUS_EXPR : PLUS_EXPR,
10336 const1, const2);
10337 if (!TREE_OVERFLOW (cst)
10338 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10339 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10341 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10342 return fold_build2_loc (loc, code, type,
10343 fold_build2_loc (loc, TREE_CODE (arg0),
10344 TREE_TYPE (arg0),
10345 variable1, cst),
10346 variable2);
10350 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10351 if (tem)
10352 return tem;
10354 /* If we are comparing an expression that just has comparisons
10355 of two integer values, arithmetic expressions of those comparisons,
10356 and constants, we can simplify it. There are only three cases
10357 to check: the two values can either be equal, the first can be
10358 greater, or the second can be greater. Fold the expression for
10359 those three values. Since each value must be 0 or 1, we have
10360 eight possibilities, each of which corresponds to the constant 0
10361 or 1 or one of the six possible comparisons.
10363 This handles common cases like (a > b) == 0 but also handles
10364 expressions like ((x > y) - (y > x)) > 0, which supposedly
10365 occur in macroized code. */
10367 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10369 tree cval1 = 0, cval2 = 0;
10371 if (twoval_comparison_p (arg0, &cval1, &cval2)
10372 /* Don't handle degenerate cases here; they should already
10373 have been handled anyway. */
10374 && cval1 != 0 && cval2 != 0
10375 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10376 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10377 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10378 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10379 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10380 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10381 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10383 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10384 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10386 /* We can't just pass T to eval_subst in case cval1 or cval2
10387 was the same as ARG1. */
10389 tree high_result
10390 = fold_build2_loc (loc, code, type,
10391 eval_subst (loc, arg0, cval1, maxval,
10392 cval2, minval),
10393 arg1);
10394 tree equal_result
10395 = fold_build2_loc (loc, code, type,
10396 eval_subst (loc, arg0, cval1, maxval,
10397 cval2, maxval),
10398 arg1);
10399 tree low_result
10400 = fold_build2_loc (loc, code, type,
10401 eval_subst (loc, arg0, cval1, minval,
10402 cval2, maxval),
10403 arg1);
10405 /* All three of these results should be 0 or 1. Confirm they are.
10406 Then use those values to select the proper code to use. */
10408 if (TREE_CODE (high_result) == INTEGER_CST
10409 && TREE_CODE (equal_result) == INTEGER_CST
10410 && TREE_CODE (low_result) == INTEGER_CST)
10412 /* Make a 3-bit mask with the high-order bit being the
10413 value for `>', the next for '=', and the low for '<'. */
10414 switch ((integer_onep (high_result) * 4)
10415 + (integer_onep (equal_result) * 2)
10416 + integer_onep (low_result))
10418 case 0:
10419 /* Always false. */
10420 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10421 case 1:
10422 code = LT_EXPR;
10423 break;
10424 case 2:
10425 code = EQ_EXPR;
10426 break;
10427 case 3:
10428 code = LE_EXPR;
10429 break;
10430 case 4:
10431 code = GT_EXPR;
10432 break;
10433 case 5:
10434 code = NE_EXPR;
10435 break;
10436 case 6:
10437 code = GE_EXPR;
10438 break;
10439 case 7:
10440 /* Always true. */
10441 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10444 return fold_build2_loc (loc, code, type, cval1, cval2);
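/* Illustrative example (added, not part of the original source): for
   (a > b) == 0 the three substitutions evaluate to 0, 1 and 1, giving
   the mask 0b011, so the whole expression folds to a <= b. */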
10449 return NULL_TREE;
10453 /* Subroutine of fold_binary. Optimize complex multiplications of the
10454 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10455 argument EXPR represents the expression "z" of type TYPE. */
10457 static tree
10458 fold_mult_zconjz (location_t loc, tree type, tree expr)
10460 tree itype = TREE_TYPE (type);
10461 tree rpart, ipart, tem;
10463 if (TREE_CODE (expr) == COMPLEX_EXPR)
10465 rpart = TREE_OPERAND (expr, 0);
10466 ipart = TREE_OPERAND (expr, 1);
10468 else if (TREE_CODE (expr) == COMPLEX_CST)
10470 rpart = TREE_REALPART (expr);
10471 ipart = TREE_IMAGPART (expr);
10473 else
10475 expr = save_expr (expr);
10476 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10477 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10480 rpart = save_expr (rpart);
10481 ipart = save_expr (ipart);
10482 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10483 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10484 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10485 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10486 build_zero_cst (itype));
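/* Illustrative example (added, not part of the original source): for
   z = 3.0 + 4.0i, z * conj(z) folds to 9.0 + 16.0 = 25.0 with a zero
   imaginary part, avoiding a full complex multiplication. */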
10490 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10491 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10492 true if successful. */
10494 static bool
10495 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10497 unsigned HOST_WIDE_INT i, nunits;
10499 if (TREE_CODE (arg) == VECTOR_CST
10500 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10502 for (i = 0; i < nunits; ++i)
10503 elts[i] = VECTOR_CST_ELT (arg, i);
10505 else if (TREE_CODE (arg) == CONSTRUCTOR)
10507 constructor_elt *elt;
10509 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10510 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10511 return false;
10512 else
10513 elts[i] = elt->value;
10515 else
10516 return false;
10517 for (; i < nelts; i++)
10518 elts[i]
10519 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10520 return true;
10523 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10524 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10525 NULL_TREE otherwise. */
10527 tree
10528 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10530 unsigned int i;
10531 unsigned HOST_WIDE_INT nelts;
10532 bool need_ctor = false;
10534 if (!sel.length ().is_constant (&nelts))
10535 return NULL_TREE;
10536 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10537 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10538 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10539 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10540 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10541 return NULL_TREE;
10543 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10544 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10545 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10546 return NULL_TREE;
10548 tree_vector_builder out_elts (type, nelts, 1);
10549 for (i = 0; i < nelts; i++)
10551 HOST_WIDE_INT index;
10552 if (!sel[i].is_constant (&index))
10553 return NULL_TREE;
10554 if (!CONSTANT_CLASS_P (in_elts[index]))
10555 need_ctor = true;
10556 out_elts.quick_push (unshare_expr (in_elts[index]));
10559 if (need_ctor)
10561 vec<constructor_elt, va_gc> *v;
10562 vec_alloc (v, nelts);
10563 for (i = 0; i < nelts; i++)
10564 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10565 return build_constructor (type, v);
10567 else
10568 return out_elts.build ();
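/* Illustrative example (added, not part of the original source):
   permuting { a, b, c, d } and { e, f, g, h } with the selector
   { 0, 4, 1, 5 } produces { a, e, b, f }; a CONSTRUCTOR is built only
   when some selected element is not a constant. */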
10571 /* Try to fold a pointer difference of type TYPE between two address expressions of
10572 array references AREF0 and AREF1 using location LOC. Return a
10573 simplified expression for the difference or NULL_TREE. */
10575 static tree
10576 fold_addr_of_array_ref_difference (location_t loc, tree type,
10577 tree aref0, tree aref1,
10578 bool use_pointer_diff)
10580 tree base0 = TREE_OPERAND (aref0, 0);
10581 tree base1 = TREE_OPERAND (aref1, 0);
10582 tree base_offset = build_int_cst (type, 0);
10584 /* If the bases are array references as well, recurse. If the bases
10585 are pointer indirections compute the difference of the pointers.
10586 If the bases are equal, we are set. */
10587 if ((TREE_CODE (base0) == ARRAY_REF
10588 && TREE_CODE (base1) == ARRAY_REF
10589 && (base_offset
10590 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10591 use_pointer_diff)))
10592 || (INDIRECT_REF_P (base0)
10593 && INDIRECT_REF_P (base1)
10594 && (base_offset
10595 = use_pointer_diff
10596 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10597 TREE_OPERAND (base0, 0),
10598 TREE_OPERAND (base1, 0))
10599 : fold_binary_loc (loc, MINUS_EXPR, type,
10600 fold_convert (type,
10601 TREE_OPERAND (base0, 0)),
10602 fold_convert (type,
10603 TREE_OPERAND (base1, 0)))))
10604 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10606 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10607 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10608 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10609 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10610 return fold_build2_loc (loc, PLUS_EXPR, type,
10611 base_offset,
10612 fold_build2_loc (loc, MULT_EXPR, type,
10613 diff, esz));
10615 return NULL_TREE;
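/* Illustrative example (added, not part of the original source):
   &a[i] - &a[j] for an array of 4-byte ints folds to (i - j) * 4,
   with the base offset computed recursively when the bases are
   themselves array references. */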
10618 /* If the real or vector real constant CST of type TYPE has an exact
10619 inverse, return it, else return NULL. */
10621 tree
10622 exact_inverse (tree type, tree cst)
10624 REAL_VALUE_TYPE r;
10625 tree unit_type;
10626 machine_mode mode;
10628 switch (TREE_CODE (cst))
10630 case REAL_CST:
10631 r = TREE_REAL_CST (cst);
10633 if (exact_real_inverse (TYPE_MODE (type), &r))
10634 return build_real (type, r);
10636 return NULL_TREE;
10638 case VECTOR_CST:
10640 unit_type = TREE_TYPE (type);
10641 mode = TYPE_MODE (unit_type);
10643 tree_vector_builder elts;
10644 if (!elts.new_unary_operation (type, cst, false))
10645 return NULL_TREE;
10646 unsigned int count = elts.encoded_nelts ();
10647 for (unsigned int i = 0; i < count; ++i)
10649 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10650 if (!exact_real_inverse (mode, &r))
10651 return NULL_TREE;
10652 elts.quick_push (build_real (unit_type, r));
10655 return elts.build ();
10658 default:
10659 return NULL_TREE;
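/* Illustrative example (added, not part of the original source):
   0.25 has the exact inverse 4.0, so a caller can turn x / 0.25 into
   x * 4.0, whereas 3.0 has no exact inverse because 1/3 is not
   representable in binary floating point. */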
10663 /* Mask out the tz least significant bits of X of type TYPE where
10664 tz is the number of trailing zeroes in Y. */
10665 static wide_int
10666 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10668 int tz = wi::ctz (y);
10669 if (tz > 0)
10670 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10671 return x;
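/* Illustrative example (added, not part of the original source): for
   Y == 24 (binary 11000, three trailing zeroes) the result is X & ~7,
   i.e. the three least significant bits of X are cleared. */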
10674 /* Return true when T is an address and is known to be nonzero.
10675 For floating point we further ensure that T is not denormal.
10676 Similar logic is present in nonzero_address in rtlanal.h.
10678 If the return value is based on the assumption that signed overflow
10679 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10680 change *STRICT_OVERFLOW_P. */
10682 static bool
10683 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10685 tree type = TREE_TYPE (t);
10686 enum tree_code code;
10688 /* Doing something useful for floating point would need more work. */
10689 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10690 return false;
10692 code = TREE_CODE (t);
10693 switch (TREE_CODE_CLASS (code))
10695 case tcc_unary:
10696 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10697 strict_overflow_p);
10698 case tcc_binary:
10699 case tcc_comparison:
10700 return tree_binary_nonzero_warnv_p (code, type,
10701 TREE_OPERAND (t, 0),
10702 TREE_OPERAND (t, 1),
10703 strict_overflow_p);
10704 case tcc_constant:
10705 case tcc_declaration:
10706 case tcc_reference:
10707 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10709 default:
10710 break;
10713 switch (code)
10715 case TRUTH_NOT_EXPR:
10716 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10717 strict_overflow_p);
10719 case TRUTH_AND_EXPR:
10720 case TRUTH_OR_EXPR:
10721 case TRUTH_XOR_EXPR:
10722 return tree_binary_nonzero_warnv_p (code, type,
10723 TREE_OPERAND (t, 0),
10724 TREE_OPERAND (t, 1),
10725 strict_overflow_p);
10727 case COND_EXPR:
10728 case CONSTRUCTOR:
10729 case OBJ_TYPE_REF:
10730 case ADDR_EXPR:
10731 case WITH_SIZE_EXPR:
10732 case SSA_NAME:
10733 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10735 case COMPOUND_EXPR:
10736 case MODIFY_EXPR:
10737 case BIND_EXPR:
10738 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10739 strict_overflow_p);
10741 case SAVE_EXPR:
10742 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10743 strict_overflow_p);
10745 case CALL_EXPR:
10747 tree fndecl = get_callee_fndecl (t);
10748 if (!fndecl) return false;
10749 if (flag_delete_null_pointer_checks && !flag_check_new
10750 && DECL_IS_OPERATOR_NEW_P (fndecl)
10751 && !TREE_NOTHROW (fndecl))
10752 return true;
10753 if (flag_delete_null_pointer_checks
10754 && lookup_attribute ("returns_nonnull",
10755 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10756 return true;
10757 return alloca_call_p (t);
10760 default:
10761 break;
10763 return false;
10766 /* Return true when T is an address and is known to be nonzero.
10767 Handle warnings about undefined signed overflow. */
10769 bool
10770 tree_expr_nonzero_p (tree t)
10772 bool ret, strict_overflow_p;
10774 strict_overflow_p = false;
10775 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10776 if (strict_overflow_p)
10777 fold_overflow_warning (("assuming signed overflow does not occur when "
10778 "determining that expression is always "
10779 "non-zero"),
10780 WARN_STRICT_OVERFLOW_MISC);
10781 return ret;
10784 /* Return true if T is known not to be equal to an integer W. */
10786 bool
10787 expr_not_equal_to (tree t, const wide_int &w)
10789 int_range_max vr;
10790 switch (TREE_CODE (t))
10792 case INTEGER_CST:
10793 return wi::to_wide (t) != w;
10795 case SSA_NAME:
10796 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10797 return false;
10799 if (cfun)
10800 get_range_query (cfun)->range_of_expr (vr, t);
10801 else
10802 get_global_range_query ()->range_of_expr (vr, t);
10804 if (!vr.undefined_p () && !vr.contains_p (w))
10805 return true;
10806 /* If T has some known zero bits and W has any of those bits set,
10807 then T is known not to be equal to W. */
10808 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10809 TYPE_PRECISION (TREE_TYPE (t))), 0))
10810 return true;
10811 return false;
10813 default:
10814 return false;
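/* Illustrative example (added, not part of the original source): if
   the nonzero bits of T are known to be 0xff (T fits in one byte) and
   W is 0x100, then W has a bit set that can never be set in T, so
   T != W is known to hold. */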
10818 /* Fold a binary expression of code CODE and type TYPE with operands
10819 OP0 and OP1. LOC is the location of the resulting expression.
10820 Return the folded expression if folding is successful. Otherwise,
10821 return NULL_TREE. */
10823 tree
10824 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10825 tree op0, tree op1)
10827 enum tree_code_class kind = TREE_CODE_CLASS (code);
10828 tree arg0, arg1, tem;
10829 tree t1 = NULL_TREE;
10830 bool strict_overflow_p;
10831 unsigned int prec;
10833 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10834 && TREE_CODE_LENGTH (code) == 2
10835 && op0 != NULL_TREE
10836 && op1 != NULL_TREE);
10838 arg0 = op0;
10839 arg1 = op1;
10841 /* Strip any conversions that don't change the mode. This is
10842 safe for every expression, except for a comparison expression
10843 because its signedness is derived from its operands. So, in
10844 the latter case, only strip conversions that don't change the
10845 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10846 preserved.
10848 Note that this is done as an internal manipulation within the
10849 constant folder, in order to find the simplest representation
10850 of the arguments so that their form can be studied. In any
10851 case, the appropriate type conversions should be put back in
10852 the tree that will get out of the constant folder. */
10854 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10856 STRIP_SIGN_NOPS (arg0);
10857 STRIP_SIGN_NOPS (arg1);
10859 else
10861 STRIP_NOPS (arg0);
10862 STRIP_NOPS (arg1);
10865 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10866 constant but we can't do arithmetic on them. */
10867 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10869 tem = const_binop (code, type, arg0, arg1);
10870 if (tem != NULL_TREE)
10872 if (TREE_TYPE (tem) != type)
10873 tem = fold_convert_loc (loc, type, tem);
10874 return tem;
10878 /* If this is a commutative operation, and ARG0 is a constant, move it
10879 to ARG1 to reduce the number of tests below. */
10880 if (commutative_tree_code (code)
10881 && tree_swap_operands_p (arg0, arg1))
10882 return fold_build2_loc (loc, code, type, op1, op0);
10884 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10885 to ARG1 to reduce the number of tests below. */
10886 if (kind == tcc_comparison
10887 && tree_swap_operands_p (arg0, arg1))
10888 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10890 tem = generic_simplify (loc, code, type, op0, op1);
10891 if (tem)
10892 return tem;
10894 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10896 First check for cases where an arithmetic operation is applied to a
10897 compound, conditional, or comparison operation. Push the arithmetic
10898 operation inside the compound or conditional to see if any folding
10899 can then be done. Convert comparison to conditional for this purpose.
10900 This also optimizes non-constant cases that used to be done in
10901 expand_expr.
10903 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10904 where one of the operands is a truth value and the other is a truth
10905 value or a BIT_AND_EXPR with the constant 1. In that case, the
10906 code below would make the expression more complex. Change it to a
10907 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10908 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10910 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10911 || code == EQ_EXPR || code == NE_EXPR)
10912 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10913 && ((truth_value_p (TREE_CODE (arg0))
10914 && (truth_value_p (TREE_CODE (arg1))
10915 || (TREE_CODE (arg1) == BIT_AND_EXPR
10916 && integer_onep (TREE_OPERAND (arg1, 1)))))
10917 || (truth_value_p (TREE_CODE (arg1))
10918 && (truth_value_p (TREE_CODE (arg0))
10919 || (TREE_CODE (arg0) == BIT_AND_EXPR
10920 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10922 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10923 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10924 : TRUTH_XOR_EXPR,
10925 boolean_type_node,
10926 fold_convert_loc (loc, boolean_type_node, arg0),
10927 fold_convert_loc (loc, boolean_type_node, arg1));
10929 if (code == EQ_EXPR)
10930 tem = invert_truthvalue_loc (loc, tem);
10932 return fold_convert_loc (loc, type, tem);
10935 if (TREE_CODE_CLASS (code) == tcc_binary
10936 || TREE_CODE_CLASS (code) == tcc_comparison)
10938 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10940 tem = fold_build2_loc (loc, code, type,
10941 fold_convert_loc (loc, TREE_TYPE (op0),
10942 TREE_OPERAND (arg0, 1)), op1);
10943 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10944 tem);
10946 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10948 tem = fold_build2_loc (loc, code, type, op0,
10949 fold_convert_loc (loc, TREE_TYPE (op1),
10950 TREE_OPERAND (arg1, 1)));
10951 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10952 tem);
10955 if (TREE_CODE (arg0) == COND_EXPR
10956 || TREE_CODE (arg0) == VEC_COND_EXPR
10957 || COMPARISON_CLASS_P (arg0))
10959 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10960 arg0, arg1,
10961 /*cond_first_p=*/1);
10962 if (tem != NULL_TREE)
10963 return tem;
10966 if (TREE_CODE (arg1) == COND_EXPR
10967 || TREE_CODE (arg1) == VEC_COND_EXPR
10968 || COMPARISON_CLASS_P (arg1))
10970 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10971 arg1, arg0,
10972 /*cond_first_p=*/0);
10973 if (tem != NULL_TREE)
10974 return tem;
10978 switch (code)
10980 case MEM_REF:
10981 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10982 if (TREE_CODE (arg0) == ADDR_EXPR
10983 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10985 tree iref = TREE_OPERAND (arg0, 0);
10986 return fold_build2 (MEM_REF, type,
10987 TREE_OPERAND (iref, 0),
10988 int_const_binop (PLUS_EXPR, arg1,
10989 TREE_OPERAND (iref, 1)));
10992 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10993 if (TREE_CODE (arg0) == ADDR_EXPR
10994 && handled_component_p (TREE_OPERAND (arg0, 0)))
10996 tree base;
10997 poly_int64 coffset;
10998 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10999 &coffset);
11000 if (!base)
11001 return NULL_TREE;
11002 return fold_build2 (MEM_REF, type,
11003 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11004 int_const_binop (PLUS_EXPR, arg1,
11005 size_int (coffset)));
11008 return NULL_TREE;
11010 case POINTER_PLUS_EXPR:
11011 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11012 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11013 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11014 return fold_convert_loc (loc, type,
11015 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11016 fold_convert_loc (loc, sizetype,
11017 arg1),
11018 fold_convert_loc (loc, sizetype,
11019 arg0)));
11021 return NULL_TREE;
11023 case PLUS_EXPR:
11024 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11026 /* X + (X / CST) * -CST is X % CST. */
11027 if (TREE_CODE (arg1) == MULT_EXPR
11028 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11029 && operand_equal_p (arg0,
11030 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11032 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11033 tree cst1 = TREE_OPERAND (arg1, 1);
11034 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11035 cst1, cst0);
11036 if (sum && integer_zerop (sum))
11037 return fold_convert_loc (loc, type,
11038 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11039 TREE_TYPE (arg0), arg0,
11040 cst0));
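/* A concrete instance of the fold above (sketch, CST == 16):

     x + (x / 16) * -16

   equals x - (x / 16) * 16, which is exactly the truncating
   remainder, so it becomes x % 16; this holds for negative x too,
   because / and % both truncate toward zero.  */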
11044 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11045 one. Make sure the type is not saturating and has the signedness of
11046 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11047 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11048 if ((TREE_CODE (arg0) == MULT_EXPR
11049 || TREE_CODE (arg1) == MULT_EXPR)
11050 && !TYPE_SATURATING (type)
11051 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11052 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11053 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11055 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11056 if (tem)
11057 return tem;
11060 if (! FLOAT_TYPE_P (type))
11062 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11063 (plus (plus (mult) (mult)) (foo)) so that we can
11064 take advantage of the factoring cases below. */
11065 if (ANY_INTEGRAL_TYPE_P (type)
11066 && TYPE_OVERFLOW_WRAPS (type)
11067 && (((TREE_CODE (arg0) == PLUS_EXPR
11068 || TREE_CODE (arg0) == MINUS_EXPR)
11069 && TREE_CODE (arg1) == MULT_EXPR)
11070 || ((TREE_CODE (arg1) == PLUS_EXPR
11071 || TREE_CODE (arg1) == MINUS_EXPR)
11072 && TREE_CODE (arg0) == MULT_EXPR)))
11074 tree parg0, parg1, parg, marg;
11075 enum tree_code pcode;
11077 if (TREE_CODE (arg1) == MULT_EXPR)
11078 parg = arg0, marg = arg1;
11079 else
11080 parg = arg1, marg = arg0;
11081 pcode = TREE_CODE (parg);
11082 parg0 = TREE_OPERAND (parg, 0);
11083 parg1 = TREE_OPERAND (parg, 1);
11084 STRIP_NOPS (parg0);
11085 STRIP_NOPS (parg1);
11087 if (TREE_CODE (parg0) == MULT_EXPR
11088 && TREE_CODE (parg1) != MULT_EXPR)
11089 return fold_build2_loc (loc, pcode, type,
11090 fold_build2_loc (loc, PLUS_EXPR, type,
11091 fold_convert_loc (loc, type,
11092 parg0),
11093 fold_convert_loc (loc, type,
11094 marg)),
11095 fold_convert_loc (loc, type, parg1));
11096 if (TREE_CODE (parg0) != MULT_EXPR
11097 && TREE_CODE (parg1) == MULT_EXPR)
11098 return
11099 fold_build2_loc (loc, PLUS_EXPR, type,
11100 fold_convert_loc (loc, type, parg0),
11101 fold_build2_loc (loc, pcode, type,
11102 fold_convert_loc (loc, type, marg),
11103 fold_convert_loc (loc, type,
11104 parg1)));
11107 else
11109 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11110 to __complex__ ( x, y ). This is not the same for SNaNs or
11111 if signed zeros are involved. */
11112 if (!HONOR_SNANS (arg0)
11113 && !HONOR_SIGNED_ZEROS (arg0)
11114 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11116 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11117 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11118 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11119 bool arg0rz = false, arg0iz = false;
11120 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11121 || (arg0i && (arg0iz = real_zerop (arg0i))))
11123 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11124 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11125 if (arg0rz && arg1i && real_zerop (arg1i))
11127 tree rp = arg1r ? arg1r
11128 : build1 (REALPART_EXPR, rtype, arg1);
11129 tree ip = arg0i ? arg0i
11130 : build1 (IMAGPART_EXPR, rtype, arg0);
11131 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11133 else if (arg0iz && arg1r && real_zerop (arg1r))
11135 tree rp = arg0r ? arg0r
11136 : build1 (REALPART_EXPR, rtype, arg0);
11137 tree ip = arg1i ? arg1i
11138 : build1 (IMAGPART_EXPR, rtype, arg1);
11139 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
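/* A sketch of why the guards above matter, assuming C11 <complex.h>:
   for x == -0.0 the real part of

     CMPLX (x, 0.0) + CMPLX (0.0, y)

   is x + 0.0 == +0.0, while the folded __complex__ (x, y) keeps
   -0.0; likewise an SNaN in x would have been quieted by the
   addition.  Hence HONOR_SIGNED_ZEROS and HONOR_SNANS disable the
   fold.  */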
11144 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11145 We associate floats only if the user has specified
11146 -fassociative-math. */
11147 if (flag_associative_math
11148 && TREE_CODE (arg1) == PLUS_EXPR
11149 && TREE_CODE (arg0) != MULT_EXPR)
11151 tree tree10 = TREE_OPERAND (arg1, 0);
11152 tree tree11 = TREE_OPERAND (arg1, 1);
11153 if (TREE_CODE (tree11) == MULT_EXPR
11154 && TREE_CODE (tree10) == MULT_EXPR)
11156 tree tree0;
11157 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11158 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11161 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11162 We associate floats only if the user has specified
11163 -fassociative-math. */
11164 if (flag_associative_math
11165 && TREE_CODE (arg0) == PLUS_EXPR
11166 && TREE_CODE (arg1) != MULT_EXPR)
11168 tree tree00 = TREE_OPERAND (arg0, 0);
11169 tree tree01 = TREE_OPERAND (arg0, 1);
11170 if (TREE_CODE (tree01) == MULT_EXPR
11171 && TREE_CODE (tree00) == MULT_EXPR)
11173 tree tree0;
11174 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11175 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11180 bit_rotate:
11181 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11182 is a rotate of A by C1 bits. */
11183 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11184 is a rotate of A by B bits.
11185 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11186 though in this case CODE must be | and not + or ^, otherwise
11187 it doesn't return A when B is 0. */
11189 enum tree_code code0, code1;
11190 tree rtype;
11191 code0 = TREE_CODE (arg0);
11192 code1 = TREE_CODE (arg1);
11193 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11194 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11195 && operand_equal_p (TREE_OPERAND (arg0, 0),
11196 TREE_OPERAND (arg1, 0), 0)
11197 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11198 TYPE_UNSIGNED (rtype))
11199 /* Only create rotates in complete modes. Other cases are not
11200 expanded properly. */
11201 && (element_precision (rtype)
11202 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11204 tree tree01, tree11;
11205 tree orig_tree01, orig_tree11;
11206 enum tree_code code01, code11;
11208 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11209 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11210 STRIP_NOPS (tree01);
11211 STRIP_NOPS (tree11);
11212 code01 = TREE_CODE (tree01);
11213 code11 = TREE_CODE (tree11);
11214 if (code11 != MINUS_EXPR
11215 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11217 std::swap (code0, code1);
11218 std::swap (code01, code11);
11219 std::swap (tree01, tree11);
11220 std::swap (orig_tree01, orig_tree11);
11222 if (code01 == INTEGER_CST
11223 && code11 == INTEGER_CST
11224 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11225 == element_precision (rtype)))
11227 tem = build2_loc (loc, LROTATE_EXPR,
11228 rtype, TREE_OPERAND (arg0, 0),
11229 code0 == LSHIFT_EXPR
11230 ? orig_tree01 : orig_tree11);
11231 return fold_convert_loc (loc, type, tem);
11233 else if (code11 == MINUS_EXPR)
11235 tree tree110, tree111;
11236 tree110 = TREE_OPERAND (tree11, 0);
11237 tree111 = TREE_OPERAND (tree11, 1);
11238 STRIP_NOPS (tree110);
11239 STRIP_NOPS (tree111);
11240 if (TREE_CODE (tree110) == INTEGER_CST
11241 && compare_tree_int (tree110,
11242 element_precision (rtype)) == 0
11243 && operand_equal_p (tree01, tree111, 0))
11245 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11246 ? LROTATE_EXPR : RROTATE_EXPR),
11247 rtype, TREE_OPERAND (arg0, 0),
11248 orig_tree01);
11249 return fold_convert_loc (loc, type, tem);
11252 else if (code == BIT_IOR_EXPR
11253 && code11 == BIT_AND_EXPR
11254 && pow2p_hwi (element_precision (rtype)))
11256 tree tree110, tree111;
11257 tree110 = TREE_OPERAND (tree11, 0);
11258 tree111 = TREE_OPERAND (tree11, 1);
11259 STRIP_NOPS (tree110);
11260 STRIP_NOPS (tree111);
11261 if (TREE_CODE (tree110) == NEGATE_EXPR
11262 && TREE_CODE (tree111) == INTEGER_CST
11263 && compare_tree_int (tree111,
11264 element_precision (rtype) - 1) == 0
11265 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11267 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11268 ? LROTATE_EXPR : RROTATE_EXPR),
11269 rtype, TREE_OPERAND (arg0, 0),
11270 orig_tree01);
11271 return fold_convert_loc (loc, type, tem);
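/* The last form above is the portable C rotate idiom; a minimal
   sketch, assuming a 32-bit unsigned int and 0 <= b <= 31 (the
   name rotl32 is illustrative only):

     unsigned int
     rotl32 (unsigned int a, unsigned int b)
     {
       return (a << b) | (a >> (-b & 31));
     }

   The BIT_AND_EXPR with 31 (precision minus one) keeps the right
   shift count in range and makes the expression yield A itself for
   b == 0, which is why that variant is accepted only for
   BIT_IOR_EXPR; the whole thing folds to a single LROTATE_EXPR.  */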
11277 associate:
11278 /* In most languages, we can't associate operations on floats through
11279 parentheses. Rather than remember where the parentheses were, we
11280 don't associate floats at all, unless the user has specified
11281 -fassociative-math.
11282 And, we need to make sure type is not saturating. */
11284 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11285 && !TYPE_SATURATING (type)
11286 && !TYPE_OVERFLOW_SANITIZED (type))
11288 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11289 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11290 tree atype = type;
11291 bool ok = true;
11293 /* Split both trees into variables, constants, and literals. Then
11294 associate each group together, the constants with literals,
11295 then the result with variables. This increases the chances of
11296 literals being recombined later and of generating relocatable
11297 expressions for the sum of a constant and literal. */
11298 var0 = split_tree (arg0, type, code,
11299 &minus_var0, &con0, &minus_con0,
11300 &lit0, &minus_lit0, 0);
11301 var1 = split_tree (arg1, type, code,
11302 &minus_var1, &con1, &minus_con1,
11303 &lit1, &minus_lit1, code == MINUS_EXPR);
11305 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11306 if (code == MINUS_EXPR)
11307 code = PLUS_EXPR;
11309 /* With undefined overflow prefer doing association in a type
11310 which wraps on overflow, if that is one of the operand types. */
11311 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11312 && !TYPE_OVERFLOW_WRAPS (type))
11314 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11315 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11316 atype = TREE_TYPE (arg0);
11317 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11318 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11319 atype = TREE_TYPE (arg1);
11320 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11323 /* With undefined overflow we can only associate constants with one
11324 variable, and constants whose association doesn't overflow. */
11325 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11326 && !TYPE_OVERFLOW_WRAPS (atype))
11328 if ((var0 && var1) || (minus_var0 && minus_var1))
11330 /* ??? If split_tree would handle NEGATE_EXPR we could
11331 simply reject these cases and the allowed cases would
11332 be the var0/minus_var1 ones. */
11333 tree tmp0 = var0 ? var0 : minus_var0;
11334 tree tmp1 = var1 ? var1 : minus_var1;
11335 bool one_neg = false;
11337 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11339 tmp0 = TREE_OPERAND (tmp0, 0);
11340 one_neg = !one_neg;
11342 if (CONVERT_EXPR_P (tmp0)
11343 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11344 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11345 <= TYPE_PRECISION (atype)))
11346 tmp0 = TREE_OPERAND (tmp0, 0);
11347 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11349 tmp1 = TREE_OPERAND (tmp1, 0);
11350 one_neg = !one_neg;
11352 if (CONVERT_EXPR_P (tmp1)
11353 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11354 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11355 <= TYPE_PRECISION (atype)))
11356 tmp1 = TREE_OPERAND (tmp1, 0);
11357 /* The only case we can still associate with two variables
11358 is if they cancel out. */
11359 if (!one_neg
11360 || !operand_equal_p (tmp0, tmp1, 0))
11361 ok = false;
11363 else if ((var0 && minus_var1
11364 && ! operand_equal_p (var0, minus_var1, 0))
11365 || (minus_var0 && var1
11366 && ! operand_equal_p (minus_var0, var1, 0)))
11367 ok = false;
11370 /* Only do something if we found more than two objects. Otherwise,
11371 nothing has changed and we risk infinite recursion. */
11372 if (ok
11373 && ((var0 != 0) + (var1 != 0)
11374 + (minus_var0 != 0) + (minus_var1 != 0)
11375 + (con0 != 0) + (con1 != 0)
11376 + (minus_con0 != 0) + (minus_con1 != 0)
11377 + (lit0 != 0) + (lit1 != 0)
11378 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11380 var0 = associate_trees (loc, var0, var1, code, atype);
11381 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11382 code, atype);
11383 con0 = associate_trees (loc, con0, con1, code, atype);
11384 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11385 code, atype);
11386 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11387 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11388 code, atype);
11390 if (minus_var0 && var0)
11392 var0 = associate_trees (loc, var0, minus_var0,
11393 MINUS_EXPR, atype);
11394 minus_var0 = 0;
11396 if (minus_con0 && con0)
11398 con0 = associate_trees (loc, con0, minus_con0,
11399 MINUS_EXPR, atype);
11400 minus_con0 = 0;
11403 /* Preserve the MINUS_EXPR if the negative part of the literal is
11404 greater than the positive part. Otherwise, the multiplicative
11405 folding code (i.e. extract_muldiv) may be fooled when
11406 unsigned constants are subtracted, as in the following
11407 example: ((X*2 + 4) - 8U)/2. */
11408 if (minus_lit0 && lit0)
11410 if (TREE_CODE (lit0) == INTEGER_CST
11411 && TREE_CODE (minus_lit0) == INTEGER_CST
11412 && tree_int_cst_lt (lit0, minus_lit0)
11413 /* But avoid ending up with only negated parts. */
11414 && (var0 || con0))
11416 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11417 MINUS_EXPR, atype);
11418 lit0 = 0;
11420 else
11422 lit0 = associate_trees (loc, lit0, minus_lit0,
11423 MINUS_EXPR, atype);
11424 minus_lit0 = 0;
11428 /* Don't introduce overflows through reassociation. */
11429 if ((lit0 && TREE_OVERFLOW_P (lit0))
11430 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11431 return NULL_TREE;
11433 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11434 con0 = associate_trees (loc, con0, lit0, code, atype);
11435 lit0 = 0;
11436 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11437 code, atype);
11438 minus_lit0 = 0;
11440 /* Eliminate minus_con0. */
11441 if (minus_con0)
11443 if (con0)
11444 con0 = associate_trees (loc, con0, minus_con0,
11445 MINUS_EXPR, atype);
11446 else if (var0)
11447 var0 = associate_trees (loc, var0, minus_con0,
11448 MINUS_EXPR, atype);
11449 else
11450 gcc_unreachable ();
11451 minus_con0 = 0;
11454 /* Eliminate minus_var0. */
11455 if (minus_var0)
11457 if (con0)
11458 con0 = associate_trees (loc, con0, minus_var0,
11459 MINUS_EXPR, atype);
11460 else
11461 gcc_unreachable ();
11462 minus_var0 = 0;
11465 return
11466 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11467 code, atype));
11471 return NULL_TREE;
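/* A worked example of the association above (sketch): for
   code == PLUS_EXPR with arg0 == (x + 1) and arg1 == (y + 2),
   split_tree yields var0 == x, lit0 == 1, var1 == y, lit1 == 2.
   That is four objects, so the literals are combined first and the
   sum is rebuilt as (x + y) + 3.  For plain x + 1 only two objects
   are found and nothing is rebuilt, which is what prevents the
   infinite recursion mentioned above.  */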
11473 case POINTER_DIFF_EXPR:
11474 case MINUS_EXPR:
11475 /* Fold &a[i] - &a[j] to i-j. */
11476 if (TREE_CODE (arg0) == ADDR_EXPR
11477 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11478 && TREE_CODE (arg1) == ADDR_EXPR
11479 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11481 tree tem = fold_addr_of_array_ref_difference (loc, type,
11482 TREE_OPERAND (arg0, 0),
11483 TREE_OPERAND (arg1, 0),
11484 code
11485 == POINTER_DIFF_EXPR);
11486 if (tem)
11487 return tem;
11490 /* The further transformations below do not apply to pointers. */
11491 if (code == POINTER_DIFF_EXPR)
11492 return NULL_TREE;
11494 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11495 if (TREE_CODE (arg0) == NEGATE_EXPR
11496 && negate_expr_p (op1)
11497 /* If arg0 is e.g. unsigned int and type is int, then this could
11498 introduce UB, because if A is INT_MIN at runtime, the original
11499 expression can be well defined while the latter is not.
11500 See PR83269. */
11501 && !(ANY_INTEGRAL_TYPE_P (type)
11502 && TYPE_OVERFLOW_UNDEFINED (type)
11503 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11504 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11505 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11506 fold_convert_loc (loc, type,
11507 TREE_OPERAND (arg0, 0)));
11509 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11510 __complex__ ( x, -y ). This is not the same for SNaNs or if
11511 signed zeros are involved. */
11512 if (!HONOR_SNANS (arg0)
11513 && !HONOR_SIGNED_ZEROS (arg0)
11514 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11516 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11517 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11518 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11519 bool arg0rz = false, arg0iz = false;
11520 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11521 || (arg0i && (arg0iz = real_zerop (arg0i))))
11523 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11524 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11525 if (arg0rz && arg1i && real_zerop (arg1i))
11527 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11528 arg1r ? arg1r
11529 : build1 (REALPART_EXPR, rtype, arg1));
11530 tree ip = arg0i ? arg0i
11531 : build1 (IMAGPART_EXPR, rtype, arg0);
11532 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11534 else if (arg0iz && arg1r && real_zerop (arg1r))
11536 tree rp = arg0r ? arg0r
11537 : build1 (REALPART_EXPR, rtype, arg0);
11538 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11539 arg1i ? arg1i
11540 : build1 (IMAGPART_EXPR, rtype, arg1));
11541 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11546 /* A - B -> A + (-B) if B is easily negatable. */
11547 if (negate_expr_p (op1)
11548 && ! TYPE_OVERFLOW_SANITIZED (type)
11549 && ((FLOAT_TYPE_P (type)
11550 /* Avoid this transformation if B is a positive REAL_CST. */
11551 && (TREE_CODE (op1) != REAL_CST
11552 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11553 || INTEGRAL_TYPE_P (type)))
11554 return fold_build2_loc (loc, PLUS_EXPR, type,
11555 fold_convert_loc (loc, type, arg0),
11556 negate_expr (op1));
11558 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11559 one. Make sure the type is not saturating and has the signedness of
11560 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11561 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11562 if ((TREE_CODE (arg0) == MULT_EXPR
11563 || TREE_CODE (arg1) == MULT_EXPR)
11564 && !TYPE_SATURATING (type)
11565 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11566 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11567 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11569 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11570 if (tem)
11571 return tem;
11574 goto associate;
11576 case MULT_EXPR:
11577 if (! FLOAT_TYPE_P (type))
11579 /* Transform x * -C into -x * C if x is easily negatable. */
11580 if (TREE_CODE (op1) == INTEGER_CST
11581 && tree_int_cst_sgn (op1) == -1
11582 && negate_expr_p (op0)
11583 && negate_expr_p (op1)
11584 && (tem = negate_expr (op1)) != op1
11585 && ! TREE_OVERFLOW (tem))
11586 return fold_build2_loc (loc, MULT_EXPR, type,
11587 fold_convert_loc (loc, type,
11588 negate_expr (op0)), tem);
11590 strict_overflow_p = false;
11591 if (TREE_CODE (arg1) == INTEGER_CST
11592 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11593 &strict_overflow_p)) != 0)
11595 if (strict_overflow_p)
11596 fold_overflow_warning (("assuming signed overflow does not "
11597 "occur when simplifying "
11598 "multiplication"),
11599 WARN_STRICT_OVERFLOW_MISC);
11600 return fold_convert_loc (loc, type, tem);
11603 /* Optimize z * conj(z) for integer complex numbers. */
11604 if (TREE_CODE (arg0) == CONJ_EXPR
11605 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11606 return fold_mult_zconjz (loc, type, arg1);
11607 if (TREE_CODE (arg1) == CONJ_EXPR
11608 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11609 return fold_mult_zconjz (loc, type, arg0);
11611 else
11613 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11614 This is not the same for NaNs or if signed zeros are
11615 involved. */
11616 if (!HONOR_NANS (arg0)
11617 && !HONOR_SIGNED_ZEROS (arg0)
11618 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11619 && TREE_CODE (arg1) == COMPLEX_CST
11620 && real_zerop (TREE_REALPART (arg1)))
11622 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11623 if (real_onep (TREE_IMAGPART (arg1)))
11624 return
11625 fold_build2_loc (loc, COMPLEX_EXPR, type,
11626 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11627 rtype, arg0)),
11628 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11629 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11630 return
11631 fold_build2_loc (loc, COMPLEX_EXPR, type,
11632 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11633 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11634 rtype, arg0)));
11637 /* Optimize z * conj(z) for floating point complex numbers.
11638 Guarded by flag_unsafe_math_optimizations as non-finite
11639 imaginary components don't produce scalar results. */
11640 if (flag_unsafe_math_optimizations
11641 && TREE_CODE (arg0) == CONJ_EXPR
11642 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11643 return fold_mult_zconjz (loc, type, arg1);
11644 if (flag_unsafe_math_optimizations
11645 && TREE_CODE (arg1) == CONJ_EXPR
11646 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11647 return fold_mult_zconjz (loc, type, arg0);
11649 goto associate;
11651 case BIT_IOR_EXPR:
11652 /* Canonicalize (X & C1) | C2. */
11653 if (TREE_CODE (arg0) == BIT_AND_EXPR
11654 && TREE_CODE (arg1) == INTEGER_CST
11655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11657 int width = TYPE_PRECISION (type), w;
11658 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11659 wide_int c2 = wi::to_wide (arg1);
11661 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11662 if ((c1 & c2) == c1)
11663 return omit_one_operand_loc (loc, type, arg1,
11664 TREE_OPERAND (arg0, 0));
11666 wide_int msk = wi::mask (width, false,
11667 TYPE_PRECISION (TREE_TYPE (arg1)));
11669 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11670 if (wi::bit_and_not (msk, c1 | c2) == 0)
11672 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11673 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11676 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11677 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11678 mode which allows further optimizations. */
11679 c1 &= msk;
11680 c2 &= msk;
11681 wide_int c3 = wi::bit_and_not (c1, c2);
11682 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11684 wide_int mask = wi::mask (w, false,
11685 TYPE_PRECISION (type));
11686 if (((c1 | c2) & mask) == mask
11687 && wi::bit_and_not (c1, mask) == 0)
11689 c3 = mask;
11690 break;
11694 if (c3 != c1)
11696 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11697 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11698 wide_int_to_tree (type, c3));
11699 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
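/* Concrete instances of the cases above (sketch, 32-bit unsigned X):

     (X & 0x0f) | 0xff        ->  0xff               (C1 & C2) == C1
     (X & 0xffffff00) | 0xff  ->  X | 0xff           (C1 | C2) == ~0
     (X & 0x3c) | 0x0c        ->  (X & 0x30) | 0x0c  C1 &= ~C2

   whereas (X & 0xff) | 0x0f is left alone: 0xff is a mask of a
   whole byte, and keeping it may enable a zero-extension later.  */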
11703 /* See if this can be simplified into a rotate first. If that
11704 is unsuccessful continue in the association code. */
11705 goto bit_rotate;
11707 case BIT_XOR_EXPR:
11708 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11709 if (TREE_CODE (arg0) == BIT_AND_EXPR
11710 && INTEGRAL_TYPE_P (type)
11711 && integer_onep (TREE_OPERAND (arg0, 1))
11712 && integer_onep (arg1))
11713 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11714 build_zero_cst (TREE_TYPE (arg0)));
11716 /* See if this can be simplified into a rotate first. If that
11717 is unsuccessful continue in the association code. */
11718 goto bit_rotate;
11720 case BIT_AND_EXPR:
11721 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11722 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11723 && INTEGRAL_TYPE_P (type)
11724 && integer_onep (TREE_OPERAND (arg0, 1))
11725 && integer_onep (arg1))
11727 tree tem2;
11728 tem = TREE_OPERAND (arg0, 0);
11729 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11730 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11731 tem, tem2);
11732 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11733 build_zero_cst (TREE_TYPE (tem)));
11735 /* Fold ~X & 1 as (X & 1) == 0. */
11736 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11737 && INTEGRAL_TYPE_P (type)
11738 && integer_onep (arg1))
11740 tree tem2;
11741 tem = TREE_OPERAND (arg0, 0);
11742 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11743 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11744 tem, tem2);
11745 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11746 build_zero_cst (TREE_TYPE (tem)));
11748 /* Fold !X & 1 as X == 0. */
11749 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11750 && integer_onep (arg1))
11752 tem = TREE_OPERAND (arg0, 0);
11753 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11754 build_zero_cst (TREE_TYPE (tem)));
11757 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11758 multiple of 1 << CST. */
11759 if (TREE_CODE (arg1) == INTEGER_CST)
11761 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11762 wide_int ncst1 = -cst1;
11763 if ((cst1 & ncst1) == ncst1
11764 && multiple_of_p (type, arg0,
11765 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11766 return fold_convert_loc (loc, type, arg0);
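/* For example (sketch):

     (x * 8) & -8    ->    x * 8

   since -8 == -(1 << 3) and x * 8 is always a multiple of 8, so
   its low three bits are zero and the mask has no effect.  */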
11769 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11770 bits from CST2. */
11771 if (TREE_CODE (arg1) == INTEGER_CST
11772 && TREE_CODE (arg0) == MULT_EXPR
11773 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11775 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11776 wide_int masked
11777 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11779 if (masked == 0)
11780 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11781 arg0, arg1);
11782 else if (masked != warg1)
11784 /* Avoid the transform if arg1 is a mask of some
11785 mode which allows further optimizations. */
11786 int pop = wi::popcount (warg1);
11787 if (!(pop >= BITS_PER_UNIT
11788 && pow2p_hwi (pop)
11789 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11790 return fold_build2_loc (loc, code, type, op0,
11791 wide_int_to_tree (type, masked));
11795 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11796 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11797 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11799 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11801 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11802 if (mask == -1)
11803 return
11804 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11807 goto associate;
11809 case RDIV_EXPR:
11810 /* Don't touch a floating-point divide by zero unless the mode
11811 of the constant can represent infinity. */
11812 if (TREE_CODE (arg1) == REAL_CST
11813 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11814 && real_zerop (arg1))
11815 return NULL_TREE;
11817 /* (-A) / (-B) -> A / B */
11818 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11819 return fold_build2_loc (loc, RDIV_EXPR, type,
11820 TREE_OPERAND (arg0, 0),
11821 negate_expr (arg1));
11822 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11823 return fold_build2_loc (loc, RDIV_EXPR, type,
11824 negate_expr (arg0),
11825 TREE_OPERAND (arg1, 0));
11826 return NULL_TREE;
11828 case TRUNC_DIV_EXPR:
11829 /* Fall through */
11831 case FLOOR_DIV_EXPR:
11832 /* Simplify A / (B << N) where A and B are positive and B is
11833 a power of 2, to A >> (N + log2(B)). */
11834 strict_overflow_p = false;
11835 if (TREE_CODE (arg1) == LSHIFT_EXPR
11836 && (TYPE_UNSIGNED (type)
11837 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11839 tree sval = TREE_OPERAND (arg1, 0);
11840 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11842 tree sh_cnt = TREE_OPERAND (arg1, 1);
11843 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11844 wi::exact_log2 (wi::to_wide (sval)));
11846 if (strict_overflow_p)
11847 fold_overflow_warning (("assuming signed overflow does not "
11848 "occur when simplifying A / (B << N)"),
11849 WARN_STRICT_OVERFLOW_MISC);
11851 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11852 sh_cnt, pow2);
11853 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11854 fold_convert_loc (loc, type, arg0), sh_cnt);
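/* For example (sketch, unsigned arithmetic):

     a / (4u << n)    ->    a >> (n + 2)

   where B == 4 contributes log2 (4) == 2 to the shift count.  */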
11858 /* Fall through */
11860 case ROUND_DIV_EXPR:
11861 case CEIL_DIV_EXPR:
11862 case EXACT_DIV_EXPR:
11863 if (integer_zerop (arg1))
11864 return NULL_TREE;
11866 /* Convert -A / -B to A / B when the type is signed and overflow is
11867 undefined. */
11868 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11869 && TREE_CODE (op0) == NEGATE_EXPR
11870 && negate_expr_p (op1))
11872 if (ANY_INTEGRAL_TYPE_P (type))
11873 fold_overflow_warning (("assuming signed overflow does not occur "
11874 "when distributing negation across "
11875 "division"),
11876 WARN_STRICT_OVERFLOW_MISC);
11877 return fold_build2_loc (loc, code, type,
11878 fold_convert_loc (loc, type,
11879 TREE_OPERAND (arg0, 0)),
11880 negate_expr (op1));
11882 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11883 && TREE_CODE (arg1) == NEGATE_EXPR
11884 && negate_expr_p (op0))
11886 if (ANY_INTEGRAL_TYPE_P (type))
11887 fold_overflow_warning (("assuming signed overflow does not occur "
11888 "when distributing negation across "
11889 "division"),
11890 WARN_STRICT_OVERFLOW_MISC);
11891 return fold_build2_loc (loc, code, type,
11892 negate_expr (op0),
11893 fold_convert_loc (loc, type,
11894 TREE_OPERAND (arg1, 0)));
11897 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11898 operation, EXACT_DIV_EXPR.
11900 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11901 At one time others generated faster code; it's not clear if they do
11902 after the last round of changes to the DIV code in expmed.cc. */
11903 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11904 && multiple_of_p (type, arg0, arg1))
11905 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11906 fold_convert (type, arg0),
11907 fold_convert (type, arg1));
11909 strict_overflow_p = false;
11910 if (TREE_CODE (arg1) == INTEGER_CST
11911 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11912 &strict_overflow_p)) != 0)
11914 if (strict_overflow_p)
11915 fold_overflow_warning (("assuming signed overflow does not occur "
11916 "when simplifying division"),
11917 WARN_STRICT_OVERFLOW_MISC);
11918 return fold_convert_loc (loc, type, tem);
11921 return NULL_TREE;
11923 case CEIL_MOD_EXPR:
11924 case FLOOR_MOD_EXPR:
11925 case ROUND_MOD_EXPR:
11926 case TRUNC_MOD_EXPR:
11927 strict_overflow_p = false;
11928 if (TREE_CODE (arg1) == INTEGER_CST
11929 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11930 &strict_overflow_p)) != 0)
11932 if (strict_overflow_p)
11933 fold_overflow_warning (("assuming signed overflow does not occur "
11934 "when simplifying modulus"),
11935 WARN_STRICT_OVERFLOW_MISC);
11936 return fold_convert_loc (loc, type, tem);
11939 return NULL_TREE;
11941 case LROTATE_EXPR:
11942 case RROTATE_EXPR:
11943 case RSHIFT_EXPR:
11944 case LSHIFT_EXPR:
11945 /* Since a negative shift count is not well-defined,
11946 don't try to compute it in the compiler. */
11947 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11948 return NULL_TREE;
11950 prec = element_precision (type);
11952 /* If we have a rotate of a bit operation with the rotate count and
11953 the second operand of the bit operation both constant,
11954 permute the two operations. */
11955 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11956 && (TREE_CODE (arg0) == BIT_AND_EXPR
11957 || TREE_CODE (arg0) == BIT_IOR_EXPR
11958 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11959 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11961 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11962 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11963 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11964 fold_build2_loc (loc, code, type,
11965 arg00, arg1),
11966 fold_build2_loc (loc, code, type,
11967 arg01, arg1));
11970 /* Two consecutive rotates adding up to some integer
11971 multiple of the precision of the type can be ignored. */
11972 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11973 && TREE_CODE (arg0) == RROTATE_EXPR
11974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11975 && wi::umod_trunc (wi::to_wide (arg1)
11976 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11977 prec) == 0)
11978 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
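/* For example (sketch, 32-bit x): rotating right by 12 and then by
   20 adds up to 32, a whole revolution, so both rotates are dropped
   and x itself is returned.  */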
11980 return NULL_TREE;
11982 case MIN_EXPR:
11983 case MAX_EXPR:
11984 goto associate;
11986 case TRUTH_ANDIF_EXPR:
11987 /* Note that the operands of this must be ints
11988 and their values must be 0 or 1.
11989 ("true" is a fixed value perhaps depending on the language.) */
11990 /* If first arg is constant zero, return it. */
11991 if (integer_zerop (arg0))
11992 return fold_convert_loc (loc, type, arg0);
11993 /* FALLTHRU */
11994 case TRUTH_AND_EXPR:
11995 /* If either arg is constant true, drop it. */
11996 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11997 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11998 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11999 /* Preserve sequence points. */
12000 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12001 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12002 /* If second arg is constant zero, result is zero, but first arg
12003 must be evaluated. */
12004 if (integer_zerop (arg1))
12005 return omit_one_operand_loc (loc, type, arg1, arg0);
12006 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12007 case will be handled here. */
12008 if (integer_zerop (arg0))
12009 return omit_one_operand_loc (loc, type, arg0, arg1);
12011 /* !X && X is always false. */
12012 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12014 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12015 /* X && !X is always false. */
12016 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12018 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12020 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12021 means A >= Y && A != MAX, but in this case we know that
12022 A < X <= MAX. */
12024 if (!TREE_SIDE_EFFECTS (arg0)
12025 && !TREE_SIDE_EFFECTS (arg1))
12027 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12028 if (tem && !operand_equal_p (tem, arg0, 0))
12029 return fold_convert (type,
12030 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12031 tem, arg1));
12033 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12034 if (tem && !operand_equal_p (tem, arg1, 0))
12035 return fold_convert (type,
12036 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12037 arg0, tem));
12040 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12041 != NULL_TREE)
12042 return tem;
12044 return NULL_TREE;
12046 case TRUTH_ORIF_EXPR:
12047 /* Note that the operands of this must be ints
12048 and their values must be 0 or true.
12049 ("true" is a fixed value perhaps depending on the language.) */
12050 /* If first arg is constant true, return it. */
12051 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12052 return fold_convert_loc (loc, type, arg0);
12053 /* FALLTHRU */
12054 case TRUTH_OR_EXPR:
12055 /* If either arg is constant zero, drop it. */
12056 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12057 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12058 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12059 /* Preserve sequence points. */
12060 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12062 /* If second arg is constant true, result is true, but we must
12063 evaluate first arg. */
12064 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12065 return omit_one_operand_loc (loc, type, arg1, arg0);
12066 /* Likewise for first arg, but note this only occurs here for
12067 TRUTH_OR_EXPR. */
12068 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12069 return omit_one_operand_loc (loc, type, arg0, arg1);
12071 /* !X || X is always true. */
12072 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12073 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12074 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12075 /* X || !X is always true. */
12076 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12077 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12078 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12080 /* (X && !Y) || (!X && Y) is X ^ Y */
12081 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12082 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12084 tree a0, a1, l0, l1, n0, n1;
12086 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12087 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12089 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12090 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12092 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12093 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12095 if ((operand_equal_p (n0, a0, 0)
12096 && operand_equal_p (n1, a1, 0))
12097 || (operand_equal_p (n0, a1, 0)
12098 && operand_equal_p (n1, a0, 0)))
12099 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12102 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12103 != NULL_TREE)
12104 return tem;
12106 return NULL_TREE;
12108 case TRUTH_XOR_EXPR:
12109 /* If the second arg is constant zero, drop it. */
12110 if (integer_zerop (arg1))
12111 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12112 /* If the second arg is constant true, this is a logical inversion. */
12113 if (integer_onep (arg1))
12115 tem = invert_truthvalue_loc (loc, arg0);
12116 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12118 /* Identical arguments cancel to zero. */
12119 if (operand_equal_p (arg0, arg1, 0))
12120 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12122 /* !X ^ X is always true. */
12123 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12124 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12125 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12127 /* X ^ !X is always true. */
12128 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12129 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12130 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12132 return NULL_TREE;
12134 case EQ_EXPR:
12135 case NE_EXPR:
12136 STRIP_NOPS (arg0);
12137 STRIP_NOPS (arg1);
12139 tem = fold_comparison (loc, code, type, op0, op1);
12140 if (tem != NULL_TREE)
12141 return tem;
12143 /* bool_var != 1 becomes !bool_var. */
12144 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12145 && code == NE_EXPR)
12146 return fold_convert_loc (loc, type,
12147 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12148 TREE_TYPE (arg0), arg0));
12150 /* bool_var == 0 becomes !bool_var. */
12151 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12152 && code == EQ_EXPR)
12153 return fold_convert_loc (loc, type,
12154 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12155 TREE_TYPE (arg0), arg0));
12157 /* !exp != 0 becomes !exp */
12158 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12159 && code == NE_EXPR)
12160 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12162 /* If this is an EQ or NE comparison with zero and ARG0 is
12163 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12164 two operations, but the latter can be done in one less insn
12165 on machines that have only two-operand insns or on which a
12166 constant cannot be the first operand. */
12167 if (TREE_CODE (arg0) == BIT_AND_EXPR
12168 && integer_zerop (arg1))
12170 tree arg00 = TREE_OPERAND (arg0, 0);
12171 tree arg01 = TREE_OPERAND (arg0, 1);
12172 if (TREE_CODE (arg00) == LSHIFT_EXPR
12173 && integer_onep (TREE_OPERAND (arg00, 0)))
12175 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12176 arg01, TREE_OPERAND (arg00, 1));
12177 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12178 build_one_cst (TREE_TYPE (arg0)));
12179 return fold_build2_loc (loc, code, type,
12180 fold_convert_loc (loc, TREE_TYPE (arg1),
12181 tem), arg1);
12183 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12184 && integer_onep (TREE_OPERAND (arg01, 0)))
12186 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12187 arg00, TREE_OPERAND (arg01, 1));
12188 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12189 build_one_cst (TREE_TYPE (arg0)));
12190 return fold_build2_loc (loc, code, type,
12191 fold_convert_loc (loc, TREE_TYPE (arg1),
12192 tem), arg1);
12196 /* If this is a comparison of a field, we may be able to simplify it. */
12197 if ((TREE_CODE (arg0) == COMPONENT_REF
12198 || TREE_CODE (arg0) == BIT_FIELD_REF)
12199 /* Handle the constant case even without -O
12200 to make sure the warnings are given. */
12201 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12203 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12204 if (t1)
12205 return t1;
12208 /* Optimize comparisons of strlen vs zero to a compare of the
12209 first character of the string vs zero. To wit,
12210 strlen(ptr) == 0 => *ptr == 0
12211 strlen(ptr) != 0 => *ptr != 0
12212 Other cases should reduce to one of these two (or a constant)
12213 due to the return value of strlen being unsigned. */
12214 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12216 tree fndecl = get_callee_fndecl (arg0);
12218 if (fndecl
12219 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12220 && call_expr_nargs (arg0) == 1
12221 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12222 == POINTER_TYPE))
12224 tree ptrtype
12225 = build_pointer_type (build_qualified_type (char_type_node,
12226 TYPE_QUAL_CONST));
12227 tree ptr = fold_convert_loc (loc, ptrtype,
12228 CALL_EXPR_ARG (arg0, 0));
12229 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12230 return fold_build2_loc (loc, code, type, iref,
12231 build_int_cst (TREE_TYPE (iref), 0));
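/* For example (sketch):

     strlen (s) == 0    ->    *(const char *) s == 0

   a single byte load instead of a call that walks the string.  */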
12235 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12236 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12237 if (TREE_CODE (arg0) == RSHIFT_EXPR
12238 && integer_zerop (arg1)
12239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12241 tree arg00 = TREE_OPERAND (arg0, 0);
12242 tree arg01 = TREE_OPERAND (arg0, 1);
12243 tree itype = TREE_TYPE (arg00);
12244 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12246 if (TYPE_UNSIGNED (itype))
12248 itype = signed_type_for (itype);
12249 arg00 = fold_convert_loc (loc, itype, arg00);
12251 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12252 type, arg00, build_zero_cst (itype));
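/* For example (sketch, 32-bit int x):

     (x >> 31) != 0    ->    x < 0
     (x >> 31) == 0    ->    x >= 0

   For unsigned x the shift extracts the top bit, so x is first
   converted to the corresponding signed type and the same fold
   applies.  */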
12256 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12257 (X & C) == 0 when C is a single bit. */
12258 if (TREE_CODE (arg0) == BIT_AND_EXPR
12259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12260 && integer_zerop (arg1)
12261 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12263 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12264 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12265 TREE_OPERAND (arg0, 1));
12266 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12267 type, tem,
12268 fold_convert_loc (loc, TREE_TYPE (arg0),
12269 arg1));
12272 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12273 constant C is a power of two, i.e. a single bit. */
12274 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12275 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12276 && integer_zerop (arg1)
12277 && integer_pow2p (TREE_OPERAND (arg0, 1))
12278 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12279 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12281 tree arg00 = TREE_OPERAND (arg0, 0);
12282 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12283 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12286 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12287 when C is a power of two, i.e. a single bit. */
12288 if (TREE_CODE (arg0) == BIT_AND_EXPR
12289 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12290 && integer_zerop (arg1)
12291 && integer_pow2p (TREE_OPERAND (arg0, 1))
12292 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12293 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12295 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12296 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12297 arg000, TREE_OPERAND (arg0, 1));
12298 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12299 tem, build_int_cst (TREE_TYPE (tem), 0));
12302 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12303 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12305 tree arg00 = TREE_OPERAND (arg0, 0);
12306 tree arg01 = TREE_OPERAND (arg0, 1);
12307 tree arg10 = TREE_OPERAND (arg1, 0);
12308 tree arg11 = TREE_OPERAND (arg1, 1);
12309 tree itype = TREE_TYPE (arg0);
12311 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12312 operand_equal_p guarantees no side-effects so we don't need
12313 to use omit_one_operand on Z. */
12314 if (operand_equal_p (arg01, arg11, 0))
12315 return fold_build2_loc (loc, code, type, arg00,
12316 fold_convert_loc (loc, TREE_TYPE (arg00),
12317 arg10));
12318 if (operand_equal_p (arg01, arg10, 0))
12319 return fold_build2_loc (loc, code, type, arg00,
12320 fold_convert_loc (loc, TREE_TYPE (arg00),
12321 arg11));
12322 if (operand_equal_p (arg00, arg11, 0))
12323 return fold_build2_loc (loc, code, type, arg01,
12324 fold_convert_loc (loc, TREE_TYPE (arg01),
12325 arg10));
12326 if (operand_equal_p (arg00, arg10, 0))
12327 return fold_build2_loc (loc, code, type, arg01,
12328 fold_convert_loc (loc, TREE_TYPE (arg01),
12329 arg11));
12331 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12332 if (TREE_CODE (arg01) == INTEGER_CST
12333 && TREE_CODE (arg11) == INTEGER_CST)
12335 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12336 fold_convert_loc (loc, itype, arg11));
12337 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12338 return fold_build2_loc (loc, code, type, tem,
12339 fold_convert_loc (loc, itype, arg10));
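/* XOR with one and the same value is a bijection, so it can be
   cancelled from both sides of an equality (sketch):

     (x ^ z) == (y ^ z)    ->    x == y
     (x ^ 5) == (y ^ 3)    ->    (x ^ 6) == y     (5 ^ 3 == 6)  */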
12343 /* Attempt to simplify equality/inequality comparisons of complex
12344 values. Only lower the comparison if the result is known or
12345 can be simplified to a single scalar comparison. */
12346 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12347 || TREE_CODE (arg0) == COMPLEX_CST)
12348 && (TREE_CODE (arg1) == COMPLEX_EXPR
12349 || TREE_CODE (arg1) == COMPLEX_CST))
12351 tree real0, imag0, real1, imag1;
12352 tree rcond, icond;
12354 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12356 real0 = TREE_OPERAND (arg0, 0);
12357 imag0 = TREE_OPERAND (arg0, 1);
12359 else
12361 real0 = TREE_REALPART (arg0);
12362 imag0 = TREE_IMAGPART (arg0);
12365 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12367 real1 = TREE_OPERAND (arg1, 0);
12368 imag1 = TREE_OPERAND (arg1, 1);
12370 else
12372 real1 = TREE_REALPART (arg1);
12373 imag1 = TREE_IMAGPART (arg1);
12376 rcond = fold_binary_loc (loc, code, type, real0, real1);
12377 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12379 if (integer_zerop (rcond))
12381 if (code == EQ_EXPR)
12382 return omit_two_operands_loc (loc, type, boolean_false_node,
12383 imag0, imag1);
12384 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12386 else
12388 if (code == NE_EXPR)
12389 return omit_two_operands_loc (loc, type, boolean_true_node,
12390 imag0, imag1);
12391 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12395 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12396 if (icond && TREE_CODE (icond) == INTEGER_CST)
12398 if (integer_zerop (icond))
12400 if (code == EQ_EXPR)
12401 return omit_two_operands_loc (loc, type, boolean_false_node,
12402 real0, real1);
12403 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12405 else
12407 if (code == NE_EXPR)
12408 return omit_two_operands_loc (loc, type, boolean_true_node,
12409 real0, real1);
12410 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12415 return NULL_TREE;
12417 case LT_EXPR:
12418 case GT_EXPR:
12419 case LE_EXPR:
12420 case GE_EXPR:
12421 tem = fold_comparison (loc, code, type, op0, op1);
12422 if (tem != NULL_TREE)
12423 return tem;
12425 /* Transform comparisons of the form X +- C CMP X. */
12426 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12427 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12428 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12429 && !HONOR_SNANS (arg0))
12431 tree arg01 = TREE_OPERAND (arg0, 1);
12432 enum tree_code code0 = TREE_CODE (arg0);
12433 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12435 /* (X - c) > X becomes false. */
12436 if (code == GT_EXPR
12437 && ((code0 == MINUS_EXPR && is_positive >= 0)
12438 || (code0 == PLUS_EXPR && is_positive <= 0)))
12439 return constant_boolean_node (0, type);
12441 /* Likewise (X + c) < X becomes false. */
12442 if (code == LT_EXPR
12443 && ((code0 == PLUS_EXPR && is_positive >= 0)
12444 || (code0 == MINUS_EXPR && is_positive <= 0)))
12445 return constant_boolean_node (0, type);
12447 /* Convert (X - c) <= X to true. */
12448 if (!HONOR_NANS (arg1)
12449 && code == LE_EXPR
12450 && ((code0 == MINUS_EXPR && is_positive >= 0)
12451 || (code0 == PLUS_EXPR && is_positive <= 0)))
12452 return constant_boolean_node (1, type);
12454 /* Convert (X + c) >= X to true. */
12455 if (!HONOR_NANS (arg1)
12456 && code == GE_EXPR
12457 && ((code0 == PLUS_EXPR && is_positive >= 0)
12458 || (code0 == MINUS_EXPR && is_positive <= 0)))
12459 return constant_boolean_node (1, type);
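/* For example (sketch, double x):

     x - 1.0 > x     ->    false   correct even for NaN x, since any
                                   ordered comparison with a NaN is
                                   false anyway
     x + 1.0 >= x    ->    true    only when NaNs need not be
                                   honored, as NaN >= NaN is false  */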
12462 /* If we are comparing an ABS_EXPR with a constant, we can
12463 convert all the cases into explicit comparisons, but they may
12464 well not be faster than doing the ABS and one comparison.
12465 But ABS (X) <= C is a range comparison, which becomes a subtraction
12466 and a comparison, and is probably faster. */
12467 if (code == LE_EXPR
12468 && TREE_CODE (arg1) == INTEGER_CST
12469 && TREE_CODE (arg0) == ABS_EXPR
12470 && ! TREE_SIDE_EFFECTS (arg0)
12471 && (tem = negate_expr (arg1)) != 0
12472 && TREE_CODE (tem) == INTEGER_CST
12473 && !TREE_OVERFLOW (tem))
12474 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12475 build2 (GE_EXPR, type,
12476 TREE_OPERAND (arg0, 0), tem),
12477 build2 (LE_EXPR, type,
12478 TREE_OPERAND (arg0, 0), arg1));
12480 /* Convert ABS_EXPR<x> >= 0 to true. */
12481 strict_overflow_p = false;
12482 if (code == GE_EXPR
12483 && (integer_zerop (arg1)
12484 || (! HONOR_NANS (arg0)
12485 && real_zerop (arg1)))
12486 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12488 if (strict_overflow_p)
12489 fold_overflow_warning (("assuming signed overflow does not occur "
12490 "when simplifying comparison of "
12491 "absolute value and zero"),
12492 WARN_STRICT_OVERFLOW_CONDITIONAL);
12493 return omit_one_operand_loc (loc, type,
12494 constant_boolean_node (true, type),
12495 arg0);
12498 /* Convert ABS_EXPR<x> < 0 to false. */
12499 strict_overflow_p = false;
12500 if (code == LT_EXPR
12501 && (integer_zerop (arg1) || real_zerop (arg1))
12502 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12504 if (strict_overflow_p)
12505 fold_overflow_warning (("assuming signed overflow does not occur "
12506 "when simplifying comparison of "
12507 "absolute value and zero"),
12508 WARN_STRICT_OVERFLOW_CONDITIONAL);
12509 return omit_one_operand_loc (loc, type,
12510 constant_boolean_node (false, type),
12511 arg0);
12514 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12515 and similarly for >= into !=. */
12516 if ((code == LT_EXPR || code == GE_EXPR)
12517 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12518 && TREE_CODE (arg1) == LSHIFT_EXPR
12519 && integer_onep (TREE_OPERAND (arg1, 0)))
12520 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12521 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12522 TREE_OPERAND (arg1, 1)),
12523 build_zero_cst (TREE_TYPE (arg0)));
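/* For example (sketch, unsigned x):

     x < (1u << y)     ->    (x >> y) == 0
     x >= (1u << y)    ->    (x >> y) != 0

   because an unsigned X is less than 2**Y exactly when no bit at
   position Y or above is set.  */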
12525 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12526 otherwise Y might be >= # of bits in X's type and thus e.g.
12527 (unsigned char) (1 << Y) for Y 15 might be 0.
12528 If the cast is widening, then 1 << Y should have unsigned type,
12529 otherwise if Y is number of bits in the signed shift type minus 1,
12530 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12531 31 might be 0xffffffff80000000. */
12532 if ((code == LT_EXPR || code == GE_EXPR)
12533 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12534 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12535 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12536 && CONVERT_EXPR_P (arg1)
12537 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12538 && (element_precision (TREE_TYPE (arg1))
12539 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12540 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12541 || (element_precision (TREE_TYPE (arg1))
12542 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12543 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12545 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12546 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12547 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12548 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12549 build_zero_cst (TREE_TYPE (arg0)));
12552 return NULL_TREE;
12554 case UNORDERED_EXPR:
12555 case ORDERED_EXPR:
12556 case UNLT_EXPR:
12557 case UNLE_EXPR:
12558 case UNGT_EXPR:
12559 case UNGE_EXPR:
12560 case UNEQ_EXPR:
12561 case LTGT_EXPR:
12562 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12564 tree targ0 = strip_float_extensions (arg0);
12565 tree targ1 = strip_float_extensions (arg1);
12566 tree newtype = TREE_TYPE (targ0);
12568 if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12569 newtype = TREE_TYPE (targ1);
12571 if (element_precision (newtype) < element_precision (TREE_TYPE (arg0)))
12572 return fold_build2_loc (loc, code, type,
12573 fold_convert_loc (loc, newtype, targ0),
12574 fold_convert_loc (loc, newtype, targ1));
12577 return NULL_TREE;
12579 case COMPOUND_EXPR:
12580 /* When pedantic, a compound expression can be neither an lvalue
12581 nor an integer constant expression. */
12582 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12583 return NULL_TREE;
12584 /* Don't let (0, 0) be a null pointer constant. */
12585 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12586 : fold_convert_loc (loc, type, arg1);
12587 return tem;
12589 default:
12590 return NULL_TREE;
12591 } /* switch (code) */
12594 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12595 ((A & N) + B) & M -> (A + B) & M
12596 Similarly if (N & M) == 0,
12597 ((A | N) + B) & M -> (A + B) & M
12598 and for - instead of + (or unary - instead of +)
12599 and/or ^ instead of |.
12600 If B is constant and (B & M) == 0, fold into A & M.
12602 This function is a helper for match.pd patterns. Return a non-NULL
12603 type in which the simplified operation should be performed, but only
12604 if some optimization is possible.
12606 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12607 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12608 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12609 +/-. */
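/* For illustration: with M == 0xff (cst == 8), ((A & 0x1ff) + B) & 0xff
   simplifies to (A + B) & 0xff since 0x1ff & 0xff == 0xff, and
   ((A | 0x100) + B) & 0xff qualifies since 0x100 & 0xff == 0. */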
12610 tree
12611 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12612 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12613 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12614 tree *pmop)
12616 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12617 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12618 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12619 if (~cst1 == 0
12620 || (cst1 & (cst1 + 1)) != 0
12621 || !INTEGRAL_TYPE_P (type)
12622 || (!TYPE_OVERFLOW_WRAPS (type)
12623 && TREE_CODE (type) != INTEGER_TYPE)
12624 || (wi::max_value (type) & cst1) != cst1)
12625 return NULL_TREE;
12627 enum tree_code codes[2] = { code00, code01 };
12628 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12629 int which = 0;
12630 wide_int cst0;
12632 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12633 arg1 (M) is (1LL << cst) - 1.
12634 Store C into PMOP[0] and D into PMOP[1]. */
12635 pmop[0] = arg00;
12636 pmop[1] = arg01;
12637 which = code != NEGATE_EXPR;
12639 for (; which >= 0; which--)
12640 switch (codes[which])
12642 case BIT_AND_EXPR:
12643 case BIT_IOR_EXPR:
12644 case BIT_XOR_EXPR:
12645 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12646 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12647 if (codes[which] == BIT_AND_EXPR)
12649 if (cst0 != cst1)
12650 break;
12652 else if (cst0 != 0)
12653 break;
12654 /* If C or D is of the form (A & N) where
12655 (N & M) == M, or of the form (A | N) or
12656 (A ^ N) where (N & M) == 0, replace it with A. */
12657 pmop[which] = arg0xx[2 * which];
12658 break;
12659 case ERROR_MARK:
12660 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12661 break;
12662 /* If C or D is a N where (N & M) == 0, it can be
12663 omitted (replaced with 0). */
12664 if ((code == PLUS_EXPR
12665 || (code == MINUS_EXPR && which == 0))
12666 && (cst1 & wi::to_wide (pmop[which])) == 0)
12667 pmop[which] = build_int_cst (type, 0);
12668 /* Similarly, with C - N where (-N & M) == 0. */
12669 if (code == MINUS_EXPR
12670 && which == 1
12671 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12672 pmop[which] = build_int_cst (type, 0);
12673 break;
12674 default:
12675 gcc_unreachable ();
12678 /* Only build anything new if we optimized one or both arguments above. */
12679 if (pmop[0] == arg00 && pmop[1] == arg01)
12680 return NULL_TREE;
12682 if (TYPE_OVERFLOW_WRAPS (type))
12683 return type;
12684 else
12685 return unsigned_type_for (type);
12688 /* Used by contains_label_[p1]. */
12690 struct contains_label_data
12692 hash_set<tree> *pset;
12693 bool inside_switch_p;
12696 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12697 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12698 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12700 static tree
12701 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12703 contains_label_data *d = (contains_label_data *) data;
12704 switch (TREE_CODE (*tp))
12706 case LABEL_EXPR:
12707 return *tp;
12709 case CASE_LABEL_EXPR:
12710 if (!d->inside_switch_p)
12711 return *tp;
12712 return NULL_TREE;
12714 case SWITCH_EXPR:
12715 if (!d->inside_switch_p)
12717 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12718 return *tp;
12719 d->inside_switch_p = true;
12720 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12721 return *tp;
12722 d->inside_switch_p = false;
12723 *walk_subtrees = 0;
12725 return NULL_TREE;
12727 case GOTO_EXPR:
12728 *walk_subtrees = 0;
12729 return NULL_TREE;
12731 default:
12732 return NULL_TREE;
12736 /* Return whether the sub-tree ST contains a label which is accessible from
12737 outside the sub-tree. */
12739 static bool
12740 contains_label_p (tree st)
12742 hash_set<tree> pset;
12743 contains_label_data data = { &pset, false };
12744 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12747 /* Fold a ternary expression of code CODE and type TYPE with operands
12748 OP0, OP1, and OP2. Return the folded expression if folding is
12749 successful. Otherwise, return NULL_TREE. */
12751 tree
12752 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12753 tree op0, tree op1, tree op2)
12755 tree tem;
12756 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12757 enum tree_code_class kind = TREE_CODE_CLASS (code);
12759 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12760 && TREE_CODE_LENGTH (code) == 3);
12762 /* If this is a commutative operation, and OP0 is a constant, move it
12763 to OP1 to reduce the number of tests below. */
12764 if (commutative_ternary_tree_code (code)
12765 && tree_swap_operands_p (op0, op1))
12766 return fold_build3_loc (loc, code, type, op1, op0, op2);
12768 tem = generic_simplify (loc, code, type, op0, op1, op2);
12769 if (tem)
12770 return tem;
12772 /* Strip any conversions that don't change the mode. This is safe
12773 for every expression, except for a comparison expression because
12774 its signedness is derived from its operands. So, in the latter
12775 case, only strip conversions that don't change the signedness.
12777 Note that this is done as an internal manipulation within the
12778 constant folder, in order to find the simplest representation of
12779 the arguments so that their form can be studied. In any case,
12780 the appropriate type conversions should be put back in the tree
12781 that will get out of the constant folder. */
12782 if (op0)
12784 arg0 = op0;
12785 STRIP_NOPS (arg0);
12788 if (op1)
12790 arg1 = op1;
12791 STRIP_NOPS (arg1);
12794 if (op2)
12796 arg2 = op2;
12797 STRIP_NOPS (arg2);
12800 switch (code)
12802 case COMPONENT_REF:
12803 if (TREE_CODE (arg0) == CONSTRUCTOR
12804 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12806 unsigned HOST_WIDE_INT idx;
12807 tree field, value;
12808 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12809 if (field == arg1)
12810 return value;
12812 return NULL_TREE;
12814 case COND_EXPR:
12815 case VEC_COND_EXPR:
12816 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12817 so all simple results must be passed through pedantic_non_lvalue. */
12818 if (TREE_CODE (arg0) == INTEGER_CST)
12820 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12821 tem = integer_zerop (arg0) ? op2 : op1;
12822 /* Only optimize constant conditions when the selected branch
12823 has the same type as the COND_EXPR. This avoids optimizing
12824 away "c ? x : throw", where the throw has a void type.
12825 Avoid throwing away an operand which contains a label. */
12826 if ((!TREE_SIDE_EFFECTS (unused_op)
12827 || !contains_label_p (unused_op))
12828 && (! VOID_TYPE_P (TREE_TYPE (tem))
12829 || VOID_TYPE_P (type)))
12830 return protected_set_expr_location_unshare (tem, loc);
12831 return NULL_TREE;
12833 else if (TREE_CODE (arg0) == VECTOR_CST)
12835 unsigned HOST_WIDE_INT nelts;
12836 if ((TREE_CODE (arg1) == VECTOR_CST
12837 || TREE_CODE (arg1) == CONSTRUCTOR)
12838 && (TREE_CODE (arg2) == VECTOR_CST
12839 || TREE_CODE (arg2) == CONSTRUCTOR)
12840 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12842 vec_perm_builder sel (nelts, nelts, 1);
12843 for (unsigned int i = 0; i < nelts; i++)
12845 tree val = VECTOR_CST_ELT (arg0, i);
12846 if (integer_all_onesp (val))
12847 sel.quick_push (i);
12848 else if (integer_zerop (val))
12849 sel.quick_push (nelts + i);
12850 else /* Currently unreachable. */
12851 return NULL_TREE;
12853 vec_perm_indices indices (sel, 2, nelts);
12854 tree t = fold_vec_perm (type, arg1, arg2, indices);
12855 if (t != NULL_TREE)
12856 return t;
12860 /* If we have A op B ? A : C, we may be able to convert this to a
12861 simpler expression, depending on the operation and the values
12862 of B and C. Signed zeros prevent all of these transformations,
12863 for reasons given above each one.
12865 Also try swapping the arguments and inverting the conditional. */
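/* E.g. X > Y ? X : Y may be simplified to MAX_EXPR <X, Y> when the
   types permit (see fold_cond_expr_with_comparison). */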
12866 if (COMPARISON_CLASS_P (arg0)
12867 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12868 && !HONOR_SIGNED_ZEROS (op1))
12870 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12871 TREE_OPERAND (arg0, 0),
12872 TREE_OPERAND (arg0, 1),
12873 op1, op2);
12874 if (tem)
12875 return tem;
12878 if (COMPARISON_CLASS_P (arg0)
12879 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12880 && !HONOR_SIGNED_ZEROS (op2))
12882 enum tree_code comp_code = TREE_CODE (arg0);
12883 tree arg00 = TREE_OPERAND (arg0, 0);
12884 tree arg01 = TREE_OPERAND (arg0, 1);
12885 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12886 if (comp_code != ERROR_MARK)
12887 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12888 arg00,
12889 arg01,
12890 op2, op1);
12891 if (tem)
12892 return tem;
12895 /* If the second operand is simpler than the third, swap them
12896 since that produces better jump optimization results. */
12897 if (truth_value_p (TREE_CODE (arg0))
12898 && tree_swap_operands_p (op1, op2))
12900 location_t loc0 = expr_location_or (arg0, loc);
12901 /* See if this can be inverted. If it can't, possibly because
12902 it was a floating-point inequality comparison, don't do
12903 anything. */
12904 tem = fold_invert_truthvalue (loc0, arg0);
12905 if (tem)
12906 return fold_build3_loc (loc, code, type, tem, op2, op1);
12909 /* Convert A ? 1 : 0 to simply A. */
12910 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12911 : (integer_onep (op1)
12912 && !VECTOR_TYPE_P (type)))
12913 && integer_zerop (op2)
12914 /* If we try to convert OP0 to our type, the
12915 call to fold will try to move the conversion inside
12916 a COND, which will recurse. In that case, the COND_EXPR
12917 is probably the best choice, so leave it alone. */
12918 && type == TREE_TYPE (arg0))
12919 return protected_set_expr_location_unshare (arg0, loc);
12921 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12922 over COND_EXPR in cases such as floating point comparisons. */
12923 if (integer_zerop (op1)
12924 && code == COND_EXPR
12925 && integer_onep (op2)
12926 && !VECTOR_TYPE_P (type)
12927 && truth_value_p (TREE_CODE (arg0)))
12928 return fold_convert_loc (loc, type,
12929 invert_truthvalue_loc (loc, arg0));
12931 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
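/* E.g. for a 32-bit signed A: A < 0 ? 0x80000000 : 0 folds to
   A & 0x80000000. */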
12932 if (TREE_CODE (arg0) == LT_EXPR
12933 && integer_zerop (TREE_OPERAND (arg0, 1))
12934 && integer_zerop (op2)
12935 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12937 /* sign_bit_p looks through both zero and sign extensions,
12938 but for this optimization only sign extensions are
12939 usable. */
12940 tree tem2 = TREE_OPERAND (arg0, 0);
12941 while (tem != tem2)
12943 if (TREE_CODE (tem2) != NOP_EXPR
12944 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12946 tem = NULL_TREE;
12947 break;
12949 tem2 = TREE_OPERAND (tem2, 0);
12951 /* sign_bit_p only checks ARG1 bits within A's precision.
12952 If <sign bit of A> has wider type than A, bits outside
12953 of A's precision in <sign bit of A> need to be checked.
12954 If they are all 0, this optimization needs to be done
12955 in unsigned A's type; if they are all 1, in signed A's type;
12956 otherwise this can't be done. */
12957 if (tem
12958 && TYPE_PRECISION (TREE_TYPE (tem))
12959 < TYPE_PRECISION (TREE_TYPE (arg1))
12960 && TYPE_PRECISION (TREE_TYPE (tem))
12961 < TYPE_PRECISION (type))
12963 int inner_width, outer_width;
12964 tree tem_type;
12966 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12967 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12968 if (outer_width > TYPE_PRECISION (type))
12969 outer_width = TYPE_PRECISION (type);
12971 wide_int mask = wi::shifted_mask
12972 (inner_width, outer_width - inner_width, false,
12973 TYPE_PRECISION (TREE_TYPE (arg1)));
12975 wide_int common = mask & wi::to_wide (arg1);
12976 if (common == mask)
12978 tem_type = signed_type_for (TREE_TYPE (tem));
12979 tem = fold_convert_loc (loc, tem_type, tem);
12981 else if (common == 0)
12983 tem_type = unsigned_type_for (TREE_TYPE (tem));
12984 tem = fold_convert_loc (loc, tem_type, tem);
12986 else
12987 tem = NULL;
12990 if (tem)
12991 return
12992 fold_convert_loc (loc, type,
12993 fold_build2_loc (loc, BIT_AND_EXPR,
12994 TREE_TYPE (tem), tem,
12995 fold_convert_loc (loc,
12996 TREE_TYPE (tem),
12997 arg1)));
13000 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13001 already handled above. */
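/* E.g. (A >> 3) & 1 ? 8 : 0 folds to A & 8. */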
13002 if (TREE_CODE (arg0) == BIT_AND_EXPR
13003 && integer_onep (TREE_OPERAND (arg0, 1))
13004 && integer_zerop (op2)
13005 && integer_pow2p (arg1))
13007 tree tem = TREE_OPERAND (arg0, 0);
13008 STRIP_NOPS (tem);
13009 if (TREE_CODE (tem) == RSHIFT_EXPR
13010 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13011 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13012 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13014 fold_convert_loc (loc, type,
13015 TREE_OPERAND (tem, 0)),
13016 op1);
13019 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13020 is probably obsolete because the first operand should be a
13021 truth value (that's why we have the two cases above), but let's
13022 leave it in until we can confirm this for all front-ends. */
13023 if (integer_zerop (op2)
13024 && TREE_CODE (arg0) == NE_EXPR
13025 && integer_zerop (TREE_OPERAND (arg0, 1))
13026 && integer_pow2p (arg1)
13027 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13028 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13029 arg1, OEP_ONLY_CONST)
13030 /* operand_equal_p compares just the value, not the precision, so
13031 e.g. arg1 could be an 8-bit -128 and be a power of two, while the
13032 BIT_AND_EXPR's second operand is a 32-bit -128, which is not a
13033 power of two (or vice versa). */
13034 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13035 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13037 /* Disable the transformations below for vectors, since
13038 fold_binary_op_with_conditional_arg may undo them immediately,
13039 yielding an infinite loop. */
13040 if (code == VEC_COND_EXPR)
13041 return NULL_TREE;
13043 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13044 if (integer_zerop (op2)
13045 && truth_value_p (TREE_CODE (arg0))
13046 && truth_value_p (TREE_CODE (arg1))
13047 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13048 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13049 : TRUTH_ANDIF_EXPR,
13050 type, fold_convert_loc (loc, type, arg0), op1);
13052 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13053 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13054 && truth_value_p (TREE_CODE (arg0))
13055 && truth_value_p (TREE_CODE (arg1))
13056 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13058 location_t loc0 = expr_location_or (arg0, loc);
13059 /* Only perform transformation if ARG0 is easily inverted. */
13060 tem = fold_invert_truthvalue (loc0, arg0);
13061 if (tem)
13062 return fold_build2_loc (loc, code == VEC_COND_EXPR
13063 ? BIT_IOR_EXPR
13064 : TRUTH_ORIF_EXPR,
13065 type, fold_convert_loc (loc, type, tem),
13066 op1);
13069 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13070 if (integer_zerop (arg1)
13071 && truth_value_p (TREE_CODE (arg0))
13072 && truth_value_p (TREE_CODE (op2))
13073 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13075 location_t loc0 = expr_location_or (arg0, loc);
13076 /* Only perform transformation if ARG0 is easily inverted. */
13077 tem = fold_invert_truthvalue (loc0, arg0);
13078 if (tem)
13079 return fold_build2_loc (loc, code == VEC_COND_EXPR
13080 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13081 type, fold_convert_loc (loc, type, tem),
13082 op2);
13085 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13086 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13087 && truth_value_p (TREE_CODE (arg0))
13088 && truth_value_p (TREE_CODE (op2))
13089 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13090 return fold_build2_loc (loc, code == VEC_COND_EXPR
13091 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13092 type, fold_convert_loc (loc, type, arg0), op2);
13094 return NULL_TREE;
13096 case CALL_EXPR:
13097 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13098 of fold_ternary on them. */
13099 gcc_unreachable ();
13101 case BIT_FIELD_REF:
13102 if (TREE_CODE (arg0) == VECTOR_CST
13103 && (type == TREE_TYPE (TREE_TYPE (arg0))
13104 || (VECTOR_TYPE_P (type)
13105 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13106 && tree_fits_uhwi_p (op1)
13107 && tree_fits_uhwi_p (op2))
13109 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13110 unsigned HOST_WIDE_INT width
13111 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13112 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13113 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13114 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13116 if (n != 0
13117 && (idx % width) == 0
13118 && (n % width) == 0
13119 && known_le ((idx + n) / width,
13120 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13122 idx = idx / width;
13123 n = n / width;
13125 if (TREE_CODE (arg0) == VECTOR_CST)
13127 if (n == 1)
13129 tem = VECTOR_CST_ELT (arg0, idx);
13130 if (VECTOR_TYPE_P (type))
13131 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13132 return tem;
13135 tree_vector_builder vals (type, n, 1);
13136 for (unsigned i = 0; i < n; ++i)
13137 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13138 return vals.build ();
13143 /* On constants we can use native encode/interpret to constant
13144 fold (nearly) all BIT_FIELD_REFs. */
13145 if (CONSTANT_CLASS_P (arg0)
13146 && can_native_interpret_type_p (type)
13147 && BITS_PER_UNIT == 8
13148 && tree_fits_uhwi_p (op1)
13149 && tree_fits_uhwi_p (op2))
13151 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13152 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13153 /* Limit us to a reasonable amount of work. To relax the
13154 other limitations we need bit-shifting of the buffer
13155 and rounding up the size. */
13156 if (bitpos % BITS_PER_UNIT == 0
13157 && bitsize % BITS_PER_UNIT == 0
13158 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13160 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13161 unsigned HOST_WIDE_INT len
13162 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13163 bitpos / BITS_PER_UNIT);
13164 if (len > 0
13165 && len * BITS_PER_UNIT >= bitsize)
13167 tree v = native_interpret_expr (type, b,
13168 bitsize / BITS_PER_UNIT);
13169 if (v)
13170 return v;
13175 return NULL_TREE;
13177 case VEC_PERM_EXPR:
13178 /* Perform constant folding of VEC_PERM_EXPR. */
13179 if (TREE_CODE (arg2) == VECTOR_CST
13180 && TREE_CODE (op0) == VECTOR_CST
13181 && TREE_CODE (op1) == VECTOR_CST)
13183 /* Build a vector of integers from the tree mask. */
13184 vec_perm_builder builder;
13185 if (!tree_to_vec_perm_builder (&builder, arg2))
13186 return NULL_TREE;
13188 /* Create a vec_perm_indices for the integer vector. */
13189 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13190 bool single_arg = (op0 == op1);
13191 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13192 return fold_vec_perm (type, op0, op1, sel);
13194 return NULL_TREE;
13196 case BIT_INSERT_EXPR:
13197 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
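/* For illustration: inserting the 8-bit value 0xab at bit position 8
   of the 32-bit constant 0x12345678 masks out bits 8..15 and ORs in
   0xab << 8, yielding 0x1234ab78. */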
13198 if (TREE_CODE (arg0) == INTEGER_CST
13199 && TREE_CODE (arg1) == INTEGER_CST)
13201 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13202 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13203 wide_int tem = (wi::to_wide (arg0)
13204 & wi::shifted_mask (bitpos, bitsize, true,
13205 TYPE_PRECISION (type)));
13206 wide_int tem2
13207 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13208 bitsize), bitpos);
13209 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13211 else if (TREE_CODE (arg0) == VECTOR_CST
13212 && CONSTANT_CLASS_P (arg1)
13213 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13214 TREE_TYPE (arg1)))
13216 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13217 unsigned HOST_WIDE_INT elsize
13218 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13219 if (bitpos % elsize == 0)
13221 unsigned k = bitpos / elsize;
13222 unsigned HOST_WIDE_INT nelts;
13223 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13224 return arg0;
13225 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13227 tree_vector_builder elts (type, nelts, 1);
13228 elts.quick_grow (nelts);
13229 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13230 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13231 return elts.build ();
13235 return NULL_TREE;
13237 default:
13238 return NULL_TREE;
13239 } /* switch (code) */
13242 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13243 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
13244 constructor element index of the value returned. If the element is
13245 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13246 the index of the element after the ACCESS_INDEX position (which
13247 may be outside of the CTOR array). */
13249 tree
13250 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13251 unsigned *ctor_idx)
13253 tree index_type = NULL_TREE;
13254 signop index_sgn = UNSIGNED;
13255 offset_int low_bound = 0;
13257 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13259 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13260 if (domain_type && TYPE_MIN_VALUE (domain_type))
13262 /* Static constructors for variably sized objects make no sense. */
13263 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13264 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13265 /* ??? When it is obvious that the range is signed, treat it so. */
13266 if (TYPE_UNSIGNED (index_type)
13267 && TYPE_MAX_VALUE (domain_type)
13268 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13269 TYPE_MIN_VALUE (domain_type)))
13271 index_sgn = SIGNED;
13272 low_bound
13273 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13274 SIGNED);
13276 else
13278 index_sgn = TYPE_SIGN (index_type);
13279 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13284 if (index_type)
13285 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13286 index_sgn);
13288 offset_int index = low_bound;
13289 if (index_type)
13290 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13292 offset_int max_index = index;
13293 unsigned cnt;
13294 tree cfield, cval;
13295 bool first_p = true;
13297 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13299 /* An array constructor might explicitly set the index, or specify
13300 a range, or leave the index NULL, meaning that it is the next
13301 index after the previous one. */
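/* E.g. in { [0 ... 3] = 1, 5, [7] = 2 } the element 5 has a NULL
   index and is assigned index 4, one past the preceding range. */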
13302 if (cfield)
13304 if (TREE_CODE (cfield) == INTEGER_CST)
13305 max_index = index
13306 = offset_int::from (wi::to_wide (cfield), index_sgn);
13307 else
13309 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13310 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13311 index_sgn);
13312 max_index
13313 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13314 index_sgn);
13315 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13318 else if (!first_p)
13320 index = max_index + 1;
13321 if (index_type)
13322 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13323 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13324 max_index = index;
13326 else
13327 first_p = false;
13329 /* Do we have a match? */
13330 if (wi::cmp (access_index, index, index_sgn) >= 0)
13332 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13334 if (ctor_idx)
13335 *ctor_idx = cnt;
13336 return cval;
13339 else if (in_gimple_form)
13340 /* We're past the element we're searching for. Note that during
13341 parsing the elements might not be sorted.
13342 ??? We should use a binary search and a flag on the
13343 CONSTRUCTOR as to whether elements are sorted in declaration
13344 order. */
13345 break;
13347 if (ctor_idx)
13348 *ctor_idx = cnt;
13349 return NULL_TREE;
13352 /* Perform constant folding and related simplification of EXPR.
13353 The related simplifications include x*1 => x, x*0 => 0, etc.,
13354 and application of the associative law.
13355 NOP_EXPR conversions may be removed freely (as long as we
13356 are careful not to change the type of the overall expression).
13357 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13358 but we can constant-fold them if they have constant operands. */
13360 #ifdef ENABLE_FOLD_CHECKING
13361 # define fold(x) fold_1 (x)
13362 static tree fold_1 (tree);
13363 static
13364 #endif
13365 tree
13366 fold (tree expr)
13368 const tree t = expr;
13369 enum tree_code code = TREE_CODE (t);
13370 enum tree_code_class kind = TREE_CODE_CLASS (code);
13371 tree tem;
13372 location_t loc = EXPR_LOCATION (expr);
13374 /* Return right away if a constant. */
13375 if (kind == tcc_constant)
13376 return t;
13378 /* CALL_EXPR-like objects with variable numbers of operands are
13379 treated specially. */
13380 if (kind == tcc_vl_exp)
13382 if (code == CALL_EXPR)
13384 tem = fold_call_expr (loc, expr, false);
13385 return tem ? tem : expr;
13387 return expr;
13390 if (IS_EXPR_CODE_CLASS (kind))
13392 tree type = TREE_TYPE (t);
13393 tree op0, op1, op2;
13395 switch (TREE_CODE_LENGTH (code))
13397 case 1:
13398 op0 = TREE_OPERAND (t, 0);
13399 tem = fold_unary_loc (loc, code, type, op0);
13400 return tem ? tem : expr;
13401 case 2:
13402 op0 = TREE_OPERAND (t, 0);
13403 op1 = TREE_OPERAND (t, 1);
13404 tem = fold_binary_loc (loc, code, type, op0, op1);
13405 return tem ? tem : expr;
13406 case 3:
13407 op0 = TREE_OPERAND (t, 0);
13408 op1 = TREE_OPERAND (t, 1);
13409 op2 = TREE_OPERAND (t, 2);
13410 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13411 return tem ? tem : expr;
13412 default:
13413 break;
13417 switch (code)
13419 case ARRAY_REF:
13421 tree op0 = TREE_OPERAND (t, 0);
13422 tree op1 = TREE_OPERAND (t, 1);
13424 if (TREE_CODE (op1) == INTEGER_CST
13425 && TREE_CODE (op0) == CONSTRUCTOR
13426 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13428 tree val = get_array_ctor_element_at_index (op0,
13429 wi::to_offset (op1));
13430 if (val)
13431 return val;
13434 return t;
13437 /* Return a VECTOR_CST if possible. */
13438 case CONSTRUCTOR:
13440 tree type = TREE_TYPE (t);
13441 if (TREE_CODE (type) != VECTOR_TYPE)
13442 return t;
13444 unsigned i;
13445 tree val;
13446 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13447 if (! CONSTANT_CLASS_P (val))
13448 return t;
13450 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13453 case CONST_DECL:
13454 return fold (DECL_INITIAL (t));
13456 default:
13457 return t;
13458 } /* switch (code) */
13461 #ifdef ENABLE_FOLD_CHECKING
13462 #undef fold
13464 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13465 hash_table<nofree_ptr_hash<const tree_node> > *);
13466 static void fold_check_failed (const_tree, const_tree);
13467 void print_fold_checksum (const_tree);
13469 /* When --enable-checking=fold, compute a digest of expr before
13470 and after the actual fold call to see whether fold accidentally
13471 changed the original expr. */
13473 tree
13474 fold (tree expr)
13476 tree ret;
13477 struct md5_ctx ctx;
13478 unsigned char checksum_before[16], checksum_after[16];
13479 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13481 md5_init_ctx (&ctx);
13482 fold_checksum_tree (expr, &ctx, &ht);
13483 md5_finish_ctx (&ctx, checksum_before);
13484 ht.empty ();
13486 ret = fold_1 (expr);
13488 md5_init_ctx (&ctx);
13489 fold_checksum_tree (expr, &ctx, &ht);
13490 md5_finish_ctx (&ctx, checksum_after);
13492 if (memcmp (checksum_before, checksum_after, 16))
13493 fold_check_failed (expr, ret);
13495 return ret;
13498 void
13499 print_fold_checksum (const_tree expr)
13501 struct md5_ctx ctx;
13502 unsigned char checksum[16], cnt;
13503 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13505 md5_init_ctx (&ctx);
13506 fold_checksum_tree (expr, &ctx, &ht);
13507 md5_finish_ctx (&ctx, checksum);
13508 for (cnt = 0; cnt < 16; ++cnt)
13509 fprintf (stderr, "%02x", checksum[cnt]);
13510 putc ('\n', stderr);
13513 static void
13514 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13516 internal_error ("fold check: original tree changed by fold");
13519 static void
13520 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13521 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13523 const tree_node **slot;
13524 enum tree_code code;
13525 union tree_node *buf;
13526 int i, len;
13528 recursive_label:
13529 if (expr == NULL)
13530 return;
13531 slot = ht->find_slot (expr, INSERT);
13532 if (*slot != NULL)
13533 return;
13534 *slot = expr;
13535 code = TREE_CODE (expr);
13536 if (TREE_CODE_CLASS (code) == tcc_declaration
13537 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13539 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13540 size_t sz = tree_size (expr);
13541 buf = XALLOCAVAR (union tree_node, sz);
13542 memcpy ((char *) buf, expr, sz);
13543 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13544 buf->decl_with_vis.symtab_node = NULL;
13545 buf->base.nowarning_flag = 0;
13546 expr = (tree) buf;
13548 else if (TREE_CODE_CLASS (code) == tcc_type
13549 && (TYPE_POINTER_TO (expr)
13550 || TYPE_REFERENCE_TO (expr)
13551 || TYPE_CACHED_VALUES_P (expr)
13552 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13553 || TYPE_NEXT_VARIANT (expr)
13554 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13556 /* Allow these fields to be modified. */
13557 tree tmp;
13558 size_t sz = tree_size (expr);
13559 buf = XALLOCAVAR (union tree_node, sz);
13560 memcpy ((char *) buf, expr, sz);
13561 expr = tmp = (tree) buf;
13562 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13563 TYPE_POINTER_TO (tmp) = NULL;
13564 TYPE_REFERENCE_TO (tmp) = NULL;
13565 TYPE_NEXT_VARIANT (tmp) = NULL;
13566 TYPE_ALIAS_SET (tmp) = -1;
13567 if (TYPE_CACHED_VALUES_P (tmp))
13569 TYPE_CACHED_VALUES_P (tmp) = 0;
13570 TYPE_CACHED_VALUES (tmp) = NULL;
13573 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13575 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13576 that and change builtins.cc etc. instead - see PR89543. */
13577 size_t sz = tree_size (expr);
13578 buf = XALLOCAVAR (union tree_node, sz);
13579 memcpy ((char *) buf, expr, sz);
13580 buf->base.nowarning_flag = 0;
13581 expr = (tree) buf;
13583 md5_process_bytes (expr, tree_size (expr), ctx);
13584 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13585 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13586 if (TREE_CODE_CLASS (code) != tcc_type
13587 && TREE_CODE_CLASS (code) != tcc_declaration
13588 && code != TREE_LIST
13589 && code != SSA_NAME
13590 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13591 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13592 switch (TREE_CODE_CLASS (code))
13594 case tcc_constant:
13595 switch (code)
13597 case STRING_CST:
13598 md5_process_bytes (TREE_STRING_POINTER (expr),
13599 TREE_STRING_LENGTH (expr), ctx);
13600 break;
13601 case COMPLEX_CST:
13602 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13603 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13604 break;
13605 case VECTOR_CST:
13606 len = vector_cst_encoded_nelts (expr);
13607 for (i = 0; i < len; ++i)
13608 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13609 break;
13610 default:
13611 break;
13613 break;
13614 case tcc_exceptional:
13615 switch (code)
13617 case TREE_LIST:
13618 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13619 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13620 expr = TREE_CHAIN (expr);
13621 goto recursive_label;
13622 break;
13623 case TREE_VEC:
13624 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13625 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13626 break;
13627 default:
13628 break;
13630 break;
13631 case tcc_expression:
13632 case tcc_reference:
13633 case tcc_comparison:
13634 case tcc_unary:
13635 case tcc_binary:
13636 case tcc_statement:
13637 case tcc_vl_exp:
13638 len = TREE_OPERAND_LENGTH (expr);
13639 for (i = 0; i < len; ++i)
13640 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13641 break;
13642 case tcc_declaration:
13643 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13644 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13645 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13647 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13648 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13649 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13650 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13651 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13654 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13656 if (TREE_CODE (expr) == FUNCTION_DECL)
13658 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13659 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13661 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13663 break;
13664 case tcc_type:
13665 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13666 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13667 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13668 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13669 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13670 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13671 if (INTEGRAL_TYPE_P (expr)
13672 || SCALAR_FLOAT_TYPE_P (expr))
13674 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13675 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13677 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13678 if (RECORD_OR_UNION_TYPE_P (expr))
13679 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13680 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13681 break;
13682 default:
13683 break;
13687 /* Helper function for outputting the checksum of a tree T. When
13688 debugging with gdb, you can "define mynext" to be "next" followed
13689 by "call debug_fold_checksum (op0)", then just trace down till the
13690 outputs differ. */
13692 DEBUG_FUNCTION void
13693 debug_fold_checksum (const_tree t)
13695 int i;
13696 unsigned char checksum[16];
13697 struct md5_ctx ctx;
13698 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13700 md5_init_ctx (&ctx);
13701 fold_checksum_tree (t, &ctx, &ht);
13702 md5_finish_ctx (&ctx, checksum);
13703 ht.empty ();
13705 for (i = 0; i < 16; i++)
13706 fprintf (stderr, "%d ", checksum[i]);
13708 fprintf (stderr, "\n");
13711 #endif
13713 /* Fold a unary tree expression with code CODE of type TYPE with an
13714 operand OP0. LOC is the location of the resulting expression.
13715 Return a folded expression if successful. Otherwise, return a tree
13716 expression with code CODE of type TYPE with an operand OP0. */
13718 tree
13719 fold_build1_loc (location_t loc,
13720 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13722 tree tem;
13723 #ifdef ENABLE_FOLD_CHECKING
13724 unsigned char checksum_before[16], checksum_after[16];
13725 struct md5_ctx ctx;
13726 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13728 md5_init_ctx (&ctx);
13729 fold_checksum_tree (op0, &ctx, &ht);
13730 md5_finish_ctx (&ctx, checksum_before);
13731 ht.empty ();
13732 #endif
13734 tem = fold_unary_loc (loc, code, type, op0);
13735 if (!tem)
13736 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13738 #ifdef ENABLE_FOLD_CHECKING
13739 md5_init_ctx (&ctx);
13740 fold_checksum_tree (op0, &ctx, &ht);
13741 md5_finish_ctx (&ctx, checksum_after);
13743 if (memcmp (checksum_before, checksum_after, 16))
13744 fold_check_failed (op0, tem);
13745 #endif
13746 return tem;
13749 /* Fold a binary tree expression with code CODE of type TYPE with
13750 operands OP0 and OP1. LOC is the location of the resulting
13751 expression. Return a folded expression if successful. Otherwise,
13752 return a tree expression with code CODE of type TYPE with operands
13753 OP0 and OP1. */
13755 tree
13756 fold_build2_loc (location_t loc,
13757 enum tree_code code, tree type, tree op0, tree op1
13758 MEM_STAT_DECL)
13760 tree tem;
13761 #ifdef ENABLE_FOLD_CHECKING
13762 unsigned char checksum_before_op0[16],
13763 checksum_before_op1[16],
13764 checksum_after_op0[16],
13765 checksum_after_op1[16];
13766 struct md5_ctx ctx;
13767 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13769 md5_init_ctx (&ctx);
13770 fold_checksum_tree (op0, &ctx, &ht);
13771 md5_finish_ctx (&ctx, checksum_before_op0);
13772 ht.empty ();
13774 md5_init_ctx (&ctx);
13775 fold_checksum_tree (op1, &ctx, &ht);
13776 md5_finish_ctx (&ctx, checksum_before_op1);
13777 ht.empty ();
13778 #endif
13780 tem = fold_binary_loc (loc, code, type, op0, op1);
13781 if (!tem)
13782 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13784 #ifdef ENABLE_FOLD_CHECKING
13785 md5_init_ctx (&ctx);
13786 fold_checksum_tree (op0, &ctx, &ht);
13787 md5_finish_ctx (&ctx, checksum_after_op0);
13788 ht.empty ();
13790 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13791 fold_check_failed (op0, tem);
13793 md5_init_ctx (&ctx);
13794 fold_checksum_tree (op1, &ctx, &ht);
13795 md5_finish_ctx (&ctx, checksum_after_op1);
13797 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13798 fold_check_failed (op1, tem);
13799 #endif
13800 return tem;
13803 /* Fold a ternary tree expression with code CODE of type TYPE with
13804 operands OP0, OP1, and OP2. Return a folded expression if
13805 successful. Otherwise, return a tree expression with code CODE of
13806 type TYPE with operands OP0, OP1, and OP2. */
13808 tree
13809 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13810 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13812 tree tem;
13813 #ifdef ENABLE_FOLD_CHECKING
13814 unsigned char checksum_before_op0[16],
13815 checksum_before_op1[16],
13816 checksum_before_op2[16],
13817 checksum_after_op0[16],
13818 checksum_after_op1[16],
13819 checksum_after_op2[16];
13820 struct md5_ctx ctx;
13821 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13823 md5_init_ctx (&ctx);
13824 fold_checksum_tree (op0, &ctx, &ht);
13825 md5_finish_ctx (&ctx, checksum_before_op0);
13826 ht.empty ();
13828 md5_init_ctx (&ctx);
13829 fold_checksum_tree (op1, &ctx, &ht);
13830 md5_finish_ctx (&ctx, checksum_before_op1);
13831 ht.empty ();
13833 md5_init_ctx (&ctx);
13834 fold_checksum_tree (op2, &ctx, &ht);
13835 md5_finish_ctx (&ctx, checksum_before_op2);
13836 ht.empty ();
13837 #endif
13839 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13840 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13841 if (!tem)
13842 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13844 #ifdef ENABLE_FOLD_CHECKING
13845 md5_init_ctx (&ctx);
13846 fold_checksum_tree (op0, &ctx, &ht);
13847 md5_finish_ctx (&ctx, checksum_after_op0);
13848 ht.empty ();
13850 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13851 fold_check_failed (op0, tem);
13853 md5_init_ctx (&ctx);
13854 fold_checksum_tree (op1, &ctx, &ht);
13855 md5_finish_ctx (&ctx, checksum_after_op1);
13856 ht.empty ();
13858 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13859 fold_check_failed (op1, tem);
13861 md5_init_ctx (&ctx);
13862 fold_checksum_tree (op2, &ctx, &ht);
13863 md5_finish_ctx (&ctx, checksum_after_op2);
13865 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13866 fold_check_failed (op2, tem);
13867 #endif
13868 return tem;
13871 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13872 arguments in ARGARRAY, and a null static chain.
13873 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13874 of type TYPE from the given operands as constructed by build_call_array. */
13876 tree
13877 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13878 int nargs, tree *argarray)
13880 tree tem;
13881 #ifdef ENABLE_FOLD_CHECKING
13882 unsigned char checksum_before_fn[16],
13883 checksum_before_arglist[16],
13884 checksum_after_fn[16],
13885 checksum_after_arglist[16];
13886 struct md5_ctx ctx;
13887 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13888 int i;
13890 md5_init_ctx (&ctx);
13891 fold_checksum_tree (fn, &ctx, &ht);
13892 md5_finish_ctx (&ctx, checksum_before_fn);
13893 ht.empty ();
13895 md5_init_ctx (&ctx);
13896 for (i = 0; i < nargs; i++)
13897 fold_checksum_tree (argarray[i], &ctx, &ht);
13898 md5_finish_ctx (&ctx, checksum_before_arglist);
13899 ht.empty ();
13900 #endif
13902 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13903 if (!tem)
13904 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13906 #ifdef ENABLE_FOLD_CHECKING
13907 md5_init_ctx (&ctx);
13908 fold_checksum_tree (fn, &ctx, &ht);
13909 md5_finish_ctx (&ctx, checksum_after_fn);
13910 ht.empty ();
13912 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13913 fold_check_failed (fn, tem);
13915 md5_init_ctx (&ctx);
13916 for (i = 0; i < nargs; i++)
13917 fold_checksum_tree (argarray[i], &ctx, &ht);
13918 md5_finish_ctx (&ctx, checksum_after_arglist);
13920 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13921 fold_check_failed (NULL_TREE, tem);
13922 #endif
13923 return tem;
13926 /* Perform constant folding and related simplification of initializer
13927 expression EXPR. These behave identically to "fold_buildN" but ignore
13928 potential run-time traps and exceptions that fold must preserve. */
13930 #define START_FOLD_INIT \
13931 int saved_signaling_nans = flag_signaling_nans;\
13932 int saved_trapping_math = flag_trapping_math;\
13933 int saved_rounding_math = flag_rounding_math;\
13934 int saved_trapv = flag_trapv;\
13935 int saved_folding_initializer = folding_initializer;\
13936 flag_signaling_nans = 0;\
13937 flag_trapping_math = 0;\
13938 flag_rounding_math = 0;\
13939 flag_trapv = 0;\
13940 folding_initializer = 1;
13942 #define END_FOLD_INIT \
13943 flag_signaling_nans = saved_signaling_nans;\
13944 flag_trapping_math = saved_trapping_math;\
13945 flag_rounding_math = saved_rounding_math;\
13946 flag_trapv = saved_trapv;\
13947 folding_initializer = saved_folding_initializer;
13949 tree
13950 fold_init (tree expr)
13952 tree result;
13953 START_FOLD_INIT;
13955 result = fold (expr);
13957 END_FOLD_INIT;
13958 return result;
13961 tree
13962 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13963 tree type, tree op)
13965 tree result;
13966 START_FOLD_INIT;
13968 result = fold_build1_loc (loc, code, type, op);
13970 END_FOLD_INIT;
13971 return result;
13974 tree
13975 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13976 tree type, tree op0, tree op1)
13978 tree result;
13979 START_FOLD_INIT;
13981 result = fold_build2_loc (loc, code, type, op0, op1);
13983 END_FOLD_INIT;
13984 return result;
13987 tree
13988 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13989 int nargs, tree *argarray)
13991 tree result;
13992 START_FOLD_INIT;
13994 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13996 END_FOLD_INIT;
13997 return result;
14000 tree
14001 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14002 tree lhs, tree rhs)
14004 tree result;
14005 START_FOLD_INIT;
14007 result = fold_binary_loc (loc, code, type, lhs, rhs);
14009 END_FOLD_INIT;
14010 return result;
14013 #undef START_FOLD_INIT
14014 #undef END_FOLD_INIT
14016 /* Determine whether the first argument is a multiple of the second argument.
14017 Return false if it is not, or if we cannot easily determine it to be.
14019 An example of the sort of thing we care about (at this point; this routine
14020 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14021 fold cases do now) is discovering that
14023 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14025 is a multiple of
14027 SAVE_EXPR (J * 8)
14029 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14031 This code also handles discovering that
14033 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14035 is a multiple of 8 so we don't have to worry about dealing with a
14036 possible remainder.
14038 Note that we *look* inside a SAVE_EXPR only to determine how it was
14039 calculated; it is not safe for fold to do much of anything else with the
14040 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14041 at run time. For example, the latter example above *cannot* be implemented
14042 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14043 evaluation time of the original SAVE_EXPR is not necessarily the same at
14044 the time the new expression is evaluated. The only optimization of this
14045 sort that would be valid is changing
14047 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14049 divided by 8 to
14051 SAVE_EXPR (I) * SAVE_EXPR (J)
14053 (where the same SAVE_EXPR (J) is used in the original and the
14054 transformed version).
14056 NOWRAP specifies whether all outer operations in TYPE should
14057 be considered not wrapping. Any type conversion within TOP acts
14058 as a barrier and we will fall back to NOWRAP being false.
14059 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14060 as not wrapping even though they are generally using unsigned arithmetic. */
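/* For example, TOP = J * 8 is a multiple of BOTTOM = 4 because the
   constant factor 8 is, but not provably a multiple of BOTTOM = 3
   when nothing is known about J. */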
14062 bool
14063 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14065 gimple *stmt;
14066 tree op1, op2;
14068 if (operand_equal_p (top, bottom, 0))
14069 return true;
14071 if (TREE_CODE (type) != INTEGER_TYPE)
14072 return false;
14074 switch (TREE_CODE (top))
14076 case BIT_AND_EXPR:
14077 /* Bitwise and provides a power of two multiple. If the mask is
14078 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14079 if (!integer_pow2p (bottom))
14080 return false;
14081 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14082 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14084 case MULT_EXPR:
14085 /* If the multiplication can wrap we cannot recurse further unless
14086 the bottom is a power of two, which is where wrapping does not
14087 matter. */
14088 if (!nowrap
14089 && !TYPE_OVERFLOW_UNDEFINED (type)
14090 && !integer_pow2p (bottom))
14091 return false;
14092 if (TREE_CODE (bottom) == INTEGER_CST)
14094 op1 = TREE_OPERAND (top, 0);
14095 op2 = TREE_OPERAND (top, 1);
14096 if (TREE_CODE (op1) == INTEGER_CST)
14097 std::swap (op1, op2);
14098 if (TREE_CODE (op2) == INTEGER_CST)
14100 if (multiple_of_p (type, op2, bottom, nowrap))
14101 return true;
14102 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14103 if (multiple_of_p (type, bottom, op2, nowrap))
14105 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14106 wi::to_widest (op2));
14107 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14109 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14110 return multiple_of_p (type, op1, op2, nowrap);
14113 return multiple_of_p (type, op1, bottom, nowrap);
14116 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14117 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14119 case LSHIFT_EXPR:
14120 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14121 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14123 op1 = TREE_OPERAND (top, 1);
14124 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14126 wide_int mul_op
14127 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14128 return multiple_of_p (type,
14129 wide_int_to_tree (type, mul_op), bottom,
14130 nowrap);
14133 return false;
14135 case MINUS_EXPR:
14136 case PLUS_EXPR:
14137 /* If the addition or subtraction can wrap we cannot recurse further
14138 unless bottom is a power of two, which is where wrapping does not
14139 matter. */
14140 if (!nowrap
14141 && !TYPE_OVERFLOW_UNDEFINED (type)
14142 && !integer_pow2p (bottom))
14143 return false;
14145 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14146 unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14147 but 0xfffffffd is not. */
14148 op1 = TREE_OPERAND (top, 1);
14149 if (TREE_CODE (top) == PLUS_EXPR
14150 && nowrap
14151 && TYPE_UNSIGNED (type)
14152 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14153 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14155 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14156 of bottom, so be conservative here and check whether both op0 and op1
14157 are multiples of bottom. Note that we check the second operand first
14158 since it's usually simpler. */
14159 return (multiple_of_p (type, op1, bottom, nowrap)
14160 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14162 CASE_CONVERT:
14163 /* Can't handle conversions from a non-integral or wider integral type. */
14164 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14165 || (TYPE_PRECISION (type)
14166 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14167 return false;
14168 /* NOWRAP only extends to operations in the outermost type so
14169 make sure to strip it off here. */
14170 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14171 TREE_OPERAND (top, 0), bottom, false);
14173 case SAVE_EXPR:
14174 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14176 case COND_EXPR:
14177 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14178 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14180 case INTEGER_CST:
14181 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14182 return false;
14183 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14184 SIGNED);
14186 case SSA_NAME:
14187 if (TREE_CODE (bottom) == INTEGER_CST
14188 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14189 && gimple_code (stmt) == GIMPLE_ASSIGN)
14191 enum tree_code code = gimple_assign_rhs_code (stmt);
14193 /* Check for special cases to see if top is defined as a multiple
14194 of bottom:
14196 top = (X & ~(bottom - 1)) ; bottom is a power of 2, or
14200 Y = X % bottom
14201 top = X - Y. */
14202 if (code == BIT_AND_EXPR
14203 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14204 && TREE_CODE (op2) == INTEGER_CST
14205 && integer_pow2p (bottom)
14206 && wi::multiple_of_p (wi::to_widest (op2),
14207 wi::to_widest (bottom), UNSIGNED))
14208 return true;
14210 op1 = gimple_assign_rhs1 (stmt);
14211 if (code == MINUS_EXPR
14212 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14213 && TREE_CODE (op2) == SSA_NAME
14214 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14215 && gimple_code (stmt) == GIMPLE_ASSIGN
14216 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14217 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14218 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14219 return true;
14222 /* fall through */
14224 default:
14225 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14226 return multiple_p (wi::to_poly_widest (top),
14227 wi::to_poly_widest (bottom));
14229 return false;
14233 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14234 This function returns true for integer expressions, and returns
14235 false if uncertain. */
14237 bool
14238 tree_expr_finite_p (const_tree x)
14240 machine_mode mode = element_mode (x);
14241 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14242 return true;
14243 switch (TREE_CODE (x))
14245 case REAL_CST:
14246 return real_isfinite (TREE_REAL_CST_PTR (x));
14247 case COMPLEX_CST:
14248 return tree_expr_finite_p (TREE_REALPART (x))
14249 && tree_expr_finite_p (TREE_IMAGPART (x));
14250 case FLOAT_EXPR:
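/* Conversions from integers are treated as finite. */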
14251 return true;
14252 case ABS_EXPR:
14253 case CONVERT_EXPR:
14254 case NON_LVALUE_EXPR:
14255 case NEGATE_EXPR:
14256 case SAVE_EXPR:
14257 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14258 case MIN_EXPR:
14259 case MAX_EXPR:
14260 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14261 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14262 case COND_EXPR:
14263 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14264 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14265 case CALL_EXPR:
14266 switch (get_call_combined_fn (x))
14268 CASE_CFN_FABS:
14269 CASE_CFN_FABS_FN:
14270 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14271 CASE_CFN_FMAX:
14272 CASE_CFN_FMAX_FN:
14273 CASE_CFN_FMIN:
14274 CASE_CFN_FMIN_FN:
14275 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14276 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14277 default:
14278 return false;
14281 default:
14282 return false;
14286 /* Return true if expression X evaluates to an infinity.
14287 This function returns false for integer expressions. */
14289 bool
14290 tree_expr_infinite_p (const_tree x)
14292 if (!HONOR_INFINITIES (x))
14293 return false;
14294 switch (TREE_CODE (x))
14296 case REAL_CST:
14297 return real_isinf (TREE_REAL_CST_PTR (x));
14298 case ABS_EXPR:
14299 case NEGATE_EXPR:
14300 case NON_LVALUE_EXPR:
14301 case SAVE_EXPR:
14302 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14303 case COND_EXPR:
14304 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14305 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14306 default:
14307 return false;
14311 /* Return true if expression X could evaluate to an infinity.
14312 This function returns false for integer expressions, and returns
14313 true if uncertain. */
14315 bool
14316 tree_expr_maybe_infinite_p (const_tree x)
14318 if (!HONOR_INFINITIES (x))
14319 return false;
14320 switch (TREE_CODE (x))
14322 case REAL_CST:
14323 return real_isinf (TREE_REAL_CST_PTR (x));
14324 case FLOAT_EXPR:
14325 return false;
14326 case ABS_EXPR:
14327 case NEGATE_EXPR:
14328 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14329 case COND_EXPR:
14330 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14331 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14332 default:
14333 return true;
14337 /* Return true if expression X evaluates to a signaling NaN.
14338 This function returns false for integer expressions. */
14340 bool
14341 tree_expr_signaling_nan_p (const_tree x)
14343 if (!HONOR_SNANS (x))
14344 return false;
14345 switch (TREE_CODE (x))
14347 case REAL_CST:
14348 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14349 case NON_LVALUE_EXPR:
14350 case SAVE_EXPR:
14351 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14352 case COND_EXPR:
14353 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14354 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14355 default:
14356 return false;
14360 /* Return true if expression X could evaluate to a signaling NaN.
14361 This function returns false for integer expressions, and returns
14362 true if uncertain. */
14364 bool
14365 tree_expr_maybe_signaling_nan_p (const_tree x)
14367 if (!HONOR_SNANS (x))
14368 return false;
14369 switch (TREE_CODE (x))
14371 case REAL_CST:
14372 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14373 case FLOAT_EXPR:
14374 return false;
14375 case ABS_EXPR:
14376 case CONVERT_EXPR:
14377 case NEGATE_EXPR:
14378 case NON_LVALUE_EXPR:
14379 case SAVE_EXPR:
14380 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14381 case MIN_EXPR:
14382 case MAX_EXPR:
14383 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14384 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14385 case COND_EXPR:
14386 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14387 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14388 case CALL_EXPR:
14389 switch (get_call_combined_fn (x))
14391 CASE_CFN_FABS:
14392 CASE_CFN_FABS_FN:
14393 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14394 CASE_CFN_FMAX:
14395 CASE_CFN_FMAX_FN:
14396 CASE_CFN_FMIN:
14397 CASE_CFN_FMIN_FN:
14398 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14399 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14400 default:
14401 return true;
14403 default:
14404 return true;
14408 /* Return true if expression X evaluates to a NaN.
14409 This function returns false for integer expressions. */
14411 bool
14412 tree_expr_nan_p (const_tree x)
14414 if (!HONOR_NANS (x))
14415 return false;
14416 switch (TREE_CODE (x))
14418 case REAL_CST:
14419 return real_isnan (TREE_REAL_CST_PTR (x));
14420 case NON_LVALUE_EXPR:
14421 case SAVE_EXPR:
14422 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14423 case COND_EXPR:
14424 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14425 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14426 default:
14427 return false;
14431 /* Return true if expression X could evaluate to a NaN.
14432 This function returns false for integer expressions, and returns
14433 true if uncertain. */
14435 bool
14436 tree_expr_maybe_nan_p (const_tree x)
14438 if (!HONOR_NANS (x))
14439 return false;
14440 switch (TREE_CODE (x))
14442 case REAL_CST:
14443 return real_isnan (TREE_REAL_CST_PTR (x));
14444 case FLOAT_EXPR:
14445 return false;
14446 case PLUS_EXPR:
14447 case MINUS_EXPR:
14448 case MULT_EXPR:
14449 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14450 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14451 case ABS_EXPR:
14452 case CONVERT_EXPR:
14453 case NEGATE_EXPR:
14454 case NON_LVALUE_EXPR:
14455 case SAVE_EXPR:
14456 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14457 case MIN_EXPR:
14458 case MAX_EXPR:
14459 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14460 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14461 case COND_EXPR:
14462 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14463 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14464 case CALL_EXPR:
14465 switch (get_call_combined_fn (x))
14467 CASE_CFN_FABS:
14468 CASE_CFN_FABS_FN:
14469 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14470 CASE_CFN_FMAX:
14471 CASE_CFN_FMAX_FN:
14472 CASE_CFN_FMIN:
14473 CASE_CFN_FMIN_FN:
14474 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14475 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14476 default:
14477 return true;
14479 default:
14480 return true;
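/* Editorial note: the PLUS_EXPR/MINUS_EXPR/MULT_EXPR arm above relies on
   the fact that arithmetic on finite operands can overflow to an
   infinity but cannot create a NaN out of thin air; only operations
   already involving an infinity (e.g. +Inf + -Inf, or 0 * Inf) can.
   Hence x + y is known NaN-free whenever tree_expr_finite_p holds for
   both operands, even though neither operand is a constant.  */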
14484 /* Return true if expression X could evaluate to -0.0.
14485 This function returns true if uncertain. */
14487 bool
14488 tree_expr_maybe_real_minus_zero_p (const_tree x)
14490 if (!HONOR_SIGNED_ZEROS (x))
14491 return false;
14492 switch (TREE_CODE (x))
14494 case REAL_CST:
14495 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14496 case INTEGER_CST:
14497 case FLOAT_EXPR:
14498 case ABS_EXPR:
14499 return false;
14500 case NON_LVALUE_EXPR:
14501 case SAVE_EXPR:
14502 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14503 case COND_EXPR:
14504 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14505 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14506 case CALL_EXPR:
14507 switch (get_call_combined_fn (x))
14509 CASE_CFN_FABS:
14510 CASE_CFN_FABS_FN:
14511 return false;
14512 default:
14513 break;
14515 default:
14516 break;
14518 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14519 * but currently those predicates require tree and not const_tree. */
14520 return true;
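/* Editorial sketch (assumes signed zeros are honored for
   double_type_node, the default): a literal -0.0 is recognized via
   REAL_VALUE_MINUS_ZERO, e.g.

     REAL_VALUE_TYPE mzero = real_value_negate (&dconst0);
     tree_expr_maybe_real_minus_zero_p (build_real (double_type_node,
						    mzero));

   yields true, whereas a call reaching the CASE_CFN_FABS arm yields
   false; anything unrecognized falls through to the conservative
   `return true'.  */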
14523 #define tree_expr_nonnegative_warnv_p(X, Y) \
14524 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14526 #define RECURSE(X) \
14527 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14529 /* Return true if CODE or TYPE is known to be non-negative. */
14531 static bool
14532 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14534 if (!VECTOR_TYPE_P (type)
14535 && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14536 && truth_value_p (code))
14537 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14538 have a signed:1 type (where the values are -1 and 0). */
14539 return true;
14540 return false;
14543 /* Return true if (CODE OP0) is known to be non-negative. If the return
14544 value is based on the assumption that signed overflow is undefined,
14545 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14546 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14548 bool
14549 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14550 bool *strict_overflow_p, int depth)
14552 if (TYPE_UNSIGNED (type))
14553 return true;
14555 switch (code)
14557 case ABS_EXPR:
14558 /* We can't return 1 if flag_wrapv is set because
14559 ABS_EXPR<INT_MIN> = INT_MIN. */
14560 if (!ANY_INTEGRAL_TYPE_P (type))
14561 return true;
14562 if (TYPE_OVERFLOW_UNDEFINED (type))
14564 *strict_overflow_p = true;
14565 return true;
14567 break;
14569 case NON_LVALUE_EXPR:
14570 case FLOAT_EXPR:
14571 case FIX_TRUNC_EXPR:
14572 return RECURSE (op0);
14574 CASE_CONVERT:
14576 tree inner_type = TREE_TYPE (op0);
14577 tree outer_type = type;
14579 if (SCALAR_FLOAT_TYPE_P (outer_type))
14581 if (SCALAR_FLOAT_TYPE_P (inner_type))
14582 return RECURSE (op0);
14583 if (INTEGRAL_TYPE_P (inner_type))
14585 if (TYPE_UNSIGNED (inner_type))
14586 return true;
14587 return RECURSE (op0);
14590 else if (INTEGRAL_TYPE_P (outer_type))
14592 if (SCALAR_FLOAT_TYPE_P (inner_type))
14593 return RECURSE (op0);
14594 if (INTEGRAL_TYPE_P (inner_type))
14595 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14596 && TYPE_UNSIGNED (inner_type);
14599 break;
14601 default:
14602 return tree_simple_nonnegative_warnv_p (code, type);
14605 /* We don't know the sign of `t', so be conservative and return false. */
14606 return false;
14609 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14610 value is based on the assumption that signed overflow is undefined,
14611 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14612 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14614 bool
14615 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14616 tree op1, bool *strict_overflow_p,
14617 int depth)
14619 if (TYPE_UNSIGNED (type))
14620 return true;
14622 switch (code)
14624 case POINTER_PLUS_EXPR:
14625 case PLUS_EXPR:
14626 if (FLOAT_TYPE_P (type))
14627 return RECURSE (op0) && RECURSE (op1);
14629 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14630 both unsigned and at least 2 bits shorter than the result. */
14631 if (TREE_CODE (type) == INTEGER_TYPE
14632 && TREE_CODE (op0) == NOP_EXPR
14633 && TREE_CODE (op1) == NOP_EXPR)
14635 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14636 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14637 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14638 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14640 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14641 TYPE_PRECISION (inner2)) + 1;
14642 return prec < TYPE_PRECISION (type);
14645 break;
14647 case MULT_EXPR:
14648 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14650 /* x * x is always non-negative for floating point x,
14651    or when overflow is undefined. */
14652 if (operand_equal_p (op0, op1, 0)
14653 || (RECURSE (op0) && RECURSE (op1)))
14655 if (ANY_INTEGRAL_TYPE_P (type)
14656 && TYPE_OVERFLOW_UNDEFINED (type))
14657 *strict_overflow_p = true;
14658 return true;
14662 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14663 both unsigned and their combined width is less than that of the result. */
14664 if (TREE_CODE (type) == INTEGER_TYPE
14665 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14666 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14668 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14669 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14670 : TREE_TYPE (op0);
14671 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14672 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14673 : TREE_TYPE (op1);
14675 bool unsigned0 = TYPE_UNSIGNED (inner0);
14676 bool unsigned1 = TYPE_UNSIGNED (inner1);
14678 if (TREE_CODE (op0) == INTEGER_CST)
14679 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14681 if (TREE_CODE (op1) == INTEGER_CST)
14682 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14684 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14685 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14687 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14688 ? tree_int_cst_min_precision (op0, UNSIGNED)
14689 : TYPE_PRECISION (inner0);
14691 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14692 ? tree_int_cst_min_precision (op1, UNSIGNED)
14693 : TYPE_PRECISION (inner1);
14695 return precision0 + precision1 < TYPE_PRECISION (type);
14698 return false;
14700 case BIT_AND_EXPR:
14701 return RECURSE (op0) || RECURSE (op1);
14703 case MAX_EXPR:
14704 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14705 things. */
14706 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14707 return RECURSE (op0) && RECURSE (op1);
14708 return RECURSE (op0) || RECURSE (op1);
14710 case BIT_IOR_EXPR:
14711 case BIT_XOR_EXPR:
14712 case MIN_EXPR:
14713 case RDIV_EXPR:
14714 case TRUNC_DIV_EXPR:
14715 case CEIL_DIV_EXPR:
14716 case FLOOR_DIV_EXPR:
14717 case ROUND_DIV_EXPR:
14718 return RECURSE (op0) && RECURSE (op1);
14720 case TRUNC_MOD_EXPR:
14721 return RECURSE (op0);
14723 case FLOOR_MOD_EXPR:
14724 return RECURSE (op1);
14726 case CEIL_MOD_EXPR:
14727 case ROUND_MOD_EXPR:
14728 default:
14729 return tree_simple_nonnegative_warnv_p (code, type);
14732 /* We don't know the sign of `t', so be conservative and return false. */
14733 return false;
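/* Editorial worked example for the zero-extension MULT_EXPR rule above:
   multiplying two values zero-extended from unsigned char into a 32-bit
   int gives precision0 + precision1 == 8 + 8 == 16 < 32, so the product
   is known non-negative with no overflow assumption. Two values widened
   from a 16-bit unsigned type give 16 + 16 == 32, which is not less
   than 32, so the rule declines and the conservative answer is kept.  */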
14736 /* Return true if T is known to be non-negative. If the return
14737 value is based on the assumption that signed overflow is undefined,
14738 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14739 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14741 bool
14742 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14744 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14745 return true;
14747 switch (TREE_CODE (t))
14749 case INTEGER_CST:
14750 return tree_int_cst_sgn (t) >= 0;
14752 case REAL_CST:
14753 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14755 case FIXED_CST:
14756 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14758 case COND_EXPR:
14759 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14761 case SSA_NAME:
14762 /* Limit the depth of recursion to avoid quadratic behavior.
14763 This is expected to catch almost all occurrences in practice.
14764 If this code misses important cases that unbounded recursion
14765 would not, passes that need this information could be revised
14766 to provide it through dataflow propagation. */
14767 return (!name_registered_for_update_p (t)
14768 && depth < param_max_ssa_name_query_depth
14769 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14770 strict_overflow_p, depth));
14772 default:
14773 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14777 /* Return true if T is known to be non-negative. If the return
14778 value is based on the assumption that signed overflow is undefined,
14779 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14780 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14782 bool
14783 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14784 bool *strict_overflow_p, int depth)
14786 switch (fn)
14788 CASE_CFN_ACOS:
14789 CASE_CFN_ACOS_FN:
14790 CASE_CFN_ACOSH:
14791 CASE_CFN_ACOSH_FN:
14792 CASE_CFN_CABS:
14793 CASE_CFN_CABS_FN:
14794 CASE_CFN_COSH:
14795 CASE_CFN_COSH_FN:
14796 CASE_CFN_ERFC:
14797 CASE_CFN_ERFC_FN:
14798 CASE_CFN_EXP:
14799 CASE_CFN_EXP_FN:
14800 CASE_CFN_EXP10:
14801 CASE_CFN_EXP2:
14802 CASE_CFN_EXP2_FN:
14803 CASE_CFN_FABS:
14804 CASE_CFN_FABS_FN:
14805 CASE_CFN_FDIM:
14806 CASE_CFN_FDIM_FN:
14807 CASE_CFN_HYPOT:
14808 CASE_CFN_HYPOT_FN:
14809 CASE_CFN_POW10:
14810 CASE_CFN_FFS:
14811 CASE_CFN_PARITY:
14812 CASE_CFN_POPCOUNT:
14813 CASE_CFN_CLZ:
14814 CASE_CFN_CLRSB:
14815 case CFN_BUILT_IN_BSWAP16:
14816 case CFN_BUILT_IN_BSWAP32:
14817 case CFN_BUILT_IN_BSWAP64:
14818 case CFN_BUILT_IN_BSWAP128:
14819 /* Always true. */
14820 return true;
14822 CASE_CFN_SQRT:
14823 CASE_CFN_SQRT_FN:
14824 /* sqrt(-0.0) is -0.0. */
14825 if (!HONOR_SIGNED_ZEROS (type))
14826 return true;
14827 return RECURSE (arg0);
14829 CASE_CFN_ASINH:
14830 CASE_CFN_ASINH_FN:
14831 CASE_CFN_ATAN:
14832 CASE_CFN_ATAN_FN:
14833 CASE_CFN_ATANH:
14834 CASE_CFN_ATANH_FN:
14835 CASE_CFN_CBRT:
14836 CASE_CFN_CBRT_FN:
14837 CASE_CFN_CEIL:
14838 CASE_CFN_CEIL_FN:
14839 CASE_CFN_ERF:
14840 CASE_CFN_ERF_FN:
14841 CASE_CFN_EXPM1:
14842 CASE_CFN_EXPM1_FN:
14843 CASE_CFN_FLOOR:
14844 CASE_CFN_FLOOR_FN:
14845 CASE_CFN_FMOD:
14846 CASE_CFN_FMOD_FN:
14847 CASE_CFN_FREXP:
14848 CASE_CFN_FREXP_FN:
14849 CASE_CFN_ICEIL:
14850 CASE_CFN_IFLOOR:
14851 CASE_CFN_IRINT:
14852 CASE_CFN_IROUND:
14853 CASE_CFN_LCEIL:
14854 CASE_CFN_LDEXP:
14855 CASE_CFN_LFLOOR:
14856 CASE_CFN_LLCEIL:
14857 CASE_CFN_LLFLOOR:
14858 CASE_CFN_LLRINT:
14859 CASE_CFN_LLRINT_FN:
14860 CASE_CFN_LLROUND:
14861 CASE_CFN_LLROUND_FN:
14862 CASE_CFN_LRINT:
14863 CASE_CFN_LRINT_FN:
14864 CASE_CFN_LROUND:
14865 CASE_CFN_LROUND_FN:
14866 CASE_CFN_MODF:
14867 CASE_CFN_MODF_FN:
14868 CASE_CFN_NEARBYINT:
14869 CASE_CFN_NEARBYINT_FN:
14870 CASE_CFN_RINT:
14871 CASE_CFN_RINT_FN:
14872 CASE_CFN_ROUND:
14873 CASE_CFN_ROUND_FN:
14874 CASE_CFN_ROUNDEVEN:
14875 CASE_CFN_ROUNDEVEN_FN:
14876 CASE_CFN_SCALB:
14877 CASE_CFN_SCALBLN:
14878 CASE_CFN_SCALBLN_FN:
14879 CASE_CFN_SCALBN:
14880 CASE_CFN_SCALBN_FN:
14881 CASE_CFN_SIGNBIT:
14882 CASE_CFN_SIGNIFICAND:
14883 CASE_CFN_SINH:
14884 CASE_CFN_SINH_FN:
14885 CASE_CFN_TANH:
14886 CASE_CFN_TANH_FN:
14887 CASE_CFN_TRUNC:
14888 CASE_CFN_TRUNC_FN:
14889 /* True if the 1st argument is nonnegative. */
14890 return RECURSE (arg0);
14892 CASE_CFN_FMAX:
14893 CASE_CFN_FMAX_FN:
14894 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14895 things. In the presence of sNaNs, we're only guaranteed to be
14896 non-negative if both operands are non-negative. In the presence
14897 of qNaNs, we're non-negative if either operand is non-negative
14898 and can't be a qNaN, or if both operands are non-negative. */
14899 if (tree_expr_maybe_signaling_nan_p (arg0) ||
14900 tree_expr_maybe_signaling_nan_p (arg1))
14901 return RECURSE (arg0) && RECURSE (arg1);
14902 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14903 || RECURSE (arg1))
14904 : (RECURSE (arg1)
14905 && !tree_expr_maybe_nan_p (arg1));
14907 CASE_CFN_FMIN:
14908 CASE_CFN_FMIN_FN:
14909 /* True if the 1st AND 2nd arguments are nonnegative. */
14910 return RECURSE (arg0) && RECURSE (arg1);
14912 CASE_CFN_COPYSIGN:
14913 CASE_CFN_COPYSIGN_FN:
14914 /* True if the 2nd argument is nonnegative. */
14915 return RECURSE (arg1);
14917 CASE_CFN_POWI:
14918 /* True if the 1st argument is nonnegative or the second
14919 argument is an even integer. */
14920 if (TREE_CODE (arg1) == INTEGER_CST
14921 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14922 return true;
14923 return RECURSE (arg0);
14925 CASE_CFN_POW:
14926 CASE_CFN_POW_FN:
14927 /* True if the 1st argument is nonnegative or the second
14928 argument is an even integer valued real. */
14929 if (TREE_CODE (arg1) == REAL_CST)
14931 REAL_VALUE_TYPE c;
14932 HOST_WIDE_INT n;
14934 c = TREE_REAL_CST (arg1);
14935 n = real_to_integer (&c);
14936 if ((n & 1) == 0)
14938 REAL_VALUE_TYPE cint;
14939 real_from_integer (&cint, VOIDmode, n, SIGNED);
14940 if (real_identical (&c, &cint))
14941 return true;
14944 return RECURSE (arg0);
14946 default:
14947 break;
14949 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
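/* Editorial note: the CASE_CFN_POW arm above treats pow (x, 2.0) as
   non-negative for any x, because 2.0 survives the
   real_to_integer/real_from_integer round trip unchanged and is even,
   whereas pow (x, 2.5) falls back to RECURSE (arg0), 2.5 not being an
   integer-valued exponent.  */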
14952 /* Return true if T is known to be non-negative. If the return
14953 value is based on the assumption that signed overflow is undefined,
14954 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14955 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14957 static bool
14958 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14960 enum tree_code code = TREE_CODE (t);
14961 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14962 return true;
14964 switch (code)
14966 case TARGET_EXPR:
14968 tree temp = TARGET_EXPR_SLOT (t);
14969 t = TARGET_EXPR_INITIAL (t);
14971 /* If the initializer is non-void, then it's a normal expression
14972 that will be assigned to the slot. */
14973 if (!VOID_TYPE_P (TREE_TYPE (t)))
14974 return RECURSE (t);
14976 /* Otherwise, the initializer sets the slot in some way. One common
14977 way is an assignment statement at the end of the initializer. */
14978 while (1)
14980 if (TREE_CODE (t) == BIND_EXPR)
14981 t = expr_last (BIND_EXPR_BODY (t));
14982 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14983 || TREE_CODE (t) == TRY_CATCH_EXPR)
14984 t = expr_last (TREE_OPERAND (t, 0));
14985 else if (TREE_CODE (t) == STATEMENT_LIST)
14986 t = expr_last (t);
14987 else
14988 break;
14990 if (TREE_CODE (t) == MODIFY_EXPR
14991 && TREE_OPERAND (t, 0) == temp)
14992 return RECURSE (TREE_OPERAND (t, 1));
14994 return false;
14997 case CALL_EXPR:
14999 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15000 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15002 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15003 get_call_combined_fn (t),
15004 arg0,
15005 arg1,
15006 strict_overflow_p, depth);
15008 case COMPOUND_EXPR:
15009 case MODIFY_EXPR:
15010 return RECURSE (TREE_OPERAND (t, 1));
15012 case BIND_EXPR:
15013 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15015 case SAVE_EXPR:
15016 return RECURSE (TREE_OPERAND (t, 0));
15018 default:
15019 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15023 #undef RECURSE
15024 #undef tree_expr_nonnegative_warnv_p
15026 /* Return true if T is known to be non-negative. If the return
15027 value is based on the assumption that signed overflow is undefined,
15028 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15029 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15031 bool
15032 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15034 enum tree_code code;
15035 if (t == error_mark_node)
15036 return false;
15038 code = TREE_CODE (t);
15039 switch (TREE_CODE_CLASS (code))
15041 case tcc_binary:
15042 case tcc_comparison:
15043 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15044 TREE_TYPE (t),
15045 TREE_OPERAND (t, 0),
15046 TREE_OPERAND (t, 1),
15047 strict_overflow_p, depth);
15049 case tcc_unary:
15050 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15051 TREE_TYPE (t),
15052 TREE_OPERAND (t, 0),
15053 strict_overflow_p, depth);
15055 case tcc_constant:
15056 case tcc_declaration:
15057 case tcc_reference:
15058 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15060 default:
15061 break;
15064 switch (code)
15066 case TRUTH_AND_EXPR:
15067 case TRUTH_OR_EXPR:
15068 case TRUTH_XOR_EXPR:
15069 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15070 TREE_TYPE (t),
15071 TREE_OPERAND (t, 0),
15072 TREE_OPERAND (t, 1),
15073 strict_overflow_p, depth);
15074 case TRUTH_NOT_EXPR:
15075 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15076 TREE_TYPE (t),
15077 TREE_OPERAND (t, 0),
15078 strict_overflow_p, depth);
15080 case COND_EXPR:
15081 case CONSTRUCTOR:
15082 case OBJ_TYPE_REF:
15083 case ADDR_EXPR:
15084 case WITH_SIZE_EXPR:
15085 case SSA_NAME:
15086 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15088 default:
15089 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15093 /* Return true if `t' is known to be non-negative. Handle warnings
15094 about undefined signed overflow. */
15096 bool
15097 tree_expr_nonnegative_p (tree t)
15099 bool ret, strict_overflow_p;
15101 strict_overflow_p = false;
15102 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15103 if (strict_overflow_p)
15104 fold_overflow_warning (("assuming signed overflow does not occur when "
15105 "determining that expression is always "
15106 "non-negative"),
15107 WARN_STRICT_OVERFLOW_MISC);
15108 return ret;
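/* Editorial usage sketch (a minimal example, not upstream code):

     tree five = build_int_cst (integer_type_node, 5);
     gcc_checking_assert (tree_expr_nonnegative_p (five));

   Any strict-overflow assumption made while answering the query is
   reported through fold_overflow_warning before the result is
   returned.  */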
15112 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15113 For floating point we further ensure that T is not denormal.
15114 Similar logic is present in nonzero_address in rtlanal.h.
15116 If the return value is based on the assumption that signed overflow
15117 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15118 change *STRICT_OVERFLOW_P. */
15120 bool
15121 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15122 bool *strict_overflow_p)
15124 switch (code)
15126 case ABS_EXPR:
15127 return tree_expr_nonzero_warnv_p (op0,
15128 strict_overflow_p);
15130 case NOP_EXPR:
15132 tree inner_type = TREE_TYPE (op0);
15133 tree outer_type = type;
15135 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15136 && tree_expr_nonzero_warnv_p (op0,
15137 strict_overflow_p));
15139 break;
15141 case NON_LVALUE_EXPR:
15142 return tree_expr_nonzero_warnv_p (op0,
15143 strict_overflow_p);
15145 default:
15146 break;
15149 return false;
15152 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15153 For floating point we further ensure that T is not denormal.
15154 Similar logic is present in nonzero_address in rtlanal.h.
15156 If the return value is based on the assumption that signed overflow
15157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15158 change *STRICT_OVERFLOW_P. */
15160 bool
15161 tree_binary_nonzero_warnv_p (enum tree_code code,
15162 tree type,
15163 tree op0,
15164 tree op1, bool *strict_overflow_p)
15166 bool sub_strict_overflow_p;
15167 switch (code)
15169 case POINTER_PLUS_EXPR:
15170 case PLUS_EXPR:
15171 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15173 /* In the presence of negative values it is hard
15174    to say anything. */
15175 sub_strict_overflow_p = false;
15176 if (!tree_expr_nonnegative_warnv_p (op0,
15177 &sub_strict_overflow_p)
15178 || !tree_expr_nonnegative_warnv_p (op1,
15179 &sub_strict_overflow_p))
15180 return false;
15181 /* One of the operands must be positive and the other non-negative. */
15182 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15183 overflows, on a two's-complement machine the sum of two
15184 nonnegative numbers can never be zero. */
15185 return (tree_expr_nonzero_warnv_p (op0,
15186 strict_overflow_p)
15187 || tree_expr_nonzero_warnv_p (op1,
15188 strict_overflow_p));
15190 break;
15192 case MULT_EXPR:
15193 if (TYPE_OVERFLOW_UNDEFINED (type))
15195 if (tree_expr_nonzero_warnv_p (op0,
15196 strict_overflow_p)
15197 && tree_expr_nonzero_warnv_p (op1,
15198 strict_overflow_p))
15200 *strict_overflow_p = true;
15201 return true;
15204 break;
15206 case MIN_EXPR:
15207 sub_strict_overflow_p = false;
15208 if (tree_expr_nonzero_warnv_p (op0,
15209 &sub_strict_overflow_p)
15210 && tree_expr_nonzero_warnv_p (op1,
15211 &sub_strict_overflow_p))
15213 if (sub_strict_overflow_p)
15214 *strict_overflow_p = true;
15216 break;
15218 case MAX_EXPR:
15219 sub_strict_overflow_p = false;
15220 if (tree_expr_nonzero_warnv_p (op0,
15221 &sub_strict_overflow_p))
15223 if (sub_strict_overflow_p)
15224 *strict_overflow_p = true;
15226 /* When both operands are nonzero, then MAX must be too. */
15227 if (tree_expr_nonzero_warnv_p (op1,
15228 strict_overflow_p))
15229 return true;
15231 /* MAX where operand 0 is positive is positive. */
15232 return tree_expr_nonnegative_warnv_p (op0,
15233 strict_overflow_p);
15235 /* MAX where operand 1 is positive is positive. */
15236 else if (tree_expr_nonzero_warnv_p (op1,
15237 &sub_strict_overflow_p)
15238 && tree_expr_nonnegative_warnv_p (op1,
15239 &sub_strict_overflow_p))
15241 if (sub_strict_overflow_p)
15242 *strict_overflow_p = true;
15243 return true;
15245 break;
15247 case BIT_IOR_EXPR:
15248 return (tree_expr_nonzero_warnv_p (op1,
15249 strict_overflow_p)
15250 || tree_expr_nonzero_warnv_p (op0,
15251 strict_overflow_p));
15253 default:
15254 break;
15257 return false;
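/* Editorial note on the MAX_EXPR arm above: once op0 is known nonzero,
   MAX (op0, op1) is nonzero either because op1 is also nonzero or
   because op0 is known non-negative (a nonzero non-negative value is
   positive, and the maximum can only be larger); the symmetric check
   then handles the case where only op1 is known nonzero and
   non-negative.  */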
15260 /* Return true when T is an address and is known to be nonzero.
15261 For floating point we further ensure that T is not denormal.
15262 Similar logic is present in nonzero_address in rtlanal.h.
15264 If the return value is based on the assumption that signed overflow
15265 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15266 change *STRICT_OVERFLOW_P. */
15268 bool
15269 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15271 bool sub_strict_overflow_p;
15272 switch (TREE_CODE (t))
15274 case INTEGER_CST:
15275 return !integer_zerop (t);
15277 case ADDR_EXPR:
15279 tree base = TREE_OPERAND (t, 0);
15281 if (!DECL_P (base))
15282 base = get_base_address (base);
15284 if (base && TREE_CODE (base) == TARGET_EXPR)
15285 base = TARGET_EXPR_SLOT (base);
15287 if (!base)
15288 return false;
15290 /* For objects in symbol table check if we know they are non-zero.
15291 Don't do anything for variables and functions before symtab is built;
15292 it is quite possible that they will be declared weak later. */
15293 int nonzero_addr = maybe_nonzero_address (base);
15294 if (nonzero_addr >= 0)
15295 return nonzero_addr;
15297 /* Constants are never weak. */
15298 if (CONSTANT_CLASS_P (base))
15299 return true;
15301 return false;
15304 case COND_EXPR:
15305 sub_strict_overflow_p = false;
15306 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15307 &sub_strict_overflow_p)
15308 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15309 &sub_strict_overflow_p))
15311 if (sub_strict_overflow_p)
15312 *strict_overflow_p = true;
15313 return true;
15315 break;
15317 case SSA_NAME:
15318 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15319 break;
15320 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15322 default:
15323 break;
15325 return false;
15328 #define integer_valued_real_p(X) \
15329 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15331 #define RECURSE(X) \
15332 ((integer_valued_real_p) (X, depth + 1))
15334 /* Return true if the floating point result of (CODE OP0) has an
15335 integer value. We also allow +Inf, -Inf and NaN to be considered
15336 integer values. Return false for signaling NaN.
15338 DEPTH is the current nesting depth of the query. */
15340 bool
15341 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15343 switch (code)
15345 case FLOAT_EXPR:
15346 return true;
15348 case ABS_EXPR:
15349 return RECURSE (op0);
15351 CASE_CONVERT:
15353 tree type = TREE_TYPE (op0);
15354 if (TREE_CODE (type) == INTEGER_TYPE)
15355 return true;
15356 if (SCALAR_FLOAT_TYPE_P (type))
15357 return RECURSE (op0);
15358 break;
15361 default:
15362 break;
15364 return false;
15367 /* Return true if the floating point result of (CODE OP0 OP1) has an
15368 integer value. We also allow +Inf, -Inf and NaN to be considered
15369 integer values. Return false for signaling NaN.
15371 DEPTH is the current nesting depth of the query. */
15373 bool
15374 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15376 switch (code)
15378 case PLUS_EXPR:
15379 case MINUS_EXPR:
15380 case MULT_EXPR:
15381 case MIN_EXPR:
15382 case MAX_EXPR:
15383 return RECURSE (op0) && RECURSE (op1);
15385 default:
15386 break;
15388 return false;
15391 /* Return true if the floating point result of calling FNDECL with arguments
15392 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15393 considered integer values. Return false for signaling NaN. If FNDECL
15394 takes fewer than 2 arguments, the remaining ARGn are null.
15396 DEPTH is the current nesting depth of the query. */
15398 bool
15399 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15401 switch (fn)
15403 CASE_CFN_CEIL:
15404 CASE_CFN_CEIL_FN:
15405 CASE_CFN_FLOOR:
15406 CASE_CFN_FLOOR_FN:
15407 CASE_CFN_NEARBYINT:
15408 CASE_CFN_NEARBYINT_FN:
15409 CASE_CFN_RINT:
15410 CASE_CFN_RINT_FN:
15411 CASE_CFN_ROUND:
15412 CASE_CFN_ROUND_FN:
15413 CASE_CFN_ROUNDEVEN:
15414 CASE_CFN_ROUNDEVEN_FN:
15415 CASE_CFN_TRUNC:
15416 CASE_CFN_TRUNC_FN:
15417 return true;
15419 CASE_CFN_FMIN:
15420 CASE_CFN_FMIN_FN:
15421 CASE_CFN_FMAX:
15422 CASE_CFN_FMAX_FN:
15423 return RECURSE (arg0) && RECURSE (arg1);
15425 default:
15426 break;
15428 return false;
15431 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15432 has an integer value. We also allow +Inf, -Inf and NaN to be
15433 considered integer values. Return false for signaling NaN.
15435 DEPTH is the current nesting depth of the query. */
15437 bool
15438 integer_valued_real_single_p (tree t, int depth)
15440 switch (TREE_CODE (t))
15442 case REAL_CST:
15443 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15445 case COND_EXPR:
15446 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15448 case SSA_NAME:
15449 /* Limit the depth of recursion to avoid quadratic behavior.
15450 This is expected to catch almost all occurrences in practice.
15451 If this code misses important cases that unbounded recursion
15452 would not, passes that need this information could be revised
15453 to provide it through dataflow propagation. */
15454 return (!name_registered_for_update_p (t)
15455 && depth < param_max_ssa_name_query_depth
15456 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15457 depth));
15459 default:
15460 break;
15462 return false;
15465 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15466 has an integer value. We also allow +Inf, -Inf and NaN to be
15467 considered integer values. Return false for signaling NaN.
15469 DEPTH is the current nesting depth of the query. */
15471 static bool
15472 integer_valued_real_invalid_p (tree t, int depth)
15474 switch (TREE_CODE (t))
15476 case COMPOUND_EXPR:
15477 case MODIFY_EXPR:
15478 case BIND_EXPR:
15479 return RECURSE (TREE_OPERAND (t, 1));
15481 case SAVE_EXPR:
15482 return RECURSE (TREE_OPERAND (t, 0));
15484 default:
15485 break;
15487 return false;
15490 #undef RECURSE
15491 #undef integer_valued_real_p
15493 /* Return true if the floating point expression T has an integer value.
15494 We also allow +Inf, -Inf and NaN to be considered integer values.
15495 Return false for signaling NaN.
15497 DEPTH is the current nesting depth of the query. */
15499 bool
15500 integer_valued_real_p (tree t, int depth)
15502 if (t == error_mark_node)
15503 return false;
15505 STRIP_ANY_LOCATION_WRAPPER (t);
15507 tree_code code = TREE_CODE (t);
15508 switch (TREE_CODE_CLASS (code))
15510 case tcc_binary:
15511 case tcc_comparison:
15512 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15513 TREE_OPERAND (t, 1), depth);
15515 case tcc_unary:
15516 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15518 case tcc_constant:
15519 case tcc_declaration:
15520 case tcc_reference:
15521 return integer_valued_real_single_p (t, depth);
15523 default:
15524 break;
15527 switch (code)
15529 case COND_EXPR:
15530 case SSA_NAME:
15531 return integer_valued_real_single_p (t, depth);
15533 case CALL_EXPR:
15535 tree arg0 = (call_expr_nargs (t) > 0
15536 ? CALL_EXPR_ARG (t, 0)
15537 : NULL_TREE);
15538 tree arg1 = (call_expr_nargs (t) > 1
15539 ? CALL_EXPR_ARG (t, 1)
15540 : NULL_TREE);
15541 return integer_valued_real_call_p (get_call_combined_fn (t),
15542 arg0, arg1, depth);
15545 default:
15546 return integer_valued_real_invalid_p (t, depth);
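/* Editorial sketch: for a conversion such as (double) 3 -- a FLOAT_EXPR,
   or the REAL_CST 3.0 it folds to -- integer_valued_real_p returns
   true: the FLOAT_EXPR arm of integer_valued_real_unary_p accepts any
   conversion from an integer type, and the REAL_CST arm of
   integer_valued_real_single_p checks real_isinteger.  */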
15550 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15551 attempt to fold the expression to a constant without modifying TYPE,
15552 OP0 or OP1.
15554    If the expression can be simplified to a constant, then return
15555    the constant. If the expression cannot be simplified to a
15556    constant, then return NULL_TREE. */
15558 tree
15559 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15561 tree tem = fold_binary (code, type, op0, op1);
15562 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15565 /* Given the components of a unary expression CODE, TYPE and OP0,
15566 attempt to fold the expression to a constant without modifying
15567 TYPE or OP0.
15569    If the expression can be simplified to a constant, then return
15570    the constant. If the expression cannot be simplified to a
15571    constant, then return NULL_TREE. */
15573 tree
15574 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15576 tree tem = fold_unary (code, type, op0);
15577 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15580 /* If EXP represents referencing an element in a constant string
15581 (either via pointer arithmetic or array indexing), return the
15582 tree representing the value accessed, otherwise return NULL. */
15584 tree
15585 fold_read_from_constant_string (tree exp)
15587 if ((INDIRECT_REF_P (exp)
15588 || TREE_CODE (exp) == ARRAY_REF)
15589 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15591 tree exp1 = TREE_OPERAND (exp, 0);
15592 tree index;
15593 tree string;
15594 location_t loc = EXPR_LOCATION (exp);
15596 if (INDIRECT_REF_P (exp))
15597 string = string_constant (exp1, &index, NULL, NULL);
15598 else
15600 tree low_bound = array_ref_low_bound (exp);
15601 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15603 /* Optimize the special-case of a zero lower bound.
15605 We convert the low_bound to sizetype to avoid some problems
15606 with constant folding. (E.g. suppose the lower bound is 1,
15607 and its mode is QI. Without the conversion, (ARRAY
15608 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15609 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15610 if (! integer_zerop (low_bound))
15611 index = size_diffop_loc (loc, index,
15612 fold_convert_loc (loc, sizetype, low_bound));
15614 string = exp1;
15617 scalar_int_mode char_mode;
15618 if (string
15619 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15620 && TREE_CODE (string) == STRING_CST
15621 && tree_fits_uhwi_p (index)
15622 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15623 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15624 &char_mode)
15625 && GET_MODE_SIZE (char_mode) == 1)
15626 return build_int_cst_type (TREE_TYPE (exp),
15627 (TREE_STRING_POINTER (string)
15628 [TREE_INT_CST_LOW (index)]));
15630 return NULL;
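/* Editorial note: this is what lets, e.g., "abc"[1] fold to the
   character constant 'b': the ARRAY_REF branch normalizes the index
   against the array's lower bound, and the final check reads the byte
   straight out of the STRING_CST, provided the element mode is a
   single-byte integer mode and the index lies within the string.  */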
15633 /* Folds a read from vector element at IDX of vector ARG. */
15635 tree
15636 fold_read_from_vector (tree arg, poly_uint64 idx)
15638 unsigned HOST_WIDE_INT i;
15639 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15640 && known_ge (idx, 0u)
15641 && idx.is_constant (&i))
15643 if (TREE_CODE (arg) == VECTOR_CST)
15644 return VECTOR_CST_ELT (arg, i);
15645 else if (TREE_CODE (arg) == CONSTRUCTOR)
15647 if (CONSTRUCTOR_NELTS (arg)
15648 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15649 return NULL_TREE;
15650 if (i >= CONSTRUCTOR_NELTS (arg))
15651 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15652 return CONSTRUCTOR_ELT (arg, i)->value;
15655 return NULL_TREE;
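/* Editorial note: for a VECTOR_CST this simply returns the element at
   IDX; for a CONSTRUCTOR with fewer initialized elements than the
   vector has lanes, the missing trailing lanes read as zero, matching
   the zero-padding semantics of partial vector constructors in
   GIMPLE.  */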
15658 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15659 an integer, real, or fixed-point constant.
15661 TYPE is the type of the result. */
15663 static tree
15664 fold_negate_const (tree arg0, tree type)
15666 tree t = NULL_TREE;
15668 switch (TREE_CODE (arg0))
15670 case REAL_CST:
15671 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15672 break;
15674 case FIXED_CST:
15676 FIXED_VALUE_TYPE f;
15677 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15678 &(TREE_FIXED_CST (arg0)), NULL,
15679 TYPE_SATURATING (type));
15680 t = build_fixed (type, f);
15681 /* Propagate overflow flags. */
15682 if (overflow_p | TREE_OVERFLOW (arg0))
15683 TREE_OVERFLOW (t) = 1;
15684 break;
15687 default:
15688 if (poly_int_tree_p (arg0))
15690 wi::overflow_type overflow;
15691 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15692 t = force_fit_type (type, res, 1,
15693 (overflow && ! TYPE_UNSIGNED (type))
15694 || TREE_OVERFLOW (arg0));
15695 break;
15698 gcc_unreachable ();
15701 return t;
15704 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15705 an integer constant or real constant.
15707 TYPE is the type of the result. */
15709 tree
15710 fold_abs_const (tree arg0, tree type)
15712 tree t = NULL_TREE;
15714 switch (TREE_CODE (arg0))
15716 case INTEGER_CST:
15718 /* If the value is unsigned or non-negative, then the absolute value
15719 is the same as the ordinary value. */
15720 wide_int val = wi::to_wide (arg0);
15721 wi::overflow_type overflow = wi::OVF_NONE;
15722 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15723 ;
15725 /* If the value is negative, then the absolute value is
15726 its negation. */
15727 else
15728 val = wi::neg (val, &overflow);
15730 /* Force to the destination type, set TREE_OVERFLOW for signed
15731 TYPE only. */
15732 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15734 break;
15736 case REAL_CST:
15737 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15738 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15739 else
15740 t = arg0;
15741 break;
15743 default:
15744 gcc_unreachable ();
15747 return t;
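/* Editorial note: in the INTEGER_CST case above, fold_abs_const of
   INT_MIN in a signed 32-bit type wraps back to INT_MIN; wi::neg
   reports the overflow and force_fit_type then marks the result with
   TREE_OVERFLOW rather than producing a positive value.  */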
15750 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15751 constant. TYPE is the type of the result. */
15753 static tree
15754 fold_not_const (const_tree arg0, tree type)
15756 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15758 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15761 /* Given CODE, a relational operator, the target type, TYPE and two
15762 constant operands OP0 and OP1, return the result of the
15763 relational operation. If the result is not a compile time
15764 constant, then return NULL_TREE. */
15766 static tree
15767 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15769 int result, invert;
15771 /* From here on, the only cases we handle are when the result is
15772 known to be a constant. */
15774 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15776 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15777 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15779 /* Handle the cases where either operand is a NaN. */
15780 if (real_isnan (c0) || real_isnan (c1))
15782 switch (code)
15784 case EQ_EXPR:
15785 case ORDERED_EXPR:
15786 result = 0;
15787 break;
15789 case NE_EXPR:
15790 case UNORDERED_EXPR:
15791 case UNLT_EXPR:
15792 case UNLE_EXPR:
15793 case UNGT_EXPR:
15794 case UNGE_EXPR:
15795 case UNEQ_EXPR:
15796 result = 1;
15797 break;
15799 case LT_EXPR:
15800 case LE_EXPR:
15801 case GT_EXPR:
15802 case GE_EXPR:
15803 case LTGT_EXPR:
15804 if (flag_trapping_math)
15805 return NULL_TREE;
15806 result = 0;
15807 break;
15809 default:
15810 gcc_unreachable ();
15813 return constant_boolean_node (result, type);
15816 return constant_boolean_node (real_compare (code, c0, c1), type);
15819 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15821 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15822 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15823 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15826 /* Handle equality/inequality of complex constants. */
15827 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15829 tree rcond = fold_relational_const (code, type,
15830 TREE_REALPART (op0),
15831 TREE_REALPART (op1));
15832 tree icond = fold_relational_const (code, type,
15833 TREE_IMAGPART (op0),
15834 TREE_IMAGPART (op1));
15835 if (code == EQ_EXPR)
15836 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15837 else if (code == NE_EXPR)
15838 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15839 else
15840 return NULL_TREE;
15843 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15845 if (!VECTOR_TYPE_P (type))
15847 /* Have vector comparison with scalar boolean result. */
15848 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15849 && known_eq (VECTOR_CST_NELTS (op0),
15850 VECTOR_CST_NELTS (op1)));
15851 unsigned HOST_WIDE_INT nunits;
15852 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15853 return NULL_TREE;
15854 for (unsigned i = 0; i < nunits; i++)
15856 tree elem0 = VECTOR_CST_ELT (op0, i);
15857 tree elem1 = VECTOR_CST_ELT (op1, i);
15858 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15859 if (tmp == NULL_TREE)
15860 return NULL_TREE;
15861 if (integer_zerop (tmp))
15862 return constant_boolean_node (code == NE_EXPR, type);
15864 return constant_boolean_node (code == EQ_EXPR, type);
15866 tree_vector_builder elts;
15867 if (!elts.new_binary_operation (type, op0, op1, false))
15868 return NULL_TREE;
15869 unsigned int count = elts.encoded_nelts ();
15870 for (unsigned i = 0; i < count; i++)
15872 tree elem_type = TREE_TYPE (type);
15873 tree elem0 = VECTOR_CST_ELT (op0, i);
15874 tree elem1 = VECTOR_CST_ELT (op1, i);
15876 tree tem = fold_relational_const (code, elem_type,
15877 elem0, elem1);
15879 if (tem == NULL_TREE)
15880 return NULL_TREE;
15882 elts.quick_push (build_int_cst (elem_type,
15883 integer_zerop (tem) ? 0 : -1));
15886 return elts.build ();
15889 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15891 To compute GT, swap the arguments and do LT.
15892 To compute GE, do LT and invert the result.
15893 To compute LE, swap the arguments, do LT and invert the result.
15894 To compute NE, do EQ and invert the result.
15896 Therefore, the code below must handle only EQ and LT. */
15898 if (code == LE_EXPR || code == GT_EXPR)
15900 std::swap (op0, op1);
15901 code = swap_tree_comparison (code);
15904 /* Note that it is safe to invert for real values here because we
15905 have already handled the one case where it matters. */
15907 invert = 0;
15908 if (code == NE_EXPR || code == GE_EXPR)
15910 invert = 1;
15911 code = invert_tree_comparison (code, false);
15914 /* Compute a result for LT or EQ if args permit;
15915 otherwise return NULL_TREE. */
15916 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15918 if (code == EQ_EXPR)
15919 result = tree_int_cst_equal (op0, op1);
15920 else
15921 result = tree_int_cst_lt (op0, op1);
15923 else
15924 return NULL_TREE;
15926 if (invert)
15927 result ^= 1;
15928 return constant_boolean_node (result, type);
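/* Editorial worked example: to evaluate 7 > 3 the code above swaps the
   operands and computes 3 < 7; to evaluate 7 >= 3 it computes 7 < 3 and
   inverts the result. Only tree_int_cst_lt and tree_int_cst_equal are
   therefore needed for integer operands.  */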
15931 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15932 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15933 itself. */
15935 tree
15936 fold_build_cleanup_point_expr (tree type, tree expr)
15938 /* If the expression does not have side effects then we don't have to wrap
15939 it with a cleanup point expression. */
15940 if (!TREE_SIDE_EFFECTS (expr))
15941 return expr;
15943 /* If the expression is a return, check whether the expression inside the
15944    return, or the right-hand side of the modify expression inside the
15945    return, has side effects. If either has none, we don't need to wrap
15946    the expression in a cleanup point expression. Note we don't check the
15947    left-hand side of the modify because it should always be a return decl. */
15948 if (TREE_CODE (expr) == RETURN_EXPR)
15950 tree op = TREE_OPERAND (expr, 0);
15951 if (!op || !TREE_SIDE_EFFECTS (op))
15952 return expr;
15953 op = TREE_OPERAND (op, 1);
15954 if (!TREE_SIDE_EFFECTS (op))
15955 return expr;
15958 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15961 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15962 of an indirection through OP0, or NULL_TREE if no simplification is
15963 possible. */
15965 tree
15966 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15968 tree sub = op0;
15969 tree subtype;
15970 poly_uint64 const_op01;
15972 STRIP_NOPS (sub);
15973 subtype = TREE_TYPE (sub);
15974 if (!POINTER_TYPE_P (subtype)
15975 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15976 return NULL_TREE;
15978 if (TREE_CODE (sub) == ADDR_EXPR)
15980 tree op = TREE_OPERAND (sub, 0);
15981 tree optype = TREE_TYPE (op);
15983 /* *&CONST_DECL -> to the value of the const decl. */
15984 if (TREE_CODE (op) == CONST_DECL)
15985 return DECL_INITIAL (op);
15986 /* *&p => p; make sure to handle *&"str"[cst] here. */
15987 if (type == optype)
15989 tree fop = fold_read_from_constant_string (op);
15990 if (fop)
15991 return fop;
15992 else
15993 return op;
15995 /* *(foo *)&fooarray => fooarray[0] */
15996 else if (TREE_CODE (optype) == ARRAY_TYPE
15997 && type == TREE_TYPE (optype)
15998 && (!in_gimple_form
15999 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16001 tree type_domain = TYPE_DOMAIN (optype);
16002 tree min_val = size_zero_node;
16003 if (type_domain && TYPE_MIN_VALUE (type_domain))
16004 min_val = TYPE_MIN_VALUE (type_domain);
16005 if (in_gimple_form
16006 && TREE_CODE (min_val) != INTEGER_CST)
16007 return NULL_TREE;
16008 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16009 NULL_TREE, NULL_TREE);
16011 /* *(foo *)&complexfoo => __real__ complexfoo */
16012 else if (TREE_CODE (optype) == COMPLEX_TYPE
16013 && type == TREE_TYPE (optype))
16014 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16015 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16016 else if (VECTOR_TYPE_P (optype)
16017 && type == TREE_TYPE (optype))
16019 tree part_width = TYPE_SIZE (type);
16020 tree index = bitsize_int (0);
16021 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16022 index);
16026 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16027 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16029 tree op00 = TREE_OPERAND (sub, 0);
16030 tree op01 = TREE_OPERAND (sub, 1);
16032 STRIP_NOPS (op00);
16033 if (TREE_CODE (op00) == ADDR_EXPR)
16035 tree op00type;
16036 op00 = TREE_OPERAND (op00, 0);
16037 op00type = TREE_TYPE (op00);
16039 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16040 if (VECTOR_TYPE_P (op00type)
16041 && type == TREE_TYPE (op00type)
16042 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16043 but we want to treat offsets with MSB set as negative.
16044 For the code below negative offsets are invalid and
16045 TYPE_SIZE of the element is something unsigned, so
16046 check whether op01 fits into poly_int64, which implies
16047 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16048 then just use poly_uint64 because we want to treat the
16049 value as unsigned. */
16050 && tree_fits_poly_int64_p (op01))
16052 tree part_width = TYPE_SIZE (type);
16053 poly_uint64 max_offset
16054 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16055 * TYPE_VECTOR_SUBPARTS (op00type));
16056 if (known_lt (const_op01, max_offset))
16058 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16059 return fold_build3_loc (loc,
16060 BIT_FIELD_REF, type, op00,
16061 part_width, index);
16064 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16065 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16066 && type == TREE_TYPE (op00type))
16068 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16069 const_op01))
16070 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16072 /* ((foo *)&fooarray)[1] => fooarray[1] */
16073 else if (TREE_CODE (op00type) == ARRAY_TYPE
16074 && type == TREE_TYPE (op00type))
16076 tree type_domain = TYPE_DOMAIN (op00type);
16077 tree min_val = size_zero_node;
16078 if (type_domain && TYPE_MIN_VALUE (type_domain))
16079 min_val = TYPE_MIN_VALUE (type_domain);
16080 poly_uint64 type_size, index;
16081 if (poly_int_tree_p (min_val)
16082 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16083 && multiple_p (const_op01, type_size, &index))
16085 poly_offset_int off = index + wi::to_poly_offset (min_val);
16086 op01 = wide_int_to_tree (sizetype, off);
16087 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16088 NULL_TREE, NULL_TREE);
16094 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16095 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16096 && type == TREE_TYPE (TREE_TYPE (subtype))
16097 && (!in_gimple_form
16098 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16100 tree type_domain;
16101 tree min_val = size_zero_node;
16102 sub = build_fold_indirect_ref_loc (loc, sub);
16103 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16104 if (type_domain && TYPE_MIN_VALUE (type_domain))
16105 min_val = TYPE_MIN_VALUE (type_domain);
16106 if (in_gimple_form
16107 && TREE_CODE (min_val) != INTEGER_CST)
16108 return NULL_TREE;
16109 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16110 NULL_TREE);
16113 return NULL_TREE;
16116 /* Builds an expression for an indirection through T, simplifying some
16117 cases. */
16119 tree
16120 build_fold_indirect_ref_loc (location_t loc, tree t)
16122 tree type = TREE_TYPE (TREE_TYPE (t));
16123 tree sub = fold_indirect_ref_1 (loc, type, t);
16125 if (sub)
16126 return sub;
16128 return build1_loc (loc, INDIRECT_REF, type, t);
16131 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16133 tree
16134 fold_indirect_ref_loc (location_t loc, tree t)
16136 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16138 if (sub)
16139 return sub;
16140 else
16141 return t;
16144 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16145 whose result is ignored. The type of the returned tree need not be
16146 the same as the original expression. */
16148 tree
16149 fold_ignored_result (tree t)
16151 if (!TREE_SIDE_EFFECTS (t))
16152 return integer_zero_node;
16154 for (;;)
16155 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16157 case tcc_unary:
16158 t = TREE_OPERAND (t, 0);
16159 break;
16161 case tcc_binary:
16162 case tcc_comparison:
16163 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16164 t = TREE_OPERAND (t, 0);
16165 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16166 t = TREE_OPERAND (t, 1);
16167 else
16168 return t;
16169 break;
16171 case tcc_expression:
16172 switch (TREE_CODE (t))
16174 case COMPOUND_EXPR:
16175 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16176 return t;
16177 t = TREE_OPERAND (t, 0);
16178 break;
16180 case COND_EXPR:
16181 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16182 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16183 return t;
16184 t = TREE_OPERAND (t, 0);
16185 break;
16187 default:
16188 return t;
16190 break;
16192 default:
16193 return t;
16197 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16199 tree
16200 round_up_loc (location_t loc, tree value, unsigned int divisor)
16202 tree div = NULL_TREE;
16204 if (divisor == 1)
16205 return value;
16207 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16208    have to do anything. Only do this when VALUE is not a constant,
16209    because for a constant this check is more expensive than just
16210    doing the rounding directly. */
16211 if (TREE_CODE (value) != INTEGER_CST)
16213 div = build_int_cst (TREE_TYPE (value), divisor);
16215 if (multiple_of_p (TREE_TYPE (value), value, div))
16216 return value;
16219 /* If divisor is a power of two, simplify this to bit manipulation. */
16220 if (pow2_or_zerop (divisor))
16222 if (TREE_CODE (value) == INTEGER_CST)
16224 wide_int val = wi::to_wide (value);
16225 bool overflow_p;
16227 if ((val & (divisor - 1)) == 0)
16228 return value;
16230 overflow_p = TREE_OVERFLOW (value);
16231 val += divisor - 1;
16232 val &= (int) -divisor;
16233 if (val == 0)
16234 overflow_p = true;
16236 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16238 else
16240 tree t;
16242 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16243 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16244 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16245 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16248 else
16250 if (!div)
16251 div = build_int_cst (TREE_TYPE (value), divisor);
16252 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16253 value = size_binop_loc (loc, MULT_EXPR, value, div);
16256 return value;
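/* Editorial worked example: for a power-of-two divisor the rounding is
   pure bit manipulation, (value + divisor - 1) & -divisor, so
   round_up_loc (loc, value, 8) maps 21 to (21 + 7) & ~7 == 24; the
   val == 0 test above catches the constant that wraps around the top
   of its type.  */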
16259 /* Likewise, but round down. */
16261 tree
16262 round_down_loc (location_t loc, tree value, int divisor)
16264 tree div = NULL_TREE;
16266 gcc_assert (divisor > 0);
16267 if (divisor == 1)
16268 return value;
16270 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16271    have to do anything. Only do this when VALUE is not a constant,
16272    because for a constant this check is more expensive than just
16273    doing the rounding directly. */
16274 if (TREE_CODE (value) != INTEGER_CST)
16276 div = build_int_cst (TREE_TYPE (value), divisor);
16278 if (multiple_of_p (TREE_TYPE (value), value, div))
16279 return value;
16282 /* If divisor is a power of two, simplify this to bit manipulation. */
16283 if (pow2_or_zerop (divisor))
16285 tree t;
16287 t = build_int_cst (TREE_TYPE (value), -divisor);
16288 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16290 else
16292 if (!div)
16293 div = build_int_cst (TREE_TYPE (value), divisor);
16294 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16295 value = size_binop_loc (loc, MULT_EXPR, value, div);
16298 return value;
16301 /* Return a pointer to the base of the object addressed by EXP and
16302    extract information about the offset of the access, storing it
16303    in PBITPOS and POFFSET. */
16305 static tree
16306 split_address_to_core_and_offset (tree exp,
16307 poly_int64_pod *pbitpos, tree *poffset)
16309 tree core;
16310 machine_mode mode;
16311 int unsignedp, reversep, volatilep;
16312 poly_int64 bitsize;
16313 location_t loc = EXPR_LOCATION (exp);
16315 if (TREE_CODE (exp) == SSA_NAME)
16316 if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16317 if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16318 exp = gimple_assign_rhs1 (def);
16320 if (TREE_CODE (exp) == ADDR_EXPR)
16322 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16323 poffset, &mode, &unsignedp, &reversep,
16324 &volatilep);
16325 core = build_fold_addr_expr_loc (loc, core);
16327 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16329 core = TREE_OPERAND (exp, 0);
16330 STRIP_NOPS (core);
16331 *pbitpos = 0;
16332 *poffset = TREE_OPERAND (exp, 1);
16333 if (poly_int_tree_p (*poffset))
16335 poly_offset_int tem
16336 = wi::sext (wi::to_poly_offset (*poffset),
16337 TYPE_PRECISION (TREE_TYPE (*poffset)));
16338 tem <<= LOG2_BITS_PER_UNIT;
16339 if (tem.to_shwi (pbitpos))
16340 *poffset = NULL_TREE;
16343 else
16345 core = exp;
16346 *pbitpos = 0;
16347 *poffset = NULL_TREE;
16350 return core;
16353 /* Returns true if addresses of E1 and E2 differ by a constant, false
16354 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16356 bool
16357 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16359 tree core1, core2;
16360 poly_int64 bitpos1, bitpos2;
16361 tree toffset1, toffset2, tdiff, type;
16363 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16364 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16366 poly_int64 bytepos1, bytepos2;
16367 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16368 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16369 || !operand_equal_p (core1, core2, 0))
16370 return false;
16372 if (toffset1 && toffset2)
16374 type = TREE_TYPE (toffset1);
16375 if (type != TREE_TYPE (toffset2))
16376 toffset2 = fold_convert (type, toffset2);
16378 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16379 if (!cst_and_fits_in_hwi (tdiff))
16380 return false;
16382 *diff = int_cst_value (tdiff);
16384 else if (toffset1 || toffset2)
16386 /* If only one of the offsets is non-constant, the difference cannot
16387 be a constant. */
16388 return false;
16390 else
16391 *diff = 0;
16393 *diff += bytepos1 - bytepos2;
16394 return true;
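
/* Illustrative sketch, not part of the original source: a caller of
   ptr_difference_const. For E1 == &a[6] and E2 == &a[2] with 4-byte
   elements, *DIFF is set to 16 and true is returned; for addresses of
   unrelated objects the function returns false. The helper name is
   hypothetical. */

static bool
example_ptr_diff_is (tree e1, tree e2, HOST_WIDE_INT expected)
{
  poly_int64 diff;
  return ptr_difference_const (e1, e2, &diff)
	 && known_eq (diff, expected);
}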
16397 /* Return OFF converted to a pointer offset type suitable as offset for
16398 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16399 tree
16400 convert_to_ptrofftype_loc (location_t loc, tree off)
16402 if (ptrofftype_p (TREE_TYPE (off)))
16403 return off;
16404 return fold_convert_loc (loc, sizetype, off);
16407 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16408 tree
16409 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16411 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16412 ptr, convert_to_ptrofftype_loc (loc, off));
16415 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16416 tree
16417 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16419 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16420 ptr, size_int (off));
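
/* Illustrative sketch, not part of the original source: building
   PTR + 4 with the helpers above. The HWI variant is equivalent to
   fold_build_pointer_plus_loc (loc, ptr, size_int (4)), since
   size_int already produces a sizetype offset. The function name is
   hypothetical. */

static tree
example_advance_ptr (location_t loc, tree ptr)
{
  return fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
}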
16423 /* Return a pointer to a NUL-terminated string containing the sequence
16424 of bytes corresponding to the representation of the object referred to
16425 by SRC (or a subsequence of such bytes within it if SRC is a reference
16426 to an initialized constant array plus some constant offset).
16427 Set *STRSIZE to the number of bytes in the constant sequence including
16428 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16429 where A is the array that stores the constant sequence that SRC points
16430 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16431 need not point to a string or even an array of characters but may point
16432 to an object of any type. */
16434 const char *
16435 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16437 /* The offset into the array A storing the string, and A's byte size. */
16438 tree offset_node;
16439 tree mem_size;
16441 if (strsize)
16442 *strsize = 0;
16444 if (strsize)
16445 src = byte_representation (src, &offset_node, &mem_size, NULL);
16446 else
16447 src = string_constant (src, &offset_node, &mem_size, NULL);
16448 if (!src)
16449 return NULL;
16451 unsigned HOST_WIDE_INT offset = 0;
16452 if (offset_node != NULL_TREE)
16454 if (!tree_fits_uhwi_p (offset_node))
16455 return NULL;
16456 else
16457 offset = tree_to_uhwi (offset_node);
16460 if (!tree_fits_uhwi_p (mem_size))
16461 return NULL;
16463 /* ARRAY_SIZE is the byte size of the array the constant sequence
16464 is stored in and equal to sizeof A. INIT_BYTES is the number
16465 of bytes in the constant sequence used to initialize the array,
16466 including any embedded NULs as well as the terminating NUL (for
16467 strings), but not including any trailing zeros/NULs past
16468 the terminating one appended implicitly to a string literal to
16469 zero out the remainder of the array it's stored in. For example,
16470 given:
16471 const char a[7] = "abc\0d";
16472 n = strlen (a + 1);
16473 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16474 (i.e., NUL-terminated) string with no embedded NULs, INIT_BYTES
16475 is equal to strlen (A) + 1. */
16476 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16477 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16478 const char *string = TREE_STRING_POINTER (src);
16480 /* Ideally this would turn into a gcc_checking_assert over time. */
16481 if (init_bytes > array_size)
16482 init_bytes = array_size;
16484 if (init_bytes == 0 || offset >= array_size)
16485 return NULL;
16487 if (strsize)
16489 /* Compute and store the number of characters from the beginning
16490 of the substring at OFFSET to the end, including the terminating
16491 NUL. Offsets past the initial length refer to empty strings. */
16492 if (offset < init_bytes)
16493 *strsize = init_bytes - offset;
16494 else
16495 *strsize = 1;
16497 else
16499 tree eltype = TREE_TYPE (TREE_TYPE (src));
16500 /* Support only properly NUL-terminated single-byte strings. */
16501 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16502 return NULL;
16503 if (string[init_bytes - 1] != '\0')
16504 return NULL;
16507 return offset < init_bytes ? string + offset : "";
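
/* Illustrative sketch, not part of the original source: querying the
   example from the comment above. Given const char a[7] = "abc\0d"
   and SRC == &a[1], getbyterep returns a pointer to "bc\0d" and sets
   *STRSIZE to 5 (INIT_BYTES 6 minus OFFSET 1). The function name is
   hypothetical. */

static const char *
example_byterep (tree src)
{
  unsigned HOST_WIDE_INT strsize;
  const char *bytes = getbyterep (src, &strsize);
  /* BYTES is null if SRC does not reference a constant byte sequence. */
  return bytes;
}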
16510 /* Return a pointer to a NUL-terminated string corresponding to
16511 the expression STR referencing a constant string, possibly
16512 involving a constant offset. Return null if STR either doesn't
16513 reference a constant string or if it involves a nonconstant
16514 offset. */
16516 const char *
16517 c_getstr (tree str)
16519 return getbyterep (str, NULL);
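
/* Illustrative sketch, not part of the original source: c_getstr on
   the address of a string literal yields its bytes; it returns null
   for non-constant strings. The function name is hypothetical. */

static bool
example_is_abc (tree str)
{
  const char *p = c_getstr (str);
  return p && strcmp (p, "abc") == 0;
}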
16522 /* Given a tree T, compute which bits in T may be nonzero. */
16524 wide_int
16525 tree_nonzero_bits (const_tree t)
16527 switch (TREE_CODE (t))
16529 case INTEGER_CST:
16530 return wi::to_wide (t);
16531 case SSA_NAME:
16532 return get_nonzero_bits (t);
16533 case NON_LVALUE_EXPR:
16534 case SAVE_EXPR:
16535 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16536 case BIT_AND_EXPR:
16537 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16538 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16539 case BIT_IOR_EXPR:
16540 case BIT_XOR_EXPR:
16541 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16542 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16543 case COND_EXPR:
16544 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16545 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16546 CASE_CONVERT:
16547 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16548 TYPE_PRECISION (TREE_TYPE (t)),
16549 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16550 case PLUS_EXPR:
16551 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16553 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16554 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16555 if (wi::bit_and (nzbits1, nzbits2) == 0)
16556 return wi::bit_or (nzbits1, nzbits2);
16558 break;
16559 case LSHIFT_EXPR:
16560 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16562 tree type = TREE_TYPE (t);
16563 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16564 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16565 TYPE_PRECISION (type));
16566 return wi::neg_p (arg1)
16567 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16568 : wi::lshift (nzbits, arg1);
16570 break;
16571 case RSHIFT_EXPR:
16572 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16574 tree type = TREE_TYPE (t);
16575 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16576 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16577 TYPE_PRECISION (type));
16578 return wi::neg_p (arg1)
16579 ? wi::lshift (nzbits, -arg1)
16580 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16582 break;
16583 default:
16584 break;
16587 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
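
/* Illustrative sketch, not part of the original source: the nonzero
   mask narrows through BIT_AND_EXPR and shifts through LSHIFT_EXPR.
   The function name is hypothetical. */

static void
example_nonzero_bits ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree t = fold_build2 (BIT_AND_EXPR, type, x,
			build_int_cst (type, 0xf0));
  /* Only bits that can be nonzero in both operands survive. */
  gcc_assert (tree_nonzero_bits (t) == 0xf0);
  t = fold_build2 (LSHIFT_EXPR, type, t, build_int_cst (type, 1));
  gcc_assert (tree_nonzero_bits (t) == 0x1e0);
}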
16590 /* Helper function for address compare simplifications in match.pd.
16591 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16592 TYPE is the type of comparison operands.
16593 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16594 GENERIC is true for GENERIC folding and false for GIMPLE folding.
16595 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16596 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16597 and 2 if unknown. */
16599 int
16600 address_compare (tree_code code, tree type, tree op0, tree op1,
16601 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16602 bool generic)
16604 if (TREE_CODE (op0) == SSA_NAME)
16605 op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
16606 if (TREE_CODE (op1) == SSA_NAME)
16607 op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
16608 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16609 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16610 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16611 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16612 if (base0 && TREE_CODE (base0) == MEM_REF)
16614 off0 += mem_ref_offset (base0).force_shwi ();
16615 base0 = TREE_OPERAND (base0, 0);
16617 if (base1 && TREE_CODE (base1) == MEM_REF)
16619 off1 += mem_ref_offset (base1).force_shwi ();
16620 base1 = TREE_OPERAND (base1, 0);
16622 if (base0 == NULL_TREE || base1 == NULL_TREE)
16623 return 2;
16625 int equal = 2;
16626 /* Punt in GENERIC on variables with value expressions;
16627 the value expressions might point to fields/elements
16628 of other vars etc. */
16629 if (generic
16630 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16631 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16632 return 2;
16633 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16635 symtab_node *node0 = symtab_node::get_create (base0);
16636 symtab_node *node1 = symtab_node::get_create (base1);
16637 equal = node0->equal_address_to (node1);
16639 else if ((DECL_P (base0)
16640 || TREE_CODE (base0) == SSA_NAME
16641 || TREE_CODE (base0) == STRING_CST)
16642 && (DECL_P (base1)
16643 || TREE_CODE (base1) == SSA_NAME
16644 || TREE_CODE (base1) == STRING_CST))
16645 equal = (base0 == base1);
16646 /* Assume different STRING_CSTs with the same content will be
16647 merged. */
16648 if (equal == 0
16649 && TREE_CODE (base0) == STRING_CST
16650 && TREE_CODE (base1) == STRING_CST
16651 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16652 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16653 TREE_STRING_LENGTH (base0)) == 0)
16654 equal = 1;
16655 if (equal == 1)
16657 if (code == EQ_EXPR
16658 || code == NE_EXPR
16659 /* If the offsets are equal we can ignore overflow. */
16660 || known_eq (off0, off1)
16661 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16662 /* Or if we compare using pointers to decls or strings. */
16663 || (POINTER_TYPE_P (type)
16664 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16665 return 1;
16666 return 2;
16668 if (equal != 0)
16669 return equal;
16670 if (code != EQ_EXPR && code != NE_EXPR)
16671 return 2;
16673 /* At this point we know (or assume) the two pointers point at
16674 different objects. */
16675 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16676 off0.is_constant (&ioff0);
16677 off1.is_constant (&ioff1);
16678 /* Punt on non-zero offsets from functions. */
16679 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16680 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16681 return 2;
16682 /* Or if the bases are neither decls nor string literals. */
16683 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16684 return 2;
16685 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16686 return 2;
16687 /* For initializers, assume addresses of different functions are
16688 different. */
16689 if (folding_initializer
16690 && TREE_CODE (base0) == FUNCTION_DECL
16691 && TREE_CODE (base1) == FUNCTION_DECL)
16692 return 0;
16694 /* Compute whether one address points to the start of one
16695 object and the other to the end of another. */
16696 poly_int64 size0 = 0, size1 = 0;
16697 if (TREE_CODE (base0) == STRING_CST)
16699 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16700 equal = 2;
16701 else
16702 size0 = TREE_STRING_LENGTH (base0);
16704 else if (TREE_CODE (base0) == FUNCTION_DECL)
16705 size0 = 1;
16706 else
16708 tree sz0 = DECL_SIZE_UNIT (base0);
16709 if (!tree_fits_poly_int64_p (sz0))
16710 equal = 2;
16711 else
16712 size0 = tree_to_poly_int64 (sz0);
16714 if (TREE_CODE (base1) == STRING_CST)
16716 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16717 equal = 2;
16718 else
16719 size1 = TREE_STRING_LENGTH (base1);
16721 else if (TREE_CODE (base1) == FUNCTION_DECL)
16722 size1 = 1;
16723 else
16725 tree sz1 = DECL_SIZE_UNIT (base1);
16726 if (!tree_fits_poly_int64_p (sz1))
16727 equal = 2;
16728 else
16729 size1 = tree_to_poly_int64 (sz1);
16731 if (equal == 0)
16733 /* If one offset is pointing (or could be) to the beginning of one
16734 object and the other is pointing to one past the last byte of the
16735 other object, punt. */
16736 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16737 equal = 2;
16738 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16739 equal = 2;
16740 /* If both offsets are the same, there are some cases we know are
16741 ok: either we know the offsets aren't zero, or we know both sizes
16742 are nonzero. */
16743 if (equal == 2
16744 && known_eq (off0, off1)
16745 && (known_ne (off0, 0)
16746 || (known_ne (size0, 0) && known_ne (size1, 0))))
16747 equal = 0;
16750 /* At this point, equal is 2 if either one or both pointers are out of
16751 bounds of their object, or one points to the start of its object and
16752 the other points to the end of its object. This is unspecified
16753 behavior e.g. in C++. Otherwise equal is 0. */
16754 if (folding_cxx_constexpr && equal)
16755 return equal;
16757 /* When both pointers point to string literals, the pointers might be
16758 the same even when equal is 0, due to tail merging of string literals. */
16759 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16761 if (ioff0 < 0
16762 || ioff1 < 0
16763 || ioff0 > TREE_STRING_LENGTH (base0)
16764 || ioff1 > TREE_STRING_LENGTH (base1))
16765 return 2;
16767 /* If the bytes in the string literals starting at the pointers
16768 differ, the pointers need to be different. */
16769 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
16770 TREE_STRING_POINTER (base1) + ioff1,
16771 MIN (TREE_STRING_LENGTH (base0) - ioff0,
16772 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
16774 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
16775 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
16776 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
16777 ioffmin) == 0)
16778 /* If even the bytes in the string literal before the
16779 pointers are the same, the string literals could be
16780 tail merged. */
16781 return 2;
16783 return 0;
16786 if (folding_cxx_constexpr)
16787 return 0;
16789 /* If this is a pointer comparison, ignore for now even
16790 valid equalities where one pointer is at offset zero
16791 of one object and the other points one past the end of another one. */
16792 if (!INTEGRAL_TYPE_P (type))
16793 return 0;
16795 /* Assume that string literals can't be adjacent to variables
16796 (automatic or global). */
16797 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
16798 return 0;
16800 /* Assume that automatic variables can't be adjacent to global
16801 variables. */
16802 if (is_global_var (base0) != is_global_var (base1))
16803 return 0;
16805 return equal;
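
/* Illustrative sketch, not part of the original source: consuming the
   tri-state result above when folding OP0 == OP1. OP0 and OP1 must
   satisfy address_compare's preconditions (ADDR_EXPRs, possibly behind
   SSA names). The function name is hypothetical. */

static tree
example_fold_addr_eq (tree type, tree op0, tree op1)
{
  tree base0, base1;
  poly_int64 off0, off1;
  int eq = address_compare (EQ_EXPR, type, op0, op1, base0, base1,
			    off0, off1, true);
  if (eq == 0)
    return boolean_false_node;	/* Known unequal regardless of offsets. */
  if (eq == 1 && known_eq (off0, off1))
    return boolean_true_node;	/* Same base, same offset. */
  if (eq == 1 && known_ne (off0, off1))
    return boolean_false_node;	/* Same base, different offsets. */
  return NULL_TREE;		/* Unknown; leave the comparison alone. */
}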
16808 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
16809 tree
16810 ctor_single_nonzero_element (const_tree t)
16812 unsigned HOST_WIDE_INT idx;
16813 constructor_elt *ce;
16814 tree elt = NULL_TREE;
16816 if (TREE_CODE (t) != CONSTRUCTOR)
16817 return NULL_TREE;
16818 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
16819 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
16821 if (elt)
16822 return NULL_TREE;
16823 elt = ce->value;
16825 return elt;
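
/* Illustrative sketch, not part of the original source: for the
   constructor {0, 0, 5, 0} the helper above returns the INTEGER_CST 5;
   with a second nonzero element it would return NULL_TREE. The
   function name is hypothetical. */

static bool
example_single_nonzero ()
{
  vec<constructor_elt, va_gc> *v = NULL;
  for (int i = 0; i < 4; i++)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE,
			    build_int_cst (integer_type_node,
					   i == 2 ? 5 : 0));
  tree ctor = build_constructor (build_vector_type (integer_type_node, 4),
				 v);
  tree elt = ctor_single_nonzero_element (ctor);
  return elt && tree_to_shwi (elt) == 5;
}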
16828 #if CHECKING_P
16830 namespace selftest {
16832 /* Helper functions for writing tests of folding trees. */
16834 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16836 static void
16837 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16838 tree constant)
16840 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16843 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
16844 wrapping WRAPPED_EXPR. */
16846 static void
16847 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16848 tree wrapped_expr)
16850 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16851 ASSERT_NE (wrapped_expr, result);
16852 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16853 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16856 /* Verify that various arithmetic binary operations are folded
16857 correctly. */
16859 static void
16860 test_arithmetic_folding ()
16862 tree type = integer_type_node;
16863 tree x = create_tmp_var_raw (type, "x");
16864 tree zero = build_zero_cst (type);
16865 tree one = build_int_cst (type, 1);
16867 /* Addition. */
16868 /* 1 <-- (0 + 1) */
16869 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
16870 one);
16871 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
16872 one);
16874 /* (nonlvalue)x <-- (x + 0) */
16875 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
16876 x);
16878 /* Subtraction. */
16879 /* 0 <-- (x - x) */
16880 assert_binop_folds_to_const (x, MINUS_EXPR, x,
16881 zero);
16882 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
16883 x);
16885 /* Multiplication. */
16886 /* 0 <-- (x * 0) */
16887 assert_binop_folds_to_const (x, MULT_EXPR, zero,
16888 zero);
16890 /* (nonlvalue)x <-- (x * 1) */
16891 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
16892 x);
16895 /* Verify that various binary operations on vectors are folded
16896 correctly. */
16898 static void
16899 test_vector_folding ()
16901 tree inner_type = integer_type_node;
16902 tree type = build_vector_type (inner_type, 4);
16903 tree zero = build_zero_cst (type);
16904 tree one = build_one_cst (type);
16905 tree index = build_index_vector (type, 0, 1);
16907 /* Verify equality tests that return a scalar boolean result. */
16908 tree res_type = boolean_type_node;
16909 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
16910 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
16911 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
16912 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
16913 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
16914 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16915 index, one)));
16916 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
16917 index, index)));
16918 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16919 index, index)));
16922 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16924 static void
16925 test_vec_duplicate_folding ()
16927 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
16928 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
16929 /* This will be 1 if VEC_MODE isn't a vector mode. */
16930 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
16932 tree type = build_vector_type (ssizetype, nunits);
16933 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
16934 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
16935 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
16938 /* Run all of the selftests within this file. */
16940 void
16941 fold_const_cc_tests ()
16943 test_arithmetic_folding ();
16944 test_vector_folding ();
16945 test_vec_duplicate_folding ();
16948 } // namespace selftest
16950 #endif /* CHECKING_P */