/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"

/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
   otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
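
/* For instance, this encoding turns conjunction and disjunction of two
   comparisons of the same operands into bit arithmetic on the codes:
   COMPCODE_LT | COMPCODE_EQ == (1 | 2) == 3 == COMPCODE_LE mirrors
   (a < b) || (a == b) <=> (a <= b), and COMPCODE_LE & COMPCODE_GE
   == (3 & 6) == 2 == COMPCODE_EQ mirrors (a <= b) && (a >= b) <=> (a == b).  */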

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
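
/* As an illustration: given INTEGER_CSTs 12 and 4 this returns the
   INTEGER_CST 3, while given 13 and 4 it returns NULL_TREE because
   the remainder is nonzero.  */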

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
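
/* A sketched, hypothetical illustration of the deferral protocol above;
   the helper below is not a real GCC entry point (real callers include
   the loop-iteration estimator).  Warnings queued while folding are
   emitted only if the caller actually uses the folded result.  */

#if 0
static tree
fold_with_deferred_warnings_example (tree expr)
{
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  /* Only a constant result counts as "used" here; otherwise warnings
     queued by fold_overflow_warning below are silently dropped.  */
  bool used = folded != NULL_TREE && TREE_CODE (folded) == INTEGER_CST;
  fold_undefer_overflow_warnings (used, NULL, 0);
  return used ? folded : expr;
}
#endif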

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
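
/* For example, sin qualifies because -sin(x) == sin(-x) for every x.
   rint is odd only when the rounding mode is sign-symmetric: when
   rounding toward +inf, rint(-0.5) is -0.0 while -rint(0.5) is -1.0,
   which is why the CASE_CFN_RINT group is guarded by
   !flag_rounding_math.  */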

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
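
/* E.g. for a 32-bit signed type, only INT_MIN (0x80000000, the lone
   value with just the sign bit set) cannot be negated: -INT_MIN is not
   representable, which is exactly what wi::only_sign_bit_p detects.  */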

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
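
/* A concrete instance of the MULT_EXPR restriction above, in 8-bit
   arithmetic: -128 == -2 * 64 does not overflow, but negating either
   factor asks for +128, which does.  Only (negative) powers of two can
   multiply to INT_MIN, so a constant factor whose absolute value is
   not a power of two makes the negation safe.  */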

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
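
/* Worked example: with CODE == PLUS_EXPR, splitting x + 3 stores the
   literal 3 in *LITP and returns x; splitting x - 5 (a MINUS_EXPR,
   accepted because CODE is PLUS_EXPR) stores 5 in *MINUS_LITP instead,
   recording that the literal was subtracted.  */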

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
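
/* Illustration of the division and modulo variants above for
   arg1 == -7, arg2 == 2, SIGNED: TRUNC_DIV_EXPR gives -3 (round toward
   zero), FLOOR_DIV_EXPR -4, CEIL_DIV_EXPR -3 and ROUND_DIV_EXPR -4
   (ties away from zero); the matching *_MOD_EXPR results -1, 1, -1
   and 1 each satisfy -7 == 2 * quotient + remainder.  */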

/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
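
/* poly_ints model sizes of the form a + b*x with x a runtime quantity
   (e.g. a scalable vector length), so e.g. adding the poly_ints
   4 + 4x and 2 + 2x yields 6 + 6x coefficient-wise, while a product is
   only folded above when one factor is a plain INTEGER_CST.  */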

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
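
/* For example, (a + b) << 2 == (a << 2) + (b << 2) in modular
   arithmetic, so LSHIFT_EXPR distributes over addition in operand 1;
   it does not in operand 2, since c << (a + b) equals (c << a) << b
   rather than (c << a) + (c << b).  */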

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
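
/* Worked example of the wide-range complex division above for
   (1 + 2i) / (3 + 4i): |3| < |4| selects the TRUE branch, giving
   ratio = 3/4, div = 3*(3/4) + 4 = 25/4, real = (1*(3/4) + 2) / (25/4)
   = 0.44 and imag = (2*(3/4) - 1) / (25/4) = 0.08, matching the exact
   quotient 0.44 + 0.08i.  */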

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
1962 /* Given two values, either both of sizetype or both of bitsizetype,
1963 compute the difference between the two values. Return the value
1964 in signed type corresponding to the type of the operands. */
1966 tree
1967 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1969 tree type = TREE_TYPE (arg0);
1970 tree ctype;
1972 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1973 TREE_TYPE (arg1)));
1975 /* If the type is already signed, just do the simple thing. */
1976 if (!TYPE_UNSIGNED (type))
1977 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1979 if (type == sizetype)
1980 ctype = ssizetype;
1981 else if (type == bitsizetype)
1982 ctype = sbitsizetype;
1983 else
1984 ctype = signed_type_for (type);
1986 /* If either operand is not a constant, do the conversions to the signed
1987 type and subtract. The hardware will do the right thing with any
1988 overflow in the subtraction. */
1989 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1990 return size_binop_loc (loc, MINUS_EXPR,
1991 fold_convert_loc (loc, ctype, arg0),
1992 fold_convert_loc (loc, ctype, arg1));
1994 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1995 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1996 overflow) and negate (which can't either). Special-case a result
1997 of zero while we're here. */
1998 if (tree_int_cst_equal (arg0, arg1))
1999 return build_int_cst (ctype, 0);
2000 else if (tree_int_cst_lt (arg1, arg0))
2001 return fold_convert_loc (loc, ctype,
2002 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2003 else
2004 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2005 fold_convert_loc (loc, ctype,
2006 size_binop_loc (loc,
2007 MINUS_EXPR,
2008 arg1, arg0)));
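/* Illustrative sketch, not part of the original source: both operands are
   unsigned sizetype, the result is ssizetype; 4 - 8 is computed as
   -(8 - 4) so neither the subtraction nor the negation can overflow.  */
#if 0
tree diff = size_diffop (size_int (4), size_int (8)); /* ssizetype -4 */
#endif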
2011 /* A subroutine of fold_convert_const handling conversions of an
2012 INTEGER_CST to another integer type. */
2014 static tree
2015 fold_convert_const_int_from_int (tree type, const_tree arg1)
2017 /* Given an integer constant, make a new constant with the new type,
2018 appropriately sign-extended or truncated. Use widest_int
2019 so that any extension is done according to ARG1's type. */
2020 return force_fit_type (type, wi::to_widest (arg1),
2021 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2022 TREE_OVERFLOW (arg1));
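/* Illustrative sketch, not part of the original source: narrowing the
   INTEGER_CST 300 to unsigned char truncates it to 300 mod 256, i.e. 44.  */
#if 0
tree c = fold_convert_const_int_from_int (unsigned_char_type_node,
					  build_int_cst (integer_type_node,
							 300));
#endif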
2025 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2026 to an integer type. */
2028 static tree
2029 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2031 bool overflow = false;
2032 tree t;
2034 /* The following code implements the floating point to integer
2035 conversion rules required by the Java Language Specification,
2036 namely that IEEE NaNs are mapped to zero and values that overflow
2037 the target precision saturate, i.e. values greater than
2038 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2039 are mapped to INT_MIN. These semantics are allowed by the
2040 C and C++ standards that simply state that the behavior of
2041 FP-to-integer conversion is unspecified upon overflow. */
2043 wide_int val;
2044 REAL_VALUE_TYPE r;
2045 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2047 switch (code)
2049 case FIX_TRUNC_EXPR:
2050 real_trunc (&r, VOIDmode, &x);
2051 break;
2053 default:
2054 gcc_unreachable ();
2057 /* If R is NaN, return zero and show we have an overflow. */
2058 if (REAL_VALUE_ISNAN (r))
2060 overflow = true;
2061 val = wi::zero (TYPE_PRECISION (type));
2064 /* See if R is less than the lower bound or greater than the
2065 upper bound. */
2067 if (! overflow)
2069 tree lt = TYPE_MIN_VALUE (type);
2070 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2071 if (real_less (&r, &l))
2073 overflow = true;
2074 val = wi::to_wide (lt);
2078 if (! overflow)
2080 tree ut = TYPE_MAX_VALUE (type);
2081 if (ut)
2083 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2084 if (real_less (&u, &r))
2086 overflow = true;
2087 val = wi::to_wide (ut);
2092 if (! overflow)
2093 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2095 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2096 return t;
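/* Illustrative sketch, not part of the original source; BIG is a
   hypothetical REAL_CST holding 1.0e30. Truncating it to a 32-bit signed
   type saturates to the type's maximum value and sets TREE_OVERFLOW;
   a NaN input folds to 0, also with TREE_OVERFLOW set.  */
#if 0
tree t = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
					   integer_type_node, big);
#endif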
2099 /* A subroutine of fold_convert_const handling conversions of a
2100 FIXED_CST to an integer type. */
2102 static tree
2103 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2105 tree t;
2106 double_int temp, temp_trunc;
2107 scalar_mode mode;
2109 /* Right-shift the FIXED_CST value into TEMP by FBIT bits. */
2110 temp = TREE_FIXED_CST (arg1).data;
2111 mode = TREE_FIXED_CST (arg1).mode;
2112 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2114 temp = temp.rshift (GET_MODE_FBIT (mode),
2115 HOST_BITS_PER_DOUBLE_INT,
2116 SIGNED_FIXED_POINT_MODE_P (mode));
2118 /* Left-shift TEMP back into TEMP_TRUNC by FBIT bits. */
2119 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2120 HOST_BITS_PER_DOUBLE_INT,
2121 SIGNED_FIXED_POINT_MODE_P (mode));
2123 else
2125 temp = double_int_zero;
2126 temp_trunc = double_int_zero;
2129 /* If FIXED_CST is negative, we need to round the value toward 0:
2130 if any fractional bits were shifted out, add 1 to TEMP. */
2131 if (SIGNED_FIXED_POINT_MODE_P (mode)
2132 && temp_trunc.is_negative ()
2133 && TREE_FIXED_CST (arg1).data != temp_trunc)
2134 temp += double_int_one;
2136 /* Given a fixed-point constant, make a new constant with the new type,
2137 appropriately sign-extended or truncated. */
2138 t = force_fit_type (type, temp, -1,
2139 (temp.is_negative ()
2140 && (TYPE_UNSIGNED (type)
2141 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2142 | TREE_OVERFLOW (arg1));
2144 return t;
2147 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2148 to another floating point type. */
2150 static tree
2151 fold_convert_const_real_from_real (tree type, const_tree arg1)
2153 REAL_VALUE_TYPE value;
2154 tree t;
2156 /* Don't perform the operation if flag_signaling_nans is on
2157 and the operand is a signaling NaN. */
2158 if (HONOR_SNANS (arg1)
2159 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2160 return NULL_TREE;
2162 /* With flag_rounding_math we should respect the current rounding mode
2163 unless the conversion is exact. */
2164 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2165 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2166 return NULL_TREE;
2168 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2169 t = build_real (type, value);
2171 /* If converting an infinity or NAN to a representation that doesn't
2172 have one, set the overflow bit so that we can produce some kind of
2173 error message at the appropriate point if necessary. It's not the
2174 most user-friendly message, but it's better than nothing. */
2175 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2176 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2177 TREE_OVERFLOW (t) = 1;
2178 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2179 && !MODE_HAS_NANS (TYPE_MODE (type)))
2180 TREE_OVERFLOW (t) = 1;
2181 /* Regular overflow: the conversion produced an infinity in a mode that
2182 can't represent infinities. */
2183 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2184 && REAL_VALUE_ISINF (value)
2185 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2186 TREE_OVERFLOW (t) = 1;
2187 else
2188 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2189 return t;
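/* Illustrative sketch, not part of the original source: narrowing the
   double constant 1.0 to float is exact and folds to a float REAL_CST,
   while an inexact narrowing is left unfolded (NULL_TREE) when
   sign-dependent rounding must be honored.  */
#if 0
tree f = fold_convert_const_real_from_real (float_type_node,
					    build_real (double_type_node,
							dconst1));
#endif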
2192 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2193 to a floating point type. */
2195 static tree
2196 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2198 REAL_VALUE_TYPE value;
2199 tree t;
2201 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2202 &TREE_FIXED_CST (arg1));
2203 t = build_real (type, value);
2205 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2206 return t;
2209 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2210 to another fixed-point type. */
2212 static tree
2213 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2215 FIXED_VALUE_TYPE value;
2216 tree t;
2217 bool overflow_p;
2219 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2220 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2221 t = build_fixed (type, value);
2223 /* Propagate overflow flags. */
2224 if (overflow_p | TREE_OVERFLOW (arg1))
2225 TREE_OVERFLOW (t) = 1;
2226 return t;
2229 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2230 to a fixed-point type. */
2232 static tree
2233 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2235 FIXED_VALUE_TYPE value;
2236 tree t;
2237 bool overflow_p;
2238 double_int di;
2240 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2242 di.low = TREE_INT_CST_ELT (arg1, 0);
2243 if (TREE_INT_CST_NUNITS (arg1) == 1)
2244 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2245 else
2246 di.high = TREE_INT_CST_ELT (arg1, 1);
2248 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2249 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2250 TYPE_SATURATING (type));
2251 t = build_fixed (type, value);
2253 /* Propagate overflow flags. */
2254 if (overflow_p | TREE_OVERFLOW (arg1))
2255 TREE_OVERFLOW (t) = 1;
2256 return t;
2259 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2260 to a fixed-point type. */
2262 static tree
2263 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2265 FIXED_VALUE_TYPE value;
2266 tree t;
2267 bool overflow_p;
2269 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2270 &TREE_REAL_CST (arg1),
2271 TYPE_SATURATING (type));
2272 t = build_fixed (type, value);
2274 /* Propagate overflow flags. */
2275 if (overflow_p | TREE_OVERFLOW (arg1))
2276 TREE_OVERFLOW (t) = 1;
2277 return t;
2280 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2281 type TYPE. If no simplification can be done return NULL_TREE. */
2283 static tree
2284 fold_convert_const (enum tree_code code, tree type, tree arg1)
2286 tree arg_type = TREE_TYPE (arg1);
2287 if (arg_type == type)
2288 return arg1;
2290 /* We can't widen types, since the runtime value could overflow the
2291 original type before being extended to the new type. */
2292 if (POLY_INT_CST_P (arg1)
2293 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2294 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2295 return build_poly_int_cst (type,
2296 poly_wide_int::from (poly_int_cst_value (arg1),
2297 TYPE_PRECISION (type),
2298 TYPE_SIGN (arg_type)));
2300 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2301 || TREE_CODE (type) == OFFSET_TYPE)
2303 if (TREE_CODE (arg1) == INTEGER_CST)
2304 return fold_convert_const_int_from_int (type, arg1);
2305 else if (TREE_CODE (arg1) == REAL_CST)
2306 return fold_convert_const_int_from_real (code, type, arg1);
2307 else if (TREE_CODE (arg1) == FIXED_CST)
2308 return fold_convert_const_int_from_fixed (type, arg1);
2310 else if (TREE_CODE (type) == REAL_TYPE)
2312 if (TREE_CODE (arg1) == INTEGER_CST)
2314 tree res = build_real_from_int_cst (type, arg1);
2315 /* Avoid the folding if flag_rounding_math is on and the
2316 conversion is not exact. */
2317 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2319 bool fail = false;
2320 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2321 TYPE_PRECISION (TREE_TYPE (arg1)));
2322 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2323 return NULL_TREE;
2325 return res;
2327 else if (TREE_CODE (arg1) == REAL_CST)
2328 return fold_convert_const_real_from_real (type, arg1);
2329 else if (TREE_CODE (arg1) == FIXED_CST)
2330 return fold_convert_const_real_from_fixed (type, arg1);
2332 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2334 if (TREE_CODE (arg1) == FIXED_CST)
2335 return fold_convert_const_fixed_from_fixed (type, arg1);
2336 else if (TREE_CODE (arg1) == INTEGER_CST)
2337 return fold_convert_const_fixed_from_int (type, arg1);
2338 else if (TREE_CODE (arg1) == REAL_CST)
2339 return fold_convert_const_fixed_from_real (type, arg1);
2341 else if (TREE_CODE (type) == VECTOR_TYPE)
2343 if (TREE_CODE (arg1) == VECTOR_CST
2344 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2346 tree elttype = TREE_TYPE (type);
2347 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2348 /* We can't handle steps directly when extending, since the
2349 values need to wrap at the original precision first. */
2350 bool step_ok_p
2351 = (INTEGRAL_TYPE_P (elttype)
2352 && INTEGRAL_TYPE_P (arg1_elttype)
2353 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2354 tree_vector_builder v;
2355 if (!v.new_unary_operation (type, arg1, step_ok_p))
2356 return NULL_TREE;
2357 unsigned int len = v.encoded_nelts ();
2358 for (unsigned int i = 0; i < len; ++i)
2360 tree elt = VECTOR_CST_ELT (arg1, i);
2361 tree cvt = fold_convert_const (code, elttype, elt);
2362 if (cvt == NULL_TREE)
2363 return NULL_TREE;
2364 v.quick_push (cvt);
2366 return v.build ();
2369 return NULL_TREE;
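/* Illustrative sketch, not part of the original source: the dispatch above
   folds e.g. FLOAT_EXPR from the INTEGER_CST 2 to a double REAL_CST of
   value 2.0, and returns NULL_TREE whenever no constant folding applies.  */
#if 0
tree two = build_int_cst (integer_type_node, 2);
tree r = fold_convert_const (FLOAT_EXPR, double_type_node, two);
#endif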
2372 /* Construct a vector of zero elements of vector type TYPE. */
2374 static tree
2375 build_zero_vector (tree type)
2377 tree t;
2379 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2380 return build_vector_from_val (type, t);
2383 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2385 bool
2386 fold_convertible_p (const_tree type, const_tree arg)
2388 const_tree orig = TREE_TYPE (arg);
2390 if (type == orig)
2391 return true;
2393 if (TREE_CODE (arg) == ERROR_MARK
2394 || TREE_CODE (type) == ERROR_MARK
2395 || TREE_CODE (orig) == ERROR_MARK)
2396 return false;
2398 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2399 return true;
2401 switch (TREE_CODE (type))
2403 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2404 case POINTER_TYPE: case REFERENCE_TYPE:
2405 case OFFSET_TYPE:
2406 return (INTEGRAL_TYPE_P (orig)
2407 || (POINTER_TYPE_P (orig)
2408 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2409 || TREE_CODE (orig) == OFFSET_TYPE);
2411 case REAL_TYPE:
2412 case FIXED_POINT_TYPE:
2413 case VOID_TYPE:
2414 return TREE_CODE (type) == TREE_CODE (orig);
2416 case VECTOR_TYPE:
2417 return (VECTOR_TYPE_P (orig)
2418 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2419 TYPE_VECTOR_SUBPARTS (orig))
2420 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2422 default:
2423 return false;
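/* Illustrative sketch, not part of the original source: an int expression
   is NOP-convertible to long, whereas a conversion to a vector type of a
   different size is rejected.  */
#if 0
bool ok = fold_convertible_p (long_integer_type_node,
			      build_int_cst (integer_type_node, 1));
#endif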
2427 /* Convert expression ARG to type TYPE. Used by the middle-end for
2428 simple conversions in preference to calling the front-end's convert. */
2430 tree
2431 fold_convert_loc (location_t loc, tree type, tree arg)
2433 tree orig = TREE_TYPE (arg);
2434 tree tem;
2436 if (type == orig)
2437 return arg;
2439 if (TREE_CODE (arg) == ERROR_MARK
2440 || TREE_CODE (type) == ERROR_MARK
2441 || TREE_CODE (orig) == ERROR_MARK)
2442 return error_mark_node;
2444 switch (TREE_CODE (type))
2446 case POINTER_TYPE:
2447 case REFERENCE_TYPE:
2448 /* Handle conversions between pointers to different address spaces. */
2449 if (POINTER_TYPE_P (orig)
2450 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2451 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2452 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2453 /* fall through */
2455 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2456 case OFFSET_TYPE:
2457 if (TREE_CODE (arg) == INTEGER_CST)
2459 tem = fold_convert_const (NOP_EXPR, type, arg);
2460 if (tem != NULL_TREE)
2461 return tem;
2463 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2464 || TREE_CODE (orig) == OFFSET_TYPE)
2465 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2466 if (TREE_CODE (orig) == COMPLEX_TYPE)
2467 return fold_convert_loc (loc, type,
2468 fold_build1_loc (loc, REALPART_EXPR,
2469 TREE_TYPE (orig), arg));
2470 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2471 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2472 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2474 case REAL_TYPE:
2475 if (TREE_CODE (arg) == INTEGER_CST)
2477 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2478 if (tem != NULL_TREE)
2479 return tem;
2481 else if (TREE_CODE (arg) == REAL_CST)
2483 tem = fold_convert_const (NOP_EXPR, type, arg);
2484 if (tem != NULL_TREE)
2485 return tem;
2487 else if (TREE_CODE (arg) == FIXED_CST)
2489 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2490 if (tem != NULL_TREE)
2491 return tem;
2494 switch (TREE_CODE (orig))
2496 case INTEGER_TYPE:
2497 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2498 case POINTER_TYPE: case REFERENCE_TYPE:
2499 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2501 case REAL_TYPE:
2502 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2504 case FIXED_POINT_TYPE:
2505 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2507 case COMPLEX_TYPE:
2508 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2509 return fold_convert_loc (loc, type, tem);
2511 default:
2512 gcc_unreachable ();
2515 case FIXED_POINT_TYPE:
2516 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2517 || TREE_CODE (arg) == REAL_CST)
2519 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2520 if (tem != NULL_TREE)
2521 goto fold_convert_exit;
2524 switch (TREE_CODE (orig))
2526 case FIXED_POINT_TYPE:
2527 case INTEGER_TYPE:
2528 case ENUMERAL_TYPE:
2529 case BOOLEAN_TYPE:
2530 case REAL_TYPE:
2531 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2533 case COMPLEX_TYPE:
2534 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2535 return fold_convert_loc (loc, type, tem);
2537 default:
2538 gcc_unreachable ();
2541 case COMPLEX_TYPE:
2542 switch (TREE_CODE (orig))
2544 case INTEGER_TYPE:
2545 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2546 case POINTER_TYPE: case REFERENCE_TYPE:
2547 case REAL_TYPE:
2548 case FIXED_POINT_TYPE:
2549 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2550 fold_convert_loc (loc, TREE_TYPE (type), arg),
2551 fold_convert_loc (loc, TREE_TYPE (type),
2552 integer_zero_node));
2553 case COMPLEX_TYPE:
2555 tree rpart, ipart;
2557 if (TREE_CODE (arg) == COMPLEX_EXPR)
2559 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2560 TREE_OPERAND (arg, 0));
2561 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2562 TREE_OPERAND (arg, 1));
2563 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2566 arg = save_expr (arg);
2567 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2568 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2569 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2570 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2571 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2574 default:
2575 gcc_unreachable ();
2578 case VECTOR_TYPE:
2579 if (integer_zerop (arg))
2580 return build_zero_vector (type);
2581 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2582 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2583 || TREE_CODE (orig) == VECTOR_TYPE);
2584 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2586 case VOID_TYPE:
2587 tem = fold_ignored_result (arg);
2588 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2590 default:
2591 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2592 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2593 gcc_unreachable ();
2595 fold_convert_exit:
2596 protected_set_expr_location_unshare (tem, loc);
2597 return tem;
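/* Illustrative sketch, not part of the original source: converting a
   scalar to a complex type builds COMPLEX_EXPR <(double) 1, 0.0>;
   fold_convert is the usual UNKNOWN_LOCATION wrapper around
   fold_convert_loc.  */
#if 0
tree c = fold_convert (complex_double_type_node, integer_one_node);
#endif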
2600 /* Return false if expr can be assumed not to be an lvalue, true
2601 otherwise. */
2603 static bool
2604 maybe_lvalue_p (const_tree x)
2606 /* We only need to wrap lvalue tree codes. */
2607 switch (TREE_CODE (x))
2609 case VAR_DECL:
2610 case PARM_DECL:
2611 case RESULT_DECL:
2612 case LABEL_DECL:
2613 case FUNCTION_DECL:
2614 case SSA_NAME:
2616 case COMPONENT_REF:
2617 case MEM_REF:
2618 case INDIRECT_REF:
2619 case ARRAY_REF:
2620 case ARRAY_RANGE_REF:
2621 case BIT_FIELD_REF:
2622 case OBJ_TYPE_REF:
2624 case REALPART_EXPR:
2625 case IMAGPART_EXPR:
2626 case PREINCREMENT_EXPR:
2627 case PREDECREMENT_EXPR:
2628 case SAVE_EXPR:
2629 case TRY_CATCH_EXPR:
2630 case WITH_CLEANUP_EXPR:
2631 case COMPOUND_EXPR:
2632 case MODIFY_EXPR:
2633 case TARGET_EXPR:
2634 case COND_EXPR:
2635 case BIND_EXPR:
2636 case VIEW_CONVERT_EXPR:
2637 break;
2639 default:
2640 /* Assume the worst for front-end tree codes. */
2641 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2642 break;
2643 return false;
2646 return true;
2649 /* Return an expr equal to X but certainly not valid as an lvalue. */
2651 tree
2652 non_lvalue_loc (location_t loc, tree x)
2654 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2655 us. */
2656 if (in_gimple_form)
2657 return x;
2659 if (! maybe_lvalue_p (x))
2660 return x;
2661 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2664 /* Given a tree comparison code, return the code that is the logical inverse.
2665 It is generally not safe to do this for floating-point comparisons, except
2666 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2667 ERROR_MARK in this case. */
2669 enum tree_code
2670 invert_tree_comparison (enum tree_code code, bool honor_nans)
2672 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2673 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2674 return ERROR_MARK;
2676 switch (code)
2678 case EQ_EXPR:
2679 return NE_EXPR;
2680 case NE_EXPR:
2681 return EQ_EXPR;
2682 case GT_EXPR:
2683 return honor_nans ? UNLE_EXPR : LE_EXPR;
2684 case GE_EXPR:
2685 return honor_nans ? UNLT_EXPR : LT_EXPR;
2686 case LT_EXPR:
2687 return honor_nans ? UNGE_EXPR : GE_EXPR;
2688 case LE_EXPR:
2689 return honor_nans ? UNGT_EXPR : GT_EXPR;
2690 case LTGT_EXPR:
2691 return UNEQ_EXPR;
2692 case UNEQ_EXPR:
2693 return LTGT_EXPR;
2694 case UNGT_EXPR:
2695 return LE_EXPR;
2696 case UNGE_EXPR:
2697 return LT_EXPR;
2698 case UNLT_EXPR:
2699 return GE_EXPR;
2700 case UNLE_EXPR:
2701 return GT_EXPR;
2702 case ORDERED_EXPR:
2703 return UNORDERED_EXPR;
2704 case UNORDERED_EXPR:
2705 return ORDERED_EXPR;
2706 default:
2707 gcc_unreachable ();
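/* Illustrative sketch, not part of the original source: with NaNs honored
   (and trapping math off), the inverse of a < b is !(a < b), i.e.
   UNGE_EXPR; with trapping math LT_EXPR cannot be inverted safely, so
   ERROR_MARK is returned instead.  */
#if 0
enum tree_code inv = invert_tree_comparison (LT_EXPR, /*honor_nans=*/true);
#endif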
2711 /* Similar, but return the comparison that results if the operands are
2712 swapped. This is safe for floating-point. */
2714 enum tree_code
2715 swap_tree_comparison (enum tree_code code)
2717 switch (code)
2719 case EQ_EXPR:
2720 case NE_EXPR:
2721 case ORDERED_EXPR:
2722 case UNORDERED_EXPR:
2723 case LTGT_EXPR:
2724 case UNEQ_EXPR:
2725 return code;
2726 case GT_EXPR:
2727 return LT_EXPR;
2728 case GE_EXPR:
2729 return LE_EXPR;
2730 case LT_EXPR:
2731 return GT_EXPR;
2732 case LE_EXPR:
2733 return GE_EXPR;
2734 case UNGT_EXPR:
2735 return UNLT_EXPR;
2736 case UNGE_EXPR:
2737 return UNLE_EXPR;
2738 case UNLT_EXPR:
2739 return UNGT_EXPR;
2740 case UNLE_EXPR:
2741 return UNGE_EXPR;
2742 default:
2743 gcc_unreachable ();
2748 /* Convert a comparison tree code from an enum tree_code representation
2749 into a compcode bit-based encoding. This function is the inverse of
2750 compcode_to_comparison. */
2752 static enum comparison_code
2753 comparison_to_compcode (enum tree_code code)
2755 switch (code)
2757 case LT_EXPR:
2758 return COMPCODE_LT;
2759 case EQ_EXPR:
2760 return COMPCODE_EQ;
2761 case LE_EXPR:
2762 return COMPCODE_LE;
2763 case GT_EXPR:
2764 return COMPCODE_GT;
2765 case NE_EXPR:
2766 return COMPCODE_NE;
2767 case GE_EXPR:
2768 return COMPCODE_GE;
2769 case ORDERED_EXPR:
2770 return COMPCODE_ORD;
2771 case UNORDERED_EXPR:
2772 return COMPCODE_UNORD;
2773 case UNLT_EXPR:
2774 return COMPCODE_UNLT;
2775 case UNEQ_EXPR:
2776 return COMPCODE_UNEQ;
2777 case UNLE_EXPR:
2778 return COMPCODE_UNLE;
2779 case UNGT_EXPR:
2780 return COMPCODE_UNGT;
2781 case LTGT_EXPR:
2782 return COMPCODE_LTGT;
2783 case UNGE_EXPR:
2784 return COMPCODE_UNGE;
2785 default:
2786 gcc_unreachable ();
2790 /* Convert a compcode bit-based encoding of a comparison operator back
2791 to GCC's enum tree_code representation. This function is the
2792 inverse of comparison_to_compcode. */
2794 static enum tree_code
2795 compcode_to_comparison (enum comparison_code code)
2797 switch (code)
2799 case COMPCODE_LT:
2800 return LT_EXPR;
2801 case COMPCODE_EQ:
2802 return EQ_EXPR;
2803 case COMPCODE_LE:
2804 return LE_EXPR;
2805 case COMPCODE_GT:
2806 return GT_EXPR;
2807 case COMPCODE_NE:
2808 return NE_EXPR;
2809 case COMPCODE_GE:
2810 return GE_EXPR;
2811 case COMPCODE_ORD:
2812 return ORDERED_EXPR;
2813 case COMPCODE_UNORD:
2814 return UNORDERED_EXPR;
2815 case COMPCODE_UNLT:
2816 return UNLT_EXPR;
2817 case COMPCODE_UNEQ:
2818 return UNEQ_EXPR;
2819 case COMPCODE_UNLE:
2820 return UNLE_EXPR;
2821 case COMPCODE_UNGT:
2822 return UNGT_EXPR;
2823 case COMPCODE_LTGT:
2824 return LTGT_EXPR;
2825 case COMPCODE_UNGE:
2826 return UNGE_EXPR;
2827 default:
2828 gcc_unreachable ();
2832 /* Return true if COND1 tests the opposite condition of COND2. */
2834 bool
2835 inverse_conditions_p (const_tree cond1, const_tree cond2)
2837 return (COMPARISON_CLASS_P (cond1)
2838 && COMPARISON_CLASS_P (cond2)
2839 && (invert_tree_comparison
2840 (TREE_CODE (cond1),
2841 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2842 && operand_equal_p (TREE_OPERAND (cond1, 0),
2843 TREE_OPERAND (cond2, 0), 0)
2844 && operand_equal_p (TREE_OPERAND (cond1, 1),
2845 TREE_OPERAND (cond2, 1), 0));
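/* Illustrative sketch, not part of the original source; A and B are
   hypothetical integer operands. A < B and A >= B test opposite
   conditions, so inverse_conditions_p returns true for the pair.  */
#if 0
bool p = inverse_conditions_p (build2 (LT_EXPR, boolean_type_node, a, b),
			       build2 (GE_EXPR, boolean_type_node, a, b));
#endif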
2848 /* Return a tree for the comparison which is the combination of
2849 doing the AND or OR (depending on CODE) of the two operations LCODE
2850 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2851 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2852 if this makes the transformation invalid. */
2854 tree
2855 combine_comparisons (location_t loc,
2856 enum tree_code code, enum tree_code lcode,
2857 enum tree_code rcode, tree truth_type,
2858 tree ll_arg, tree lr_arg)
2860 bool honor_nans = HONOR_NANS (ll_arg);
2861 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2862 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2863 int compcode;
2865 switch (code)
2867 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2868 compcode = lcompcode & rcompcode;
2869 break;
2871 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2872 compcode = lcompcode | rcompcode;
2873 break;
2875 default:
2876 return NULL_TREE;
2879 if (!honor_nans)
2881 /* Eliminate unordered comparisons, as well as LTGT and ORD
2882 which are not used unless the mode has NaNs. */
2883 compcode &= ~COMPCODE_UNORD;
2884 if (compcode == COMPCODE_LTGT)
2885 compcode = COMPCODE_NE;
2886 else if (compcode == COMPCODE_ORD)
2887 compcode = COMPCODE_TRUE;
2889 else if (flag_trapping_math)
2891 /* Check that the original operation and the optimized ones will trap
2892 under the same condition. */
2893 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2894 && (lcompcode != COMPCODE_EQ)
2895 && (lcompcode != COMPCODE_ORD);
2896 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2897 && (rcompcode != COMPCODE_EQ)
2898 && (rcompcode != COMPCODE_ORD);
2899 bool trap = (compcode & COMPCODE_UNORD) == 0
2900 && (compcode != COMPCODE_EQ)
2901 && (compcode != COMPCODE_ORD);
2903 /* In a short-circuited boolean expression the LHS might be
2904 such that the RHS, if evaluated, will never trap. For
2905 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2906 if neither x nor y is NaN. (This is a mixed blessing: for
2907 example, the expression above will never trap, hence
2908 optimizing it to x < y would be invalid). */
2909 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2910 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2911 rtrap = false;
2913 /* If the comparison was short-circuited, and only the RHS
2914 trapped, we may now generate a spurious trap. */
2915 if (rtrap && !ltrap
2916 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2917 return NULL_TREE;
2919 /* If we changed the conditions that cause a trap, we lose. */
2920 if ((ltrap || rtrap) != trap)
2921 return NULL_TREE;
2924 if (compcode == COMPCODE_TRUE)
2925 return constant_boolean_node (true, truth_type);
2926 else if (compcode == COMPCODE_FALSE)
2927 return constant_boolean_node (false, truth_type);
2928 else
2930 enum tree_code tcode;
2932 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2933 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
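/* Illustrative sketch, not part of the original source; X and Y are
   hypothetical integer operands. (x < y) || (x == y) combines
   COMPCODE_LT | COMPCODE_EQ into COMPCODE_LE, yielding the single
   comparison x <= y.  */
#if 0
tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
			      LT_EXPR, EQ_EXPR, boolean_type_node, x, y);
#endif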
2937 /* Return nonzero if two operands (typically of the same tree node)
2938 are necessarily equal. FLAGS modifies behavior as follows:
2940 If OEP_ONLY_CONST is set, only return nonzero for constants.
2941 This function tests whether the operands are indistinguishable;
2942 it does not test whether they are equal using C's == operation.
2943 The distinction is important for IEEE floating point, because
2944 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2945 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2947 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2948 even though it may hold multiple values during a function.
2949 This is because a GCC tree node guarantees that nothing else is
2950 executed between the evaluation of its "operands" (which may often
2951 be evaluated in arbitrary order). Hence if the operands themselves
2952 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2953 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2954 unset means assuming isochronic (or instantaneous) tree equivalence.
2955 Unless comparing arbitrary expression trees, such as from different
2956 statements, this flag can usually be left unset.
2958 If OEP_PURE_SAME is set, then pure functions with identical arguments
2959 are considered the same. It is used when the caller has other ways
2960 to ensure that global memory is unchanged in between.
2962 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2963 not values of expressions.
2965 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2966 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2968 If OEP_BITWISE is set, then require the values to be bitwise identical
2969 rather than simply numerically equal. Do not take advantage of things
2970 like math-related flags or undefined behavior; only return true for
2971 values that are provably bitwise identical in all circumstances.
2973 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2974 any operand with side effects. This is unnecessarily conservative in the
2975 case we know that arg0 and arg1 are in disjoint code paths (such as in
2976 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2977 addresses with TREE_CONSTANT flag set so we know that &var == &var
2978 even if var is volatile. */
2980 bool
2981 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2982 unsigned int flags)
2984 bool r;
2985 if (verify_hash_value (arg0, arg1, flags, &r))
2986 return r;
2988 STRIP_ANY_LOCATION_WRAPPER (arg0);
2989 STRIP_ANY_LOCATION_WRAPPER (arg1);
2991 /* If either is ERROR_MARK, they aren't equal. */
2992 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2993 || TREE_TYPE (arg0) == error_mark_node
2994 || TREE_TYPE (arg1) == error_mark_node)
2995 return false;
2997 /* Similarly, if either does not have a type (like a template id),
2998 they aren't equal. */
2999 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3000 return false;
3002 /* Bitwise identity makes no sense if the values have different layouts. */
3003 if ((flags & OEP_BITWISE)
3004 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3005 return false;
3007 /* We cannot consider pointers to different address spaces equal. */
3008 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3009 && POINTER_TYPE_P (TREE_TYPE (arg1))
3010 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3011 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3012 return false;
3014 /* Check equality of integer constants before bailing out due to
3015 precision differences. */
3016 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3018 /* Address of INTEGER_CST is not defined; check that we did not forget
3019 to drop the OEP_ADDRESS_OF flags. */
3020 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3021 return tree_int_cst_equal (arg0, arg1);
3024 if (!(flags & OEP_ADDRESS_OF))
3026 /* If both types don't have the same signedness, then we can't consider
3027 them equal. We must check this before the STRIP_NOPS calls
3028 because they may change the signedness of the arguments. As pointers
3029 strictly don't have a signedness, require either two pointers or
3030 two non-pointers as well. */
3031 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3032 || POINTER_TYPE_P (TREE_TYPE (arg0))
3033 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3034 return false;
3036 /* If both types don't have the same precision, then it is not safe
3037 to strip NOPs. */
3038 if (element_precision (TREE_TYPE (arg0))
3039 != element_precision (TREE_TYPE (arg1)))
3040 return false;
3042 STRIP_NOPS (arg0);
3043 STRIP_NOPS (arg1);
3045 #if 0
3046 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3047 sanity check once the issue is solved. */
3048 else
3049 /* Addresses of conversions and SSA_NAMEs (and many other things)
3050 are not defined. Check that we did not forget to drop the
3051 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3052 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3053 && TREE_CODE (arg0) != SSA_NAME);
3054 #endif
3056 /* In case both args are comparisons but with different comparison
3057 code, try to swap the comparison operands of one arg to produce
3058 a match and compare that variant. */
3059 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3060 && COMPARISON_CLASS_P (arg0)
3061 && COMPARISON_CLASS_P (arg1))
3063 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3065 if (TREE_CODE (arg0) == swap_code)
3066 return operand_equal_p (TREE_OPERAND (arg0, 0),
3067 TREE_OPERAND (arg1, 1), flags)
3068 && operand_equal_p (TREE_OPERAND (arg0, 1),
3069 TREE_OPERAND (arg1, 0), flags);
3072 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3074 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3075 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3077 else if (flags & OEP_ADDRESS_OF)
3079 /* If we are interested in comparing addresses ignore
3080 MEM_REF wrappings of the base that can appear just for
3081 TBAA reasons. */
3082 if (TREE_CODE (arg0) == MEM_REF
3083 && DECL_P (arg1)
3084 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3085 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3086 && integer_zerop (TREE_OPERAND (arg0, 1)))
3087 return true;
3088 else if (TREE_CODE (arg1) == MEM_REF
3089 && DECL_P (arg0)
3090 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3091 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3092 && integer_zerop (TREE_OPERAND (arg1, 1)))
3093 return true;
3094 return false;
3096 else
3097 return false;
3100 /* When not checking addresses, this is needed for conversions and for
3101 COMPONENT_REF. Might as well play it safe and always test this. */
3102 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3103 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3104 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3105 && !(flags & OEP_ADDRESS_OF)))
3106 return false;
3108 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3109 We don't care about side effects in that case because the SAVE_EXPR
3110 takes care of that for us. In all other cases, two expressions are
3111 equal if they have no side effects. If we have two identical
3112 expressions with side effects that should be treated the same due
3113 to the only side effects being identical SAVE_EXPR's, that will
3114 be detected in the recursive calls below.
3115 If we are taking an invariant address of two identical objects
3116 they are necessarily equal as well. */
3117 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3118 && (TREE_CODE (arg0) == SAVE_EXPR
3119 || (flags & OEP_MATCH_SIDE_EFFECTS)
3120 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3121 return true;
3123 /* Next handle constant cases, those for which we can return 1 even
3124 if ONLY_CONST is set. */
3125 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3126 switch (TREE_CODE (arg0))
3128 case INTEGER_CST:
3129 return tree_int_cst_equal (arg0, arg1);
3131 case FIXED_CST:
3132 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3133 TREE_FIXED_CST (arg1));
3135 case REAL_CST:
3136 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3137 return true;
3139 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3141 /* If we do not distinguish between signed and unsigned zero,
3142 consider them equal. */
3143 if (real_zerop (arg0) && real_zerop (arg1))
3144 return true;
3146 return false;
3148 case VECTOR_CST:
3150 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3151 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3152 return false;
3154 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3155 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3156 return false;
3158 unsigned int count = vector_cst_encoded_nelts (arg0);
3159 for (unsigned int i = 0; i < count; ++i)
3160 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3161 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3162 return false;
3163 return true;
3166 case COMPLEX_CST:
3167 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3168 flags)
3169 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3170 flags));
3172 case STRING_CST:
3173 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3174 && ! memcmp (TREE_STRING_POINTER (arg0),
3175 TREE_STRING_POINTER (arg1),
3176 TREE_STRING_LENGTH (arg0)));
3178 case ADDR_EXPR:
3179 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3180 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3181 flags | OEP_ADDRESS_OF
3182 | OEP_MATCH_SIDE_EFFECTS);
3183 case CONSTRUCTOR:
3184 /* In GIMPLE empty constructors are allowed in initializers of
3185 aggregates. */
3186 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3187 default:
3188 break;
3191 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3192 two instances of undefined behavior will give identical results. */
3193 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3194 return false;
3196 /* Define macros to test an operand from arg0 and arg1 for equality and a
3197 variant that allows null and views null as being different from any
3198 non-null value. In the latter case, if either is null, they both
3199 must be; otherwise, do the normal comparison. */
3200 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3201 TREE_OPERAND (arg1, N), flags)
3203 #define OP_SAME_WITH_NULL(N) \
3204 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3205 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3207 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3209 case tcc_unary:
3210 /* Two conversions are equal only if signedness and modes match. */
3211 switch (TREE_CODE (arg0))
3213 CASE_CONVERT:
3214 case FIX_TRUNC_EXPR:
3215 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3216 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3217 return false;
3218 break;
3219 default:
3220 break;
3223 return OP_SAME (0);
3226 case tcc_comparison:
3227 case tcc_binary:
3228 if (OP_SAME (0) && OP_SAME (1))
3229 return true;
3231 /* For commutative ops, allow the other order. */
3232 return (commutative_tree_code (TREE_CODE (arg0))
3233 && operand_equal_p (TREE_OPERAND (arg0, 0),
3234 TREE_OPERAND (arg1, 1), flags)
3235 && operand_equal_p (TREE_OPERAND (arg0, 1),
3236 TREE_OPERAND (arg1, 0), flags));
3238 case tcc_reference:
3239 /* If either of the pointer (or reference) expressions we are
3240 dereferencing contain a side effect, these cannot be equal,
3241 but their addresses can be. */
3242 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3243 && (TREE_SIDE_EFFECTS (arg0)
3244 || TREE_SIDE_EFFECTS (arg1)))
3245 return false;
3247 switch (TREE_CODE (arg0))
3249 case INDIRECT_REF:
3250 if (!(flags & OEP_ADDRESS_OF))
3252 if (TYPE_ALIGN (TREE_TYPE (arg0))
3253 != TYPE_ALIGN (TREE_TYPE (arg1)))
3254 return false;
3255 /* Verify that the access types are compatible. */
3256 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3257 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3258 return false;
3260 flags &= ~OEP_ADDRESS_OF;
3261 return OP_SAME (0);
3263 case IMAGPART_EXPR:
3264 /* Require the same offset. */
3265 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3266 TYPE_SIZE (TREE_TYPE (arg1)),
3267 flags & ~OEP_ADDRESS_OF))
3268 return false;
3270 /* Fallthru. */
3271 case REALPART_EXPR:
3272 case VIEW_CONVERT_EXPR:
3273 return OP_SAME (0);
3275 case TARGET_MEM_REF:
3276 case MEM_REF:
3277 if (!(flags & OEP_ADDRESS_OF))
3279 /* Require equal access sizes */
3280 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3281 && (!TYPE_SIZE (TREE_TYPE (arg0))
3282 || !TYPE_SIZE (TREE_TYPE (arg1))
3283 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3284 TYPE_SIZE (TREE_TYPE (arg1)),
3285 flags)))
3286 return false;
3287 /* Verify that access happens in similar types. */
3288 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3289 return false;
3290 /* Verify that accesses are TBAA compatible. */
3291 if (!alias_ptr_types_compatible_p
3292 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3293 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3294 || (MR_DEPENDENCE_CLIQUE (arg0)
3295 != MR_DEPENDENCE_CLIQUE (arg1))
3296 || (MR_DEPENDENCE_BASE (arg0)
3297 != MR_DEPENDENCE_BASE (arg1)))
3298 return false;
3299 /* Verify that alignment is compatible. */
3300 if (TYPE_ALIGN (TREE_TYPE (arg0))
3301 != TYPE_ALIGN (TREE_TYPE (arg1)))
3302 return false;
3304 flags &= ~OEP_ADDRESS_OF;
3305 return (OP_SAME (0) && OP_SAME (1)
3306 /* TARGET_MEM_REFs require equal extra operands. */
3307 && (TREE_CODE (arg0) != TARGET_MEM_REF
3308 || (OP_SAME_WITH_NULL (2)
3309 && OP_SAME_WITH_NULL (3)
3310 && OP_SAME_WITH_NULL (4))));
3312 case ARRAY_REF:
3313 case ARRAY_RANGE_REF:
3314 if (!OP_SAME (0))
3315 return false;
3316 flags &= ~OEP_ADDRESS_OF;
3317 /* If the array index is constant, compare it by value first, as the
3318 indexes may have different types but the same value here. */
3319 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3320 TREE_OPERAND (arg1, 1))
3321 || OP_SAME (1))
3322 && OP_SAME_WITH_NULL (2)
3323 && OP_SAME_WITH_NULL (3)
3324 /* Compare low bound and element size as with OEP_ADDRESS_OF
3325 we have to account for the offset of the ref. */
3326 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3327 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3328 || (operand_equal_p (array_ref_low_bound
3329 (CONST_CAST_TREE (arg0)),
3330 array_ref_low_bound
3331 (CONST_CAST_TREE (arg1)), flags)
3332 && operand_equal_p (array_ref_element_size
3333 (CONST_CAST_TREE (arg0)),
3334 array_ref_element_size
3335 (CONST_CAST_TREE (arg1)),
3336 flags))));
3338 case COMPONENT_REF:
3339 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3340 may be NULL when we're called to compare MEM_EXPRs. */
3341 if (!OP_SAME_WITH_NULL (0))
3342 return false;
3344 bool compare_address = flags & OEP_ADDRESS_OF;
3346 /* Most of the time we only need to compare FIELD_DECLs for equality.
3347 However, when determining the address, look at the actual offsets.
3348 These may match for unions and unshared record types. */
3349 flags &= ~OEP_ADDRESS_OF;
3350 if (!OP_SAME (1))
3352 if (compare_address
3353 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3355 if (TREE_OPERAND (arg0, 2)
3356 || TREE_OPERAND (arg1, 2))
3357 return OP_SAME_WITH_NULL (2);
3358 tree field0 = TREE_OPERAND (arg0, 1);
3359 tree field1 = TREE_OPERAND (arg1, 1);
3361 /* Non-FIELD_DECL operands can appear in C++ templates. */
3362 if (TREE_CODE (field0) != FIELD_DECL
3363 || TREE_CODE (field1) != FIELD_DECL
3364 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3365 DECL_FIELD_OFFSET (field1), flags)
3366 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3367 DECL_FIELD_BIT_OFFSET (field1),
3368 flags))
3369 return false;
3371 else
3372 return false;
3375 return OP_SAME_WITH_NULL (2);
3377 case BIT_FIELD_REF:
3378 if (!OP_SAME (0))
3379 return false;
3380 flags &= ~OEP_ADDRESS_OF;
3381 return OP_SAME (1) && OP_SAME (2);
3383 default:
3384 return false;
3387 case tcc_expression:
3388 switch (TREE_CODE (arg0))
3390 case ADDR_EXPR:
3391 /* Be sure we pass the right ADDRESS_OF flag. */
3392 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3393 return operand_equal_p (TREE_OPERAND (arg0, 0),
3394 TREE_OPERAND (arg1, 0),
3395 flags | OEP_ADDRESS_OF);
3397 case TRUTH_NOT_EXPR:
3398 return OP_SAME (0);
3400 case TRUTH_ANDIF_EXPR:
3401 case TRUTH_ORIF_EXPR:
3402 return OP_SAME (0) && OP_SAME (1);
3404 case WIDEN_MULT_PLUS_EXPR:
3405 case WIDEN_MULT_MINUS_EXPR:
3406 if (!OP_SAME (2))
3407 return false;
3408 /* The multiplication operands are commutative. */
3409 /* FALLTHRU */
3411 case TRUTH_AND_EXPR:
3412 case TRUTH_OR_EXPR:
3413 case TRUTH_XOR_EXPR:
3414 if (OP_SAME (0) && OP_SAME (1))
3415 return true;
3417 /* Otherwise take into account that this is a commutative operation. */
3418 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3419 TREE_OPERAND (arg1, 1), flags)
3420 && operand_equal_p (TREE_OPERAND (arg0, 1),
3421 TREE_OPERAND (arg1, 0), flags));
3423 case COND_EXPR:
3424 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3425 return false;
3426 flags &= ~OEP_ADDRESS_OF;
3427 return OP_SAME (0);
3429 case BIT_INSERT_EXPR:
3430 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3431 of op1. Need to check to make sure they are the same. */
3432 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3433 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3434 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3435 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3436 return false;
3437 /* FALLTHRU */
3439 case VEC_COND_EXPR:
3440 case DOT_PROD_EXPR:
3441 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3443 case MODIFY_EXPR:
3444 case INIT_EXPR:
3445 case COMPOUND_EXPR:
3446 case PREDECREMENT_EXPR:
3447 case PREINCREMENT_EXPR:
3448 case POSTDECREMENT_EXPR:
3449 case POSTINCREMENT_EXPR:
3450 if (flags & OEP_LEXICOGRAPHIC)
3451 return OP_SAME (0) && OP_SAME (1);
3452 return false;
3454 case CLEANUP_POINT_EXPR:
3455 case EXPR_STMT:
3456 case SAVE_EXPR:
3457 if (flags & OEP_LEXICOGRAPHIC)
3458 return OP_SAME (0);
3459 return false;
3461 case OBJ_TYPE_REF:
3462 /* Virtual table reference. */
3463 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3464 OBJ_TYPE_REF_EXPR (arg1), flags))
3465 return false;
3466 flags &= ~OEP_ADDRESS_OF;
3467 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3468 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3469 return false;
3470 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3471 OBJ_TYPE_REF_OBJECT (arg1), flags))
3472 return false;
3473 if (virtual_method_call_p (arg0))
3475 if (!virtual_method_call_p (arg1))
3476 return false;
3477 return types_same_for_odr (obj_type_ref_class (arg0),
3478 obj_type_ref_class (arg1));
3480 return false;
3482 default:
3483 return false;
3486 case tcc_vl_exp:
3487 switch (TREE_CODE (arg0))
3489 case CALL_EXPR:
3490 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3491 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3492 /* If one CALL_EXPR is internal and the other is a normal function
3493 call, then they are not equal. */
3494 return false;
3495 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3497 /* If the CALL_EXPRs call different internal functions, then they
3498 are not equal. */
3499 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3500 return false;
3502 else
3504 /* If the CALL_EXPRs call different functions, then they are not
3505 equal. */
3506 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3507 flags))
3508 return false;
3511 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3513 unsigned int cef = call_expr_flags (arg0);
3514 if (flags & OEP_PURE_SAME)
3515 cef &= ECF_CONST | ECF_PURE;
3516 else
3517 cef &= ECF_CONST;
3518 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3519 return false;
3522 /* Now see if all the arguments are the same. */
3524 const_call_expr_arg_iterator iter0, iter1;
3525 const_tree a0, a1;
3526 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3527 a1 = first_const_call_expr_arg (arg1, &iter1);
3528 a0 && a1;
3529 a0 = next_const_call_expr_arg (&iter0),
3530 a1 = next_const_call_expr_arg (&iter1))
3531 if (! operand_equal_p (a0, a1, flags))
3532 return false;
3534 /* If we get here and both argument lists are exhausted
3535 then the CALL_EXPRs are equal. */
3536 return ! (a0 || a1);
3538 default:
3539 return false;
3542 case tcc_declaration:
3543 /* Consider __builtin_sqrt equal to sqrt. */
3544 if (TREE_CODE (arg0) == FUNCTION_DECL)
3545 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3546 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3547 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3548 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3550 if (DECL_P (arg0)
3551 && (flags & OEP_DECL_NAME)
3552 && (flags & OEP_LEXICOGRAPHIC))
3554 /* Consider decls with the same name equal. The caller needs
3555 to make sure they refer to the same entity (such as a function
3556 formal parameter). */
3557 tree a0name = DECL_NAME (arg0);
3558 tree a1name = DECL_NAME (arg1);
3559 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3560 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3561 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3563 return false;
3565 case tcc_exceptional:
3566 if (TREE_CODE (arg0) == CONSTRUCTOR)
3568 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3569 return false;
3571 /* In GIMPLE constructors are used only to build vectors from
3572 elements. Individual elements in the constructor must be
3573 indexed in increasing order and form an initial sequence.
3575 We make no effort to compare constructors in generic.
3576 (see sem_variable::equals in ipa-icf which can do so for
3577 constants). */
3578 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3579 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3580 return false;
3582 /* Be sure that vectors constructed have the same representation.
3583 We have only tested that element precision and modes match.
3584 Vectors may be BLKmode, so also check that the number of
3585 parts matches. */
3586 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3587 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3588 return false;
3590 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3591 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3592 unsigned int len = vec_safe_length (v0);
3594 if (len != vec_safe_length (v1))
3595 return false;
3597 for (unsigned int i = 0; i < len; i++)
3599 constructor_elt *c0 = &(*v0)[i];
3600 constructor_elt *c1 = &(*v1)[i];
3602 if (!operand_equal_p (c0->value, c1->value, flags)
3603 /* In GIMPLE the indexes can be either NULL or matching i.
3604 Double check this so we won't get false
3605 positives for GENERIC. */
3606 || (c0->index
3607 && (TREE_CODE (c0->index) != INTEGER_CST
3608 || compare_tree_int (c0->index, i)))
3609 || (c1->index
3610 && (TREE_CODE (c1->index) != INTEGER_CST
3611 || compare_tree_int (c1->index, i))))
3612 return false;
3614 return true;
3616 else if (TREE_CODE (arg0) == STATEMENT_LIST
3617 && (flags & OEP_LEXICOGRAPHIC))
3619 /* Compare the STATEMENT_LISTs. */
3620 tree_stmt_iterator tsi1, tsi2;
3621 tree body1 = CONST_CAST_TREE (arg0);
3622 tree body2 = CONST_CAST_TREE (arg1);
3623 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3624 tsi_next (&tsi1), tsi_next (&tsi2))
3626 /* The lists don't have the same number of statements. */
3627 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3628 return false;
3629 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3630 return true;
3631 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3632 flags & (OEP_LEXICOGRAPHIC
3633 | OEP_NO_HASH_CHECK)))
3634 return false;
3637 return false;
3639 case tcc_statement:
3640 switch (TREE_CODE (arg0))
3642 case RETURN_EXPR:
3643 if (flags & OEP_LEXICOGRAPHIC)
3644 return OP_SAME_WITH_NULL (0);
3645 return false;
3646 case DEBUG_BEGIN_STMT:
3647 if (flags & OEP_LEXICOGRAPHIC)
3648 return true;
3649 return false;
3650 default:
3651 return false;
3654 default:
3655 return false;
3658 #undef OP_SAME
3659 #undef OP_SAME_WITH_NULL
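/* Illustrative sketch, not part of the original source: two distinct
   INTEGER_CST nodes of the same value compare equal even under
   OEP_ONLY_CONST, while comparing a variable to itself requires
   OEP_ONLY_CONST to be unset.  */
#if 0
bool eq = operand_equal_p (build_int_cst (integer_type_node, 7),
			   build_int_cst (integer_type_node, 7),
			   OEP_ONLY_CONST);
#endif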
3662 /* Generate a hash value for an expression. This can be used iteratively
3663 by passing a previous result as the HSTATE argument. */
3665 void
3666 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3667 unsigned int flags)
3669 int i;
3670 enum tree_code code;
3671 enum tree_code_class tclass;
3673 if (t == NULL_TREE || t == error_mark_node)
3675 hstate.merge_hash (0);
3676 return;
3679 STRIP_ANY_LOCATION_WRAPPER (t);
3681 if (!(flags & OEP_ADDRESS_OF))
3682 STRIP_NOPS (t);
3684 code = TREE_CODE (t);
3686 switch (code)
3688 /* Alas, constants aren't shared, so we can't rely on pointer
3689 identity. */
3690 case VOID_CST:
3691 hstate.merge_hash (0);
3692 return;
3693 case INTEGER_CST:
3694 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3695 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3696 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3697 return;
3698 case REAL_CST:
3700 unsigned int val2;
3701 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3702 val2 = rvc_zero;
3703 else
3704 val2 = real_hash (TREE_REAL_CST_PTR (t));
3705 hstate.merge_hash (val2);
3706 return;
3708 case FIXED_CST:
3710 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3711 hstate.merge_hash (val2);
3712 return;
3714 case STRING_CST:
3715 hstate.add ((const void *) TREE_STRING_POINTER (t),
3716 TREE_STRING_LENGTH (t));
3717 return;
3718 case COMPLEX_CST:
3719 hash_operand (TREE_REALPART (t), hstate, flags);
3720 hash_operand (TREE_IMAGPART (t), hstate, flags);
3721 return;
3722 case VECTOR_CST:
3724 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3725 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3726 unsigned int count = vector_cst_encoded_nelts (t);
3727 for (unsigned int i = 0; i < count; ++i)
3728 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3729 return;
3731 case SSA_NAME:
3732 /* We can just compare by pointer. */
3733 hstate.add_hwi (SSA_NAME_VERSION (t));
3734 return;
3735 case PLACEHOLDER_EXPR:
3736 /* The node itself doesn't matter. */
3737 return;
3738 case BLOCK:
3739 case OMP_CLAUSE:
3740 /* Ignore. */
3741 return;
3742 case TREE_LIST:
3743 /* A list of expressions, for a CALL_EXPR or as the elements of a
3744 VECTOR_CST. */
3745 for (; t; t = TREE_CHAIN (t))
3746 hash_operand (TREE_VALUE (t), hstate, flags);
3747 return;
3748 case CONSTRUCTOR:
3750 unsigned HOST_WIDE_INT idx;
3751 tree field, value;
3752 flags &= ~OEP_ADDRESS_OF;
3753 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3754 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3756 /* In GIMPLE the indexes can be either NULL or matching i. */
3757 if (field == NULL_TREE)
3758 field = bitsize_int (idx);
3759 hash_operand (field, hstate, flags);
3760 hash_operand (value, hstate, flags);
3762 return;
3764 case STATEMENT_LIST:
3766 tree_stmt_iterator i;
3767 for (i = tsi_start (CONST_CAST_TREE (t));
3768 !tsi_end_p (i); tsi_next (&i))
3769 hash_operand (tsi_stmt (i), hstate, flags);
3770 return;
3772 case TREE_VEC:
3773 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3774 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3775 return;
3776 case IDENTIFIER_NODE:
3777 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3778 return;
3779 case FUNCTION_DECL:
3780 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3781 Otherwise nodes that compare equal according to operand_equal_p might
3782 get different hash codes. However, don't do this for machine specific
3783 or front end builtins, since the function code is overloaded in those
3784 cases. */
3785 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3786 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3788 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3789 code = TREE_CODE (t);
3791 /* FALL THROUGH */
3792 default:
3793 if (POLY_INT_CST_P (t))
3795 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3796 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3797 return;
3799 tclass = TREE_CODE_CLASS (code);
3801 if (tclass == tcc_declaration)
3803 /* DECLs have a unique ID. */
3804 hstate.add_hwi (DECL_UID (t));
3806 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3808 /* For comparisons that can be swapped, use the lower
3809 tree code. */
3810 enum tree_code ccode = swap_tree_comparison (code);
3811 if (code < ccode)
3812 ccode = code;
3813 hstate.add_object (ccode);
3814 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3815 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3817 else if (CONVERT_EXPR_CODE_P (code))
3819 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3820 operand_equal_p. */
3821 enum tree_code ccode = NOP_EXPR;
3822 hstate.add_object (ccode);
3824 /* Don't hash the type, as that can lead to having nodes which
3825 compare equal according to operand_equal_p, but which
3826 have different hash codes. Make sure to include signedness
3827 in the hash computation. */
3828 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3829 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3831 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3832 else if (code == MEM_REF
3833 && (flags & OEP_ADDRESS_OF) != 0
3834 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3835 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3836 && integer_zerop (TREE_OPERAND (t, 1)))
3837 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3838 hstate, flags);
3839 /* Don't ICE on FE specific trees, or their arguments etc.
3840 during operand_equal_p hash verification. */
3841 else if (!IS_EXPR_CODE_CLASS (tclass))
3842 gcc_assert (flags & OEP_HASH_CHECK);
3843 else
3845 unsigned int sflags = flags;
3847 hstate.add_object (code);
3849 switch (code)
3851 case ADDR_EXPR:
3852 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3853 flags |= OEP_ADDRESS_OF;
3854 sflags = flags;
3855 break;
3857 case INDIRECT_REF:
3858 case MEM_REF:
3859 case TARGET_MEM_REF:
3860 flags &= ~OEP_ADDRESS_OF;
3861 sflags = flags;
3862 break;
3864 case COMPONENT_REF:
3865 if (sflags & OEP_ADDRESS_OF)
3867 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3868 if (TREE_OPERAND (t, 2))
3869 hash_operand (TREE_OPERAND (t, 2), hstate,
3870 flags & ~OEP_ADDRESS_OF);
3871 else
3873 tree field = TREE_OPERAND (t, 1);
3874 hash_operand (DECL_FIELD_OFFSET (field),
3875 hstate, flags & ~OEP_ADDRESS_OF);
3876 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3877 hstate, flags & ~OEP_ADDRESS_OF);
3879 return;
3881 break;
3882 case ARRAY_REF:
3883 case ARRAY_RANGE_REF:
3884 case BIT_FIELD_REF:
3885 sflags &= ~OEP_ADDRESS_OF;
3886 break;
3888 case COND_EXPR:
3889 flags &= ~OEP_ADDRESS_OF;
3890 break;
3892 case WIDEN_MULT_PLUS_EXPR:
3893 case WIDEN_MULT_MINUS_EXPR:
3895 /* The multiplication operands are commutative. */
3896 inchash::hash one, two;
3897 hash_operand (TREE_OPERAND (t, 0), one, flags);
3898 hash_operand (TREE_OPERAND (t, 1), two, flags);
3899 hstate.add_commutative (one, two);
3900 hash_operand (TREE_OPERAND (t, 2), two, flags);
3901 return;
3904 case CALL_EXPR:
3905 if (CALL_EXPR_FN (t) == NULL_TREE)
3906 hstate.add_int (CALL_EXPR_IFN (t));
3907 break;
3909 case TARGET_EXPR:
3910 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3911 Usually different TARGET_EXPRs should just use
3912 different temporaries in their slots. */
3913 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3914 return;
3916 case OBJ_TYPE_REF:
3917 /* Virtual table reference. */
3918 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3919 flags &= ~OEP_ADDRESS_OF;
3920 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3921 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3922 if (!virtual_method_call_p (t))
3923 return;
3924 if (tree c = obj_type_ref_class (t))
3926 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3927 /* We compute mangled names only when free_lang_data is run.
3928 In that case we can hash precisely. */
3929 if (TREE_CODE (c) == TYPE_DECL
3930 && DECL_ASSEMBLER_NAME_SET_P (c))
3931 hstate.add_object
3932 (IDENTIFIER_HASH_VALUE
3933 (DECL_ASSEMBLER_NAME (c)));
3935 return;
3936 default:
3937 break;
3940 /* Don't hash the type, that can lead to having nodes which
3941 compare equal according to operand_equal_p, but which
3942 have different hash codes. */
3943 if (code == NON_LVALUE_EXPR)
3945 /* Make sure to include signedness in the hash computation. */
3946 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3947 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3950 else if (commutative_tree_code (code))
3952 /* It's a commutative expression. We want to hash it the same
3953 however it appears. We do this by first hashing both operands
3954 and then rehashing based on the order of their independent
3955 hashes. */
3956 inchash::hash one, two;
3957 hash_operand (TREE_OPERAND (t, 0), one, flags);
3958 hash_operand (TREE_OPERAND (t, 1), two, flags);
3959 hstate.add_commutative (one, two);
3961 else
3962 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3963 hash_operand (TREE_OPERAND (t, i), hstate,
3964 i == 0 ? flags : sflags);
3966 return;
3970 bool
3971 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3972 unsigned int flags, bool *ret)
3974 /* When checking is enabled, and unless comparing DECL names, verify that if
3975 the outermost operand_equal_p call returns non-zero then ARG0
3976 and ARG1 have the same hash value. */
3977 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3979 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3981 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3983 inchash::hash hstate0 (0), hstate1 (0);
3984 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3985 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3986 hashval_t h0 = hstate0.end ();
3987 hashval_t h1 = hstate1.end ();
3988 gcc_assert (h0 == h1);
3990 *ret = true;
3992 else
3993 *ret = false;
3995 return true;
3998 return false;
4002 static operand_compare default_compare_instance;
4004 /* Convenience wrapper around the operand_compare class, because usually we do
4005 not need to play with the valueizer. */
4007 bool
4008 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4010 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4013 namespace inchash
4016 /* Generate a hash value for an expression. This can be used iteratively
4017 by passing a previous result as the HSTATE argument.
4019 This function is intended to produce the same hash for expressions which
4020 would compare equal using operand_equal_p. */
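/* For example, A + B and B + A hash to the same value: for a
   commutative code, hash_operand combines the two operand hashes
   order-independently, matching operand_equal_p's view that the
   two trees are equal. */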
4021 void
4022 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4024 default_compare_instance.hash_operand (t, hstate, flags);
4029 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4030 with a different signedness or a narrower precision. */
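/* For example, if ARG0 is the int variable I, this also returns true
   when ARG1 is (unsigned int) I (same mode, different signedness) or
   (long) I (a single widening conversion of I). */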
4032 static bool
4033 operand_equal_for_comparison_p (tree arg0, tree arg1)
4035 if (operand_equal_p (arg0, arg1, 0))
4036 return true;
4038 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4039 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4040 return false;
4042 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4043 and see if the inner values are the same. This removes any
4044 signedness comparison, which doesn't matter here. */
4045 tree op0 = arg0;
4046 tree op1 = arg1;
4047 STRIP_NOPS (op0);
4048 STRIP_NOPS (op1);
4049 if (operand_equal_p (op0, op1, 0))
4050 return true;
4052 /* Discard a single widening conversion from ARG1 and see if the inner
4053 value is the same as ARG0. */
4054 if (CONVERT_EXPR_P (arg1)
4055 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4056 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4057 < TYPE_PRECISION (TREE_TYPE (arg1))
4058 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4059 return true;
4061 return false;
4064 /* See if ARG is an expression that is either a comparison or is performing
4065 arithmetic on comparisons. The comparisons must only be comparing
4066 two different values, which will be stored in *CVAL1 and *CVAL2; if
4067 they are nonzero it means that some operands have already been found.
4068 No variables may be used anywhere else in the expression except in the
4069 comparisons.
4071 If this is true, return true. Otherwise, return false. */
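/* For example, (A < B) || (A > B) qualifies, with *CVAL1 and *CVAL2
   set to A and B; (A < B) || (A > C) does not, since it compares
   three different values. */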
4073 static bool
4074 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4076 enum tree_code code = TREE_CODE (arg);
4077 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4079 /* We can handle some of the tcc_expression cases here. */
4080 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4081 tclass = tcc_unary;
4082 else if (tclass == tcc_expression
4083 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4084 || code == COMPOUND_EXPR))
4085 tclass = tcc_binary;
4087 switch (tclass)
4089 case tcc_unary:
4090 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4092 case tcc_binary:
4093 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4094 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4096 case tcc_constant:
4097 return true;
4099 case tcc_expression:
4100 if (code == COND_EXPR)
4101 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4102 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4103 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4104 return false;
4106 case tcc_comparison:
4107 /* First see if we can handle the first operand, then the second. For
4108 the second operand, we know *CVAL1 can't be zero. It must be that
4109 one side of the comparison is each of the values; test for the
4110 case where this isn't true by failing if the two operands
4111 are the same. */
4113 if (operand_equal_p (TREE_OPERAND (arg, 0),
4114 TREE_OPERAND (arg, 1), 0))
4115 return false;
4117 if (*cval1 == 0)
4118 *cval1 = TREE_OPERAND (arg, 0);
4119 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4121 else if (*cval2 == 0)
4122 *cval2 = TREE_OPERAND (arg, 0);
4123 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4125 else
4126 return false;
4128 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4130 else if (*cval2 == 0)
4131 *cval2 = TREE_OPERAND (arg, 1);
4132 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4134 else
4135 return false;
4137 return true;
4139 default:
4140 return false;
4144 /* ARG is a tree that is known to contain just arithmetic operations and
4145 comparisons. Evaluate the operations in the tree substituting NEW0 for
4146 any occurrence of OLD0 as an operand of a comparison and likewise for
4147 NEW1 and OLD1. */
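/* For example, with OLD0 = A, NEW0 = X, OLD1 = B and NEW1 = Y, the
   expression (A < B) && (B <= A) evaluates to (X < Y) && (Y <= X). */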
4149 static tree
4150 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4151 tree old1, tree new1)
4153 tree type = TREE_TYPE (arg);
4154 enum tree_code code = TREE_CODE (arg);
4155 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4157 /* We can handle some of the tcc_expression cases here. */
4158 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4159 tclass = tcc_unary;
4160 else if (tclass == tcc_expression
4161 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4162 tclass = tcc_binary;
4164 switch (tclass)
4166 case tcc_unary:
4167 return fold_build1_loc (loc, code, type,
4168 eval_subst (loc, TREE_OPERAND (arg, 0),
4169 old0, new0, old1, new1));
4171 case tcc_binary:
4172 return fold_build2_loc (loc, code, type,
4173 eval_subst (loc, TREE_OPERAND (arg, 0),
4174 old0, new0, old1, new1),
4175 eval_subst (loc, TREE_OPERAND (arg, 1),
4176 old0, new0, old1, new1));
4178 case tcc_expression:
4179 switch (code)
4181 case SAVE_EXPR:
4182 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4183 old1, new1);
4185 case COMPOUND_EXPR:
4186 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4187 old1, new1);
4189 case COND_EXPR:
4190 return fold_build3_loc (loc, code, type,
4191 eval_subst (loc, TREE_OPERAND (arg, 0),
4192 old0, new0, old1, new1),
4193 eval_subst (loc, TREE_OPERAND (arg, 1),
4194 old0, new0, old1, new1),
4195 eval_subst (loc, TREE_OPERAND (arg, 2),
4196 old0, new0, old1, new1));
4197 default:
4198 break;
4200 /* Fall through - ??? */
4202 case tcc_comparison:
4204 tree arg0 = TREE_OPERAND (arg, 0);
4205 tree arg1 = TREE_OPERAND (arg, 1);
4207 /* We need to check both for exact equality and tree equality. The
4208 former will be true if the operand has a side-effect. In that
4209 case, we know the operand occurred exactly once. */
4211 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4212 arg0 = new0;
4213 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4214 arg0 = new1;
4216 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4217 arg1 = new0;
4218 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4219 arg1 = new1;
4221 return fold_build2_loc (loc, code, type, arg0, arg1);
4224 default:
4225 return arg;
4229 /* Return a tree for the case when the result of an expression is RESULT
4230 converted to TYPE and OMITTED was previously an operand of the expression
4231 but is now not needed (e.g., we folded OMITTED * 0).
4233 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4234 the conversion of RESULT to TYPE. */
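/* For example, when f () * 0 is folded to 0, the call still has to be
   evaluated, so the result is the COMPOUND_EXPR (f (), 0) converted
   to TYPE. */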
4236 tree
4237 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4239 tree t = fold_convert_loc (loc, type, result);
4241 /* If the resulting operand is an empty statement, just return the omitted
4242 statement casted to void. */
4243 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4244 return build1_loc (loc, NOP_EXPR, void_type_node,
4245 fold_ignored_result (omitted));
4247 if (TREE_SIDE_EFFECTS (omitted))
4248 return build2_loc (loc, COMPOUND_EXPR, type,
4249 fold_ignored_result (omitted), t);
4251 return non_lvalue_loc (loc, t);
4254 /* Return a tree for the case when the result of an expression is RESULT
4255 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4256 of the expression but are now not needed.
4258 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4259 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4260 evaluated before OMITTED2. Otherwise, if neither has side effects,
4261 just do the conversion of RESULT to TYPE. */
4263 tree
4264 omit_two_operands_loc (location_t loc, tree type, tree result,
4265 tree omitted1, tree omitted2)
4267 tree t = fold_convert_loc (loc, type, result);
4269 if (TREE_SIDE_EFFECTS (omitted2))
4270 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4271 if (TREE_SIDE_EFFECTS (omitted1))
4272 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4274 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4278 /* Return a simplified tree node for the truth-negation of ARG. This
4279 never alters ARG itself. We assume that ARG is an operation that
4280 returns a truth value (0 or 1).
4282 FIXME: one would think we would fold the result, but it causes
4283 problems with the dominator optimizer. */
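/* For example, !(A && B) becomes !A || !B, and for integral operands
   !(X < Y) becomes X >= Y. Floating-point inequalities are left alone
   under -ftrapping-math, since the inverted comparison could raise a
   different set of exceptions on NaN operands. */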
4285 static tree
4286 fold_truth_not_expr (location_t loc, tree arg)
4288 tree type = TREE_TYPE (arg);
4289 enum tree_code code = TREE_CODE (arg);
4290 location_t loc1, loc2;
4292 /* If this is a comparison, we can simply invert it, except for
4293 floating-point non-equality comparisons, in which case we just
4294 enclose a TRUTH_NOT_EXPR around what we have. */
4296 if (TREE_CODE_CLASS (code) == tcc_comparison)
4298 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4299 if (FLOAT_TYPE_P (op_type)
4300 && flag_trapping_math
4301 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4302 && code != NE_EXPR && code != EQ_EXPR)
4303 return NULL_TREE;
4305 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4306 if (code == ERROR_MARK)
4307 return NULL_TREE;
4309 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4310 TREE_OPERAND (arg, 1));
4311 copy_warning (ret, arg);
4312 return ret;
4315 switch (code)
4317 case INTEGER_CST:
4318 return constant_boolean_node (integer_zerop (arg), type);
4320 case TRUTH_AND_EXPR:
4321 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4322 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4323 return build2_loc (loc, TRUTH_OR_EXPR, type,
4324 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4325 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4327 case TRUTH_OR_EXPR:
4328 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4329 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4330 return build2_loc (loc, TRUTH_AND_EXPR, type,
4331 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4332 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4334 case TRUTH_XOR_EXPR:
4335 /* Here we can invert either operand. We invert the first operand
4336 unless the second operand is a TRUTH_NOT_EXPR, in which case our
4337 result is the XOR of the first operand with the operand of the
4338 negated second operand. */
4340 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4341 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4342 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4343 else
4344 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4345 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4346 TREE_OPERAND (arg, 1));
4348 case TRUTH_ANDIF_EXPR:
4349 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4350 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4351 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4352 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4353 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4355 case TRUTH_ORIF_EXPR:
4356 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4357 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4358 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4359 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4360 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4362 case TRUTH_NOT_EXPR:
4363 return TREE_OPERAND (arg, 0);
4365 case COND_EXPR:
4367 tree arg1 = TREE_OPERAND (arg, 1);
4368 tree arg2 = TREE_OPERAND (arg, 2);
4370 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4371 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4373 /* A COND_EXPR may have a throw as one operand, which
4374 then has void type. Just leave void operands
4375 as they are. */
4376 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4377 VOID_TYPE_P (TREE_TYPE (arg1))
4378 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4379 VOID_TYPE_P (TREE_TYPE (arg2))
4380 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4383 case COMPOUND_EXPR:
4384 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4385 return build2_loc (loc, COMPOUND_EXPR, type,
4386 TREE_OPERAND (arg, 0),
4387 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4389 case NON_LVALUE_EXPR:
4390 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4391 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4393 CASE_CONVERT:
4394 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4395 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4397 /* fall through */
4399 case FLOAT_EXPR:
4400 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4401 return build1_loc (loc, TREE_CODE (arg), type,
4402 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4404 case BIT_AND_EXPR:
4405 if (!integer_onep (TREE_OPERAND (arg, 1)))
4406 return NULL_TREE;
4407 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4409 case SAVE_EXPR:
4410 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4412 case CLEANUP_POINT_EXPR:
4413 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4414 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4415 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4417 default:
4418 return NULL_TREE;
4422 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4423 assume that ARG is an operation that returns a truth value (0 or 1
4424 for scalars, 0 or -1 for vectors). Return the folded expression if
4425 folding is successful. Otherwise, return NULL_TREE. */
4427 static tree
4428 fold_invert_truthvalue (location_t loc, tree arg)
4430 tree type = TREE_TYPE (arg);
4431 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4432 ? BIT_NOT_EXPR
4433 : TRUTH_NOT_EXPR,
4434 type, arg);
4437 /* Return a simplified tree node for the truth-negation of ARG. This
4438 never alters ARG itself. We assume that ARG is an operation that
4439 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4441 tree
4442 invert_truthvalue_loc (location_t loc, tree arg)
4444 if (TREE_CODE (arg) == ERROR_MARK)
4445 return arg;
4447 tree type = TREE_TYPE (arg);
4448 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4449 ? BIT_NOT_EXPR
4450 : TRUTH_NOT_EXPR,
4451 type, arg);
4454 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4455 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4456 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4457 is the original memory reference used to preserve the alias set of
4458 the access. */
4460 static tree
4461 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4462 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4463 int unsignedp, int reversep)
4465 tree result, bftype;
4467 /* Attempt not to lose the access path if possible. */
4468 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4470 tree ninner = TREE_OPERAND (orig_inner, 0);
4471 machine_mode nmode;
4472 poly_int64 nbitsize, nbitpos;
4473 tree noffset;
4474 int nunsignedp, nreversep, nvolatilep = 0;
4475 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4476 &noffset, &nmode, &nunsignedp,
4477 &nreversep, &nvolatilep);
4478 if (base == inner
4479 && noffset == NULL_TREE
4480 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4481 && !reversep
4482 && !nreversep
4483 && !nvolatilep)
4485 inner = ninner;
4486 bitpos -= nbitpos;
4490 alias_set_type iset = get_alias_set (orig_inner);
4491 if (iset == 0 && get_alias_set (inner) != iset)
4492 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4493 build_fold_addr_expr (inner),
4494 build_int_cst (ptr_type_node, 0));
4496 if (known_eq (bitpos, 0) && !reversep)
4498 tree size = TYPE_SIZE (TREE_TYPE (inner));
4499 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4500 || POINTER_TYPE_P (TREE_TYPE (inner)))
4501 && tree_fits_shwi_p (size)
4502 && tree_to_shwi (size) == bitsize)
4503 return fold_convert_loc (loc, type, inner);
4506 bftype = type;
4507 if (TYPE_PRECISION (bftype) != bitsize
4508 || TYPE_UNSIGNED (bftype) == !unsignedp)
4509 bftype = build_nonstandard_integer_type (bitsize, 0);
4511 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4512 bitsize_int (bitsize), bitsize_int (bitpos));
4513 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4515 if (bftype != type)
4516 result = fold_convert_loc (loc, type, result);
4518 return result;
4521 /* Optimize a bit-field compare.
4523 There are two cases: the first is a compare against a constant, and the
4524 second is a comparison of two items where the fields are at the same
4525 bit position relative to the start of a chunk (byte, halfword, word)
4526 large enough to contain it. In these cases we can avoid the shift
4527 implicit in bitfield extractions.
4529 For constants, we emit a compare of the shifted constant with the
4530 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4531 compared. For two fields at the same position, we do the ANDs with the
4532 similar mask and compare the result of the ANDs.
4534 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4535 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4536 are the left and right operands of the comparison, respectively.
4538 If the optimization described above can be done, we return the resulting
4539 tree. Otherwise we return zero. */
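/* For example, given struct S { unsigned a : 3; } s, the test s.a == 5
   becomes a single load of the unit containing the field followed by
   (unit & mask) == ((5 << lbitpos) & mask), with no extraction shift. */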
4541 static tree
4542 optimize_bit_field_compare (location_t loc, enum tree_code code,
4543 tree compare_type, tree lhs, tree rhs)
4545 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4546 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4547 tree type = TREE_TYPE (lhs);
4548 tree unsigned_type;
4549 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4550 machine_mode lmode, rmode;
4551 scalar_int_mode nmode;
4552 int lunsignedp, runsignedp;
4553 int lreversep, rreversep;
4554 int lvolatilep = 0, rvolatilep = 0;
4555 tree linner, rinner = NULL_TREE;
4556 tree mask;
4557 tree offset;
4559 /* Get all the information about the extractions being done. If the bit size
4560 is the same as the size of the underlying object, we aren't doing an
4561 extraction at all and so can do nothing. We also don't want to
4562 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4563 then will no longer be able to replace it. */
4564 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4565 &lunsignedp, &lreversep, &lvolatilep);
4566 if (linner == lhs
4567 || !known_size_p (plbitsize)
4568 || !plbitsize.is_constant (&lbitsize)
4569 || !plbitpos.is_constant (&lbitpos)
4570 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4571 || offset != 0
4572 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4573 || lvolatilep)
4574 return 0;
4576 if (const_p)
4577 rreversep = lreversep;
4578 else
4580 /* If this is not a constant, we can only do something if bit positions,
4581 sizes, signedness and storage order are the same. */
4582 rinner
4583 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4584 &runsignedp, &rreversep, &rvolatilep);
4586 if (rinner == rhs
4587 || maybe_ne (lbitpos, rbitpos)
4588 || maybe_ne (lbitsize, rbitsize)
4589 || lunsignedp != runsignedp
4590 || lreversep != rreversep
4591 || offset != 0
4592 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4593 || rvolatilep)
4594 return 0;
4597 /* Honor the C++ memory model and mimic what RTL expansion does. */
4598 poly_uint64 bitstart = 0;
4599 poly_uint64 bitend = 0;
4600 if (TREE_CODE (lhs) == COMPONENT_REF)
4602 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4603 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4604 return 0;
4607 /* See if we can find a mode to refer to this field. We should be able to,
4608 but fail if we can't. */
4609 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4610 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4611 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4612 TYPE_ALIGN (TREE_TYPE (rinner))),
4613 BITS_PER_WORD, false, &nmode))
4614 return 0;
4616 /* Set signed and unsigned types of the precision of this mode for the
4617 shifts below. */
4618 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4620 /* Compute the bit position and size for the new reference and our offset
4621 within it. If the new reference is the same size as the original, we
4622 won't optimize anything, so return zero. */
4623 nbitsize = GET_MODE_BITSIZE (nmode);
4624 nbitpos = lbitpos & ~ (nbitsize - 1);
4625 lbitpos -= nbitpos;
4626 if (nbitsize == lbitsize)
4627 return 0;
4629 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4630 lbitpos = nbitsize - lbitsize - lbitpos;
4632 /* Make the mask to be used against the extracted field. */
4633 mask = build_int_cst_type (unsigned_type, -1);
4634 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4635 mask = const_binop (RSHIFT_EXPR, mask,
4636 size_int (nbitsize - lbitsize - lbitpos));
4638 if (! const_p)
4640 if (nbitpos < 0)
4641 return 0;
4643 /* If not comparing with constant, just rework the comparison
4644 and return. */
4645 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4646 nbitsize, nbitpos, 1, lreversep);
4647 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4648 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4649 nbitsize, nbitpos, 1, rreversep);
4650 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4651 return fold_build2_loc (loc, code, compare_type, t1, t2);
4654 /* Otherwise, we are handling the constant case. See if the constant is too
4655 big for the field. Warn and return a tree for 0 (false) if so. We do
4656 this not only for its own sake, but to avoid having to test for this
4657 error case below. If we didn't, we might generate wrong code.
4659 For unsigned fields, the constant shifted right by the field length should
4660 be all zero. For signed fields, the high-order bits should agree with
4661 the sign bit. */
4663 if (lunsignedp)
4665 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4667 warning (0, "comparison is always %d due to width of bit-field",
4668 code == NE_EXPR);
4669 return constant_boolean_node (code == NE_EXPR, compare_type);
4672 else
4674 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4675 if (tem != 0 && tem != -1)
4677 warning (0, "comparison is always %d due to width of bit-field",
4678 code == NE_EXPR);
4679 return constant_boolean_node (code == NE_EXPR, compare_type);
4683 if (nbitpos < 0)
4684 return 0;
4686 /* Single-bit compares should always be against zero. */
4687 if (lbitsize == 1 && ! integer_zerop (rhs))
4689 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4690 rhs = build_int_cst (type, 0);
4693 /* Make a new bitfield reference, shift the constant over the
4694 appropriate number of bits and mask it with the computed mask
4695 (in case this was a signed field). If we changed it, make a new one. */
4696 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4697 nbitsize, nbitpos, 1, lreversep);
4699 rhs = const_binop (BIT_AND_EXPR,
4700 const_binop (LSHIFT_EXPR,
4701 fold_convert_loc (loc, unsigned_type, rhs),
4702 size_int (lbitpos)),
4703 mask);
4705 lhs = build2_loc (loc, code, compare_type,
4706 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4707 return lhs;
4710 /* Subroutine for fold_truth_andor_1: decode a field reference.
4712 If EXP is a comparison reference, we return the innermost reference.
4714 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4715 set to the starting bit number.
4717 If the innermost field can be completely contained in a mode-sized
4718 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4720 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4721 otherwise it is not changed.
4723 *PUNSIGNEDP is set to the signedness of the field.
4725 *PREVERSEP is set to the storage order of the field.
4727 *PMASK is set to the mask used. This is either contained in a
4728 BIT_AND_EXPR or derived from the width of the field.
4730 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4732 Return 0 if this is not a component reference or is one that we can't
4733 do anything with. */
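/* For example, for (unsigned char) (s.a & 7) this strips the conversion,
   records 7 in *PAND_MASK, and returns the object containing the field
   together with its size, position, mode and signedness. */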
4735 static tree
4736 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4737 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4738 int *punsignedp, int *preversep, int *pvolatilep,
4739 tree *pmask, tree *pand_mask)
4741 tree exp = *exp_;
4742 tree outer_type = 0;
4743 tree and_mask = 0;
4744 tree mask, inner, offset;
4745 tree unsigned_type;
4746 unsigned int precision;
4748 /* All the optimizations using this function assume integer fields.
4749 There are problems with FP fields since the type_for_size call
4750 below can fail for, e.g., XFmode. */
4751 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4752 return NULL_TREE;
4754 /* We are interested in the bare arrangement of bits, so strip everything
4755 that doesn't affect the machine mode. However, record the type of the
4756 outermost expression if it may matter below. */
4757 if (CONVERT_EXPR_P (exp)
4758 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4759 outer_type = TREE_TYPE (exp);
4760 STRIP_NOPS (exp);
4762 if (TREE_CODE (exp) == BIT_AND_EXPR)
4764 and_mask = TREE_OPERAND (exp, 1);
4765 exp = TREE_OPERAND (exp, 0);
4766 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4767 if (TREE_CODE (and_mask) != INTEGER_CST)
4768 return NULL_TREE;
4771 poly_int64 poly_bitsize, poly_bitpos;
4772 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4773 pmode, punsignedp, preversep, pvolatilep);
4774 if ((inner == exp && and_mask == 0)
4775 || !poly_bitsize.is_constant (pbitsize)
4776 || !poly_bitpos.is_constant (pbitpos)
4777 || *pbitsize < 0
4778 || offset != 0
4779 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4780 /* Reject out-of-bound accesses (PR79731). */
4781 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4782 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4783 *pbitpos + *pbitsize) < 0))
4784 return NULL_TREE;
4786 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4787 if (unsigned_type == NULL_TREE)
4788 return NULL_TREE;
4790 *exp_ = exp;
4792 /* If the number of bits in the reference is the same as the bitsize of
4793 the outer type, then the outer type gives the signedness. Otherwise
4794 (in case of a small bitfield) the signedness is unchanged. */
4795 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4796 *punsignedp = TYPE_UNSIGNED (outer_type);
4798 /* Compute the mask to access the bitfield. */
4799 precision = TYPE_PRECISION (unsigned_type);
4801 mask = build_int_cst_type (unsigned_type, -1);
4803 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4804 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4806 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4807 if (and_mask != 0)
4808 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4809 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4811 *pmask = mask;
4812 *pand_mask = and_mask;
4813 return inner;
4816 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4817 bit positions and the type of MASK is signed. */
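/* For example, a signed char mask of 0x0f satisfies the test for
   SIZE == 4; 0x17 does not (its ones are not contiguous from bit 0),
   and neither does any unsigned mask. */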
4819 static bool
4820 all_ones_mask_p (const_tree mask, unsigned int size)
4822 tree type = TREE_TYPE (mask);
4823 unsigned int precision = TYPE_PRECISION (type);
4825 /* If this function returns true when the type of the mask is
4826 UNSIGNED, then there will be errors. In particular see
4827 gcc.c-torture/execute/990326-1.c. There does not appear to be
4828 any documentation paper trail as to why this is so. But the
4829 pre-wide-int code worked with that restriction and it has been preserved
4830 here. */
4831 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4832 return false;
4834 return wi::mask (size, false, precision) == wi::to_wide (mask);
4837 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4838 represents the sign bit of EXP's type. If EXP represents a sign
4839 or zero extension, also test VAL against the unextended type.
4840 The return value is the (sub)expression whose sign bit is VAL,
4841 or NULL_TREE otherwise. */
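/* For example, with a 32-bit int EXP the constant 0x80000000 is its
   sign bit; if EXP is (int) C for a signed char C, the narrower sign
   bit 0x80 is accepted as well, and C itself is returned. */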
4843 tree
4844 sign_bit_p (tree exp, const_tree val)
4846 int width;
4847 tree t;
4849 /* Tree EXP must have an integral type. */
4850 t = TREE_TYPE (exp);
4851 if (! INTEGRAL_TYPE_P (t))
4852 return NULL_TREE;
4854 /* Tree VAL must be an integer constant. */
4855 if (TREE_CODE (val) != INTEGER_CST
4856 || TREE_OVERFLOW (val))
4857 return NULL_TREE;
4859 width = TYPE_PRECISION (t);
4860 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4861 return exp;
4863 /* Handle extension from a narrower type. */
4864 if (TREE_CODE (exp) == NOP_EXPR
4865 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4866 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4868 return NULL_TREE;
4871 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4872 to be evaluated unconditionally. */
4874 static bool
4875 simple_operand_p (const_tree exp)
4877 /* Strip any conversions that don't change the machine mode. */
4878 STRIP_NOPS (exp);
4880 return (CONSTANT_CLASS_P (exp)
4881 || TREE_CODE (exp) == SSA_NAME
4882 || (DECL_P (exp)
4883 && ! TREE_ADDRESSABLE (exp)
4884 && ! TREE_THIS_VOLATILE (exp)
4885 && ! DECL_NONLOCAL (exp)
4886 /* Don't regard global variables as simple. They may be
4887 allocated in ways unknown to the compiler (shared memory,
4888 #pragma weak, etc). */
4889 && ! TREE_PUBLIC (exp)
4890 && ! DECL_EXTERNAL (exp)
4891 /* Weakrefs are not safe to be read, since they can be NULL.
4892 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4893 have DECL_WEAK flag set. */
4894 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4895 /* Loading a static variable is unduly expensive, but global
4896 registers aren't expensive. */
4897 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4900 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4901 to be evaluated unconditionally.
4902 In addition to simple_operand_p, we assume that comparisons, conversions,
4903 and logic-not operations are simple if their operands are simple, too. */
4905 static bool
4906 simple_operand_p_2 (tree exp)
4908 enum tree_code code;
4910 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4911 return false;
4913 while (CONVERT_EXPR_P (exp))
4914 exp = TREE_OPERAND (exp, 0);
4916 code = TREE_CODE (exp);
4918 if (TREE_CODE_CLASS (code) == tcc_comparison)
4919 return (simple_operand_p (TREE_OPERAND (exp, 0))
4920 && simple_operand_p (TREE_OPERAND (exp, 1)));
4922 if (code == TRUTH_NOT_EXPR)
4923 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4925 return simple_operand_p (exp);
4929 /* The following functions are subroutines to fold_range_test and allow it to
4930 try to change a logical combination of comparisons into a range test.
4932 For example, both
4933 X == 2 || X == 3 || X == 4 || X == 5
4935 X >= 2 && X <= 5
4936 are converted to
4937 (unsigned) (X - 2) <= 3
4939 We describe each set of comparisons as being either inside or outside
4940 a range, using a variable named like IN_P, and then describe the
4941 range with a lower and upper bound. If one of the bounds is omitted,
4942 it represents either the highest or lowest value of the type.
4944 In the comments below, we represent a range by two numbers in brackets
4945 preceded by a "+" to designate being inside that range, or a "-" to
4946 designate being outside that range, so the condition can be inverted by
4947 flipping the prefix. An omitted bound is represented by a "-". For
4948 example, "- [-, 10]" means being outside the range starting at the lowest
4949 possible value and ending at 10, in other words, being greater than 10.
4950 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4951 always false.
4953 We set up things so that the missing bounds are handled in a consistent
4954 manner so neither a missing bound nor "true" and "false" need to be
4955 handled using a special case. */
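/* For example, X == 2 || X == 3 || X == 4 || X == 5 first becomes the
   range "+ [2, 5]". Subtracting the low bound turns it into "+ [0, 3]"
   on X - 2, and because an unsigned value is always >= 0, the two bound
   checks collapse into the single test (unsigned) (X - 2) <= 3. */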
4957 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4958 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4959 and UPPER1_P are nonzero if the respective argument is an upper bound
4960 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4961 must be specified for a comparison. ARG1 will be converted to ARG0's
4962 type if both are specified. */
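/* For example, range_binop (LT_EXPR, type, 0, 0, high, 1) compares a
   missing lower bound against an upper bound HIGH: the missing bound
   acts like minus infinity, so the result is true. */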
4964 static tree
4965 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4966 tree arg1, int upper1_p)
4968 tree tem;
4969 int result;
4970 int sgn0, sgn1;
4972 /* If neither arg represents infinity, do the normal operation.
4973 Else, if not a comparison, return infinity. Else handle the special
4974 comparison rules. Note that most of the cases below won't occur, but
4975 are handled for consistency. */
4977 if (arg0 != 0 && arg1 != 0)
4979 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4980 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4981 STRIP_NOPS (tem);
4982 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4985 if (TREE_CODE_CLASS (code) != tcc_comparison)
4986 return 0;
4988 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4989 for neither. In real maths, we cannot assume open ended ranges are
4990 the same. But, this is computer arithmetic, where numbers are finite.
4991 We can therefore make the transformation of any unbounded range with
4992 the value Z, Z being greater than any representable number. This permits
4993 us to treat unbounded ranges as equal. */
4994 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4995 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4996 switch (code)
4998 case EQ_EXPR:
4999 result = sgn0 == sgn1;
5000 break;
5001 case NE_EXPR:
5002 result = sgn0 != sgn1;
5003 break;
5004 case LT_EXPR:
5005 result = sgn0 < sgn1;
5006 break;
5007 case LE_EXPR:
5008 result = sgn0 <= sgn1;
5009 break;
5010 case GT_EXPR:
5011 result = sgn0 > sgn1;
5012 break;
5013 case GE_EXPR:
5014 result = sgn0 >= sgn1;
5015 break;
5016 default:
5017 gcc_unreachable ();
5020 return constant_boolean_node (result, type);
5023 /* Helper routine for make_range. Perform one step for it, return
5024 new expression if the loop should continue or NULL_TREE if it should
5025 stop. */
5027 tree
5028 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5029 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5030 bool *strict_overflow_p)
5032 tree arg0_type = TREE_TYPE (arg0);
5033 tree n_low, n_high, low = *p_low, high = *p_high;
5034 int in_p = *p_in_p, n_in_p;
5036 switch (code)
5038 case TRUTH_NOT_EXPR:
5039 /* We can only do something if the range is testing for zero. */
5040 if (low == NULL_TREE || high == NULL_TREE
5041 || ! integer_zerop (low) || ! integer_zerop (high))
5042 return NULL_TREE;
5043 *p_in_p = ! in_p;
5044 return arg0;
5046 case EQ_EXPR: case NE_EXPR:
5047 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5048 /* We can only do something if the range is testing for zero
5049 and if the second operand is an integer constant. Note that
5050 saying something is "in" the range we make is done by
5051 complementing IN_P, since it is set up for the initial case of
5052 being not equal to zero; "out" means leaving it alone. */
5053 if (low == NULL_TREE || high == NULL_TREE
5054 || ! integer_zerop (low) || ! integer_zerop (high)
5055 || TREE_CODE (arg1) != INTEGER_CST)
5056 return NULL_TREE;
5058 switch (code)
5060 case NE_EXPR: /* - [c, c] */
5061 low = high = arg1;
5062 break;
5063 case EQ_EXPR: /* + [c, c] */
5064 in_p = ! in_p, low = high = arg1;
5065 break;
5066 case GT_EXPR: /* - [-, c] */
5067 low = 0, high = arg1;
5068 break;
5069 case GE_EXPR: /* + [c, -] */
5070 in_p = ! in_p, low = arg1, high = 0;
5071 break;
5072 case LT_EXPR: /* - [c, -] */
5073 low = arg1, high = 0;
5074 break;
5075 case LE_EXPR: /* + [-, c] */
5076 in_p = ! in_p, low = 0, high = arg1;
5077 break;
5078 default:
5079 gcc_unreachable ();
5082 /* If this is an unsigned comparison, we also know that EXP is
5083 greater than or equal to zero. We base the range tests we make
5084 on that fact, so we record it here so we can parse existing
5085 range tests. We test arg0_type since often the return type
5086 of, e.g. EQ_EXPR, is boolean. */
5087 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5089 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5090 in_p, low, high, 1,
5091 build_int_cst (arg0_type, 0),
5092 NULL_TREE))
5093 return NULL_TREE;
5095 in_p = n_in_p, low = n_low, high = n_high;
5097 /* If the high bound is missing, but we have a nonzero low
5098 bound, reverse the range so it goes from zero to the low bound
5099 minus 1. */
5100 if (high == 0 && low && ! integer_zerop (low))
5102 in_p = ! in_p;
5103 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5104 build_int_cst (TREE_TYPE (low), 1), 0);
5105 low = build_int_cst (arg0_type, 0);
5109 *p_low = low;
5110 *p_high = high;
5111 *p_in_p = in_p;
5112 return arg0;
5114 case NEGATE_EXPR:
5115 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5116 low and high are non-NULL, then normalize will DTRT. */
5117 if (!TYPE_UNSIGNED (arg0_type)
5118 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5120 if (low == NULL_TREE)
5121 low = TYPE_MIN_VALUE (arg0_type);
5122 if (high == NULL_TREE)
5123 high = TYPE_MAX_VALUE (arg0_type);
5126 /* (-x) IN [a,b] -> x in [-b, -a] */
5127 n_low = range_binop (MINUS_EXPR, exp_type,
5128 build_int_cst (exp_type, 0),
5129 0, high, 1);
5130 n_high = range_binop (MINUS_EXPR, exp_type,
5131 build_int_cst (exp_type, 0),
5132 0, low, 0);
5133 if (n_high != 0 && TREE_OVERFLOW (n_high))
5134 return NULL_TREE;
5135 goto normalize;
5137 case BIT_NOT_EXPR:
5138 /* ~ X -> -X - 1 */
5139 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5140 build_int_cst (exp_type, 1));
5142 case PLUS_EXPR:
5143 case MINUS_EXPR:
5144 if (TREE_CODE (arg1) != INTEGER_CST)
5145 return NULL_TREE;
5147 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5148 move a constant to the other side. */
5149 if (!TYPE_UNSIGNED (arg0_type)
5150 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5151 return NULL_TREE;
5153 /* If EXP is signed, any overflow in the computation is undefined,
5154 so we don't worry about it so long as our computations on
5155 the bounds don't overflow. For unsigned, overflow is defined
5156 and this is exactly the right thing. */
5157 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5158 arg0_type, low, 0, arg1, 0);
5159 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5160 arg0_type, high, 1, arg1, 0);
5161 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5162 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5163 return NULL_TREE;
5165 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5166 *strict_overflow_p = true;
5168 normalize:
5169 /* Check for an unsigned range which has wrapped around the maximum
5170 value thus making n_high < n_low, and normalize it. */
5171 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5173 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5174 build_int_cst (TREE_TYPE (n_high), 1), 0);
5175 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5176 build_int_cst (TREE_TYPE (n_low), 1), 0);
5178 /* If the range is of the form +/- [ x+1, x ], we won't
5179 be able to normalize it. But then, it represents the
5180 whole range or the empty set, so make it
5181 +/- [ -, - ]. */
5182 if (tree_int_cst_equal (n_low, low)
5183 && tree_int_cst_equal (n_high, high))
5184 low = high = 0;
5185 else
5186 in_p = ! in_p;
5188 else
5189 low = n_low, high = n_high;
5191 *p_low = low;
5192 *p_high = high;
5193 *p_in_p = in_p;
5194 return arg0;
5196 CASE_CONVERT:
5197 case NON_LVALUE_EXPR:
5198 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5199 return NULL_TREE;
5201 if (! INTEGRAL_TYPE_P (arg0_type)
5202 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5203 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5204 return NULL_TREE;
5206 n_low = low, n_high = high;
5208 if (n_low != 0)
5209 n_low = fold_convert_loc (loc, arg0_type, n_low);
5211 if (n_high != 0)
5212 n_high = fold_convert_loc (loc, arg0_type, n_high);
5214 /* If we're converting arg0 from an unsigned type to exp,
5215 a signed type, we will be doing the comparison as unsigned.
5216 The tests above have already verified that LOW and HIGH
5217 are both positive.
5219 So we have to ensure that we will handle large unsigned
5220 values the same way that the current signed bounds treat
5221 negative values. */
5223 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5225 tree high_positive;
5226 tree equiv_type;
5227 /* For fixed-point modes, we need to pass the saturating flag
5228 as the 2nd parameter. */
5229 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5230 equiv_type
5231 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5232 TYPE_SATURATING (arg0_type));
5233 else
5234 equiv_type
5235 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5237 /* A range without an upper bound is, naturally, unbounded.
5238 Since convert would have cropped a very large value, use
5239 the max value for the destination type. */
5240 high_positive
5241 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5242 : TYPE_MAX_VALUE (arg0_type);
5244 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5245 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5246 fold_convert_loc (loc, arg0_type,
5247 high_positive),
5248 build_int_cst (arg0_type, 1));
5250 /* If the low bound is specified, "and" the range with the
5251 range for which the original unsigned value will be
5252 positive. */
5253 if (low != 0)
5255 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5256 1, fold_convert_loc (loc, arg0_type,
5257 integer_zero_node),
5258 high_positive))
5259 return NULL_TREE;
5261 in_p = (n_in_p == in_p);
5263 else
5265 /* Otherwise, "or" the range with the range of the input
5266 that will be interpreted as negative. */
5267 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5268 1, fold_convert_loc (loc, arg0_type,
5269 integer_zero_node),
5270 high_positive))
5271 return NULL_TREE;
5273 in_p = (in_p != n_in_p);
5277 /* Otherwise, if we are converting arg0 from a signed type to exp,
5278 an unsigned type, we will do the comparison as signed. If
5279 high is non-NULL, we punt above if it doesn't fit in the signed
5280 type, so if we get through here, +[-, high] or +[low, high] are
5281 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5282 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5283 high is not, the +[low, -] range is equivalent to union of
5284 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5285 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5286 low being 0, which should be treated as [-, -]. */
5287 else if (TYPE_UNSIGNED (exp_type)
5288 && !TYPE_UNSIGNED (arg0_type)
5289 && low
5290 && !high)
5292 if (integer_zerop (low))
5293 n_low = NULL_TREE;
5294 else
5296 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5297 n_low, build_int_cst (arg0_type, -1));
5298 n_low = build_zero_cst (arg0_type);
5299 in_p = !in_p;
5303 *p_low = n_low;
5304 *p_high = n_high;
5305 *p_in_p = in_p;
5306 return arg0;
5308 default:
5309 return NULL_TREE;
5313 /* Given EXP, a logical expression, set the range it is testing into
5314 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5315 actually being tested. *PLOW and *PHIGH will be made of the same
5316 type as the returned expression. If EXP is not a comparison, we
5317 will most likely not be returning a useful value and range. Set
5318 *STRICT_OVERFLOW_P to true if the return value is only valid
5319 because signed overflow is undefined; otherwise, do not change
5320 *STRICT_OVERFLOW_P. */
5322 tree
5323 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5324 bool *strict_overflow_p)
5326 enum tree_code code;
5327 tree arg0, arg1 = NULL_TREE;
5328 tree exp_type, nexp;
5329 int in_p;
5330 tree low, high;
5331 location_t loc = EXPR_LOCATION (exp);
5333 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5334 and see if we can refine the range. Some of the cases below may not
5335 happen, but it doesn't seem worth worrying about this. We "continue"
5336 the outer loop when we've changed something; otherwise we "break"
5337 the switch, which will "break" the while. */
5339 in_p = 0;
5340 low = high = build_int_cst (TREE_TYPE (exp), 0);
5342 while (1)
5344 code = TREE_CODE (exp);
5345 exp_type = TREE_TYPE (exp);
5346 arg0 = NULL_TREE;
5348 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5350 if (TREE_OPERAND_LENGTH (exp) > 0)
5351 arg0 = TREE_OPERAND (exp, 0);
5352 if (TREE_CODE_CLASS (code) == tcc_binary
5353 || TREE_CODE_CLASS (code) == tcc_comparison
5354 || (TREE_CODE_CLASS (code) == tcc_expression
5355 && TREE_OPERAND_LENGTH (exp) > 1))
5356 arg1 = TREE_OPERAND (exp, 1);
5358 if (arg0 == NULL_TREE)
5359 break;
5361 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5362 &high, &in_p, strict_overflow_p);
5363 if (nexp == NULL_TREE)
5364 break;
5365 exp = nexp;
5368 /* If EXP is a constant, we can evaluate whether this is true or false. */
5369 if (TREE_CODE (exp) == INTEGER_CST)
5371 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5372 exp, 0, low, 0))
5373 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5374 exp, 1, high, 1)));
5375 low = high = 0;
5376 exp = 0;
5379 *pin_p = in_p, *plow = low, *phigh = high;
5380 return exp;
5383 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5384 a bitwise check i.e. when
5385 LOW == 0xXX...X00...0
5386 HIGH == 0xXX...X11...1
5387 Return corresponding mask in MASK and stem in VALUE. */
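/* For example, LOW == 0x60 and HIGH == 0x6f differ only in their low
   four bits, which are all zero in LOW and all one in HIGH, so the
   check succeeds with *MASK == ~0xf and *VALUE == 0x60: the range
   test becomes (X & ~0xf) == 0x60. */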
5389 static bool
5390 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5391 tree *value)
5393 if (TREE_CODE (low) != INTEGER_CST
5394 || TREE_CODE (high) != INTEGER_CST)
5395 return false;
5397 unsigned prec = TYPE_PRECISION (type);
5398 wide_int lo = wi::to_wide (low, prec);
5399 wide_int hi = wi::to_wide (high, prec);
5401 wide_int end_mask = lo ^ hi;
5402 if ((end_mask & (end_mask + 1)) != 0
5403 || (lo & end_mask) != 0)
5404 return false;
5406 wide_int stem_mask = ~end_mask;
5407 wide_int stem = lo & stem_mask;
5408 if (stem != (hi & stem_mask))
5409 return false;
5411 *mask = wide_int_to_tree (type, stem_mask);
5412 *value = wide_int_to_tree (type, stem);
5414 return true;
5417 /* Helper routine for build_range_check and match.pd. Return the type to
5418 perform the check or NULL if it shouldn't be optimized. */
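/* For example, a range check on a signed int is performed in unsigned
   int, whose arithmetic wraps around; enumeral and boolean types are
   first mapped to an unsigned integer type of the same precision. */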
5420 tree
5421 range_check_type (tree etype)
5423 /* First make sure that arithmetic in this type is valid, then make sure
5424 that it wraps around. */
5425 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5426 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5428 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5430 tree utype, minv, maxv;
5432 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5433 for the type in question, as we rely on this here. */
5434 utype = unsigned_type_for (etype);
5435 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5436 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5437 build_int_cst (TREE_TYPE (maxv), 1), 1);
5438 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5440 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5441 minv, 1, maxv, 1)))
5442 etype = utype;
5443 else
5444 return NULL_TREE;
5446 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5447 etype = unsigned_type_for (etype);
5448 return etype;
5451 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5452 type, TYPE, return an expression to test if EXP is in (or out of, depending
5453 on IN_P) the range. Return 0 if the test couldn't be created. */
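/* For example, 1 <= C && C <= 127 on an unsigned char C becomes
   (signed char) C > 0, and a generic LOW <= X && X <= HIGH becomes
   (unsigned) (X - LOW) <= (unsigned) (HIGH - LOW). */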
5455 tree
5456 build_range_check (location_t loc, tree type, tree exp, int in_p,
5457 tree low, tree high)
5459 tree etype = TREE_TYPE (exp), mask, value;
5461 /* Disable this optimization for function pointer expressions
5462 on targets that require function pointer canonicalization. */
5463 if (targetm.have_canonicalize_funcptr_for_compare ()
5464 && POINTER_TYPE_P (etype)
5465 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5466 return NULL_TREE;
5468 if (! in_p)
5470 value = build_range_check (loc, type, exp, 1, low, high);
5471 if (value != 0)
5472 return invert_truthvalue_loc (loc, value);
5474 return 0;
5477 if (low == 0 && high == 0)
5478 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5480 if (low == 0)
5481 return fold_build2_loc (loc, LE_EXPR, type, exp,
5482 fold_convert_loc (loc, etype, high));
5484 if (high == 0)
5485 return fold_build2_loc (loc, GE_EXPR, type, exp,
5486 fold_convert_loc (loc, etype, low));
5488 if (operand_equal_p (low, high, 0))
5489 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5490 fold_convert_loc (loc, etype, low));
5492 if (TREE_CODE (exp) == BIT_AND_EXPR
5493 && maskable_range_p (low, high, etype, &mask, &value))
5494 return fold_build2_loc (loc, EQ_EXPR, type,
5495 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5496 exp, mask),
5497 value);
5499 if (integer_zerop (low))
5501 if (! TYPE_UNSIGNED (etype))
5503 etype = unsigned_type_for (etype);
5504 high = fold_convert_loc (loc, etype, high);
5505 exp = fold_convert_loc (loc, etype, exp);
5507 return build_range_check (loc, type, exp, 1, 0, high);
5510 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5511 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5513 int prec = TYPE_PRECISION (etype);
5515 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5517 if (TYPE_UNSIGNED (etype))
5519 tree signed_etype = signed_type_for (etype);
5520 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5521 etype
5522 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5523 else
5524 etype = signed_etype;
5525 exp = fold_convert_loc (loc, etype, exp);
5527 return fold_build2_loc (loc, GT_EXPR, type, exp,
5528 build_int_cst (etype, 0));
5532 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5533 This requires wrap-around arithmetic for the type of the expression. */
5534 etype = range_check_type (etype);
5535 if (etype == NULL_TREE)
5536 return NULL_TREE;
5538 high = fold_convert_loc (loc, etype, high);
5539 low = fold_convert_loc (loc, etype, low);
5540 exp = fold_convert_loc (loc, etype, exp);
5542 value = const_binop (MINUS_EXPR, high, low);
5544 if (value != 0 && !TREE_OVERFLOW (value))
5545 return build_range_check (loc, type,
5546 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5547 1, build_int_cst (etype, 0), value);
5549 return 0;
5552 /* Return the predecessor of VAL in its type, handling the infinite case. */
5554 static tree
5555 range_predecessor (tree val)
5557 tree type = TREE_TYPE (val);
5559 if (INTEGRAL_TYPE_P (type)
5560 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5561 return 0;
5562 else
5563 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5564 build_int_cst (TREE_TYPE (val), 1), 0);
5567 /* Return the successor of VAL in its type, handling the infinite case. */
5569 static tree
5570 range_successor (tree val)
5572 tree type = TREE_TYPE (val);
5574 if (INTEGRAL_TYPE_P (type)
5575 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5576 return 0;
5577 else
5578 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5579 build_int_cst (TREE_TYPE (val), 1), 0);
5582 /* Given two ranges, see if we can merge them into one. Return 1 if we
5583 can, 0 if we can't. Set the output range into the specified parameters. */
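/* For example, merging "+ [2, 5]" with "+ [4, 9]" gives the overlap
   "+ [4, 5]", while merging "+ [2, 5]" with "- [2, 5]" gives
   "- [-, -]", which is always false. */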
5585 bool
5586 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5587 tree high0, int in1_p, tree low1, tree high1)
5589 int no_overlap;
5590 int subset;
5591 int temp;
5592 tree tem;
5593 int in_p;
5594 tree low, high;
5595 int lowequal = ((low0 == 0 && low1 == 0)
5596 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5597 low0, 0, low1, 0)));
5598 int highequal = ((high0 == 0 && high1 == 0)
5599 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5600 high0, 1, high1, 1)));
5602 /* Make range 0 be the range that starts first, or ends last if they
5603 start at the same value. Swap them if it isn't. */
5604 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5605 low0, 0, low1, 0))
5606 || (lowequal
5607 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5608 high1, 1, high0, 1))))
5610 temp = in0_p, in0_p = in1_p, in1_p = temp;
5611 tem = low0, low0 = low1, low1 = tem;
5612 tem = high0, high0 = high1, high1 = tem;
5615 /* If the second range is != high1 where high1 is the type maximum of
5616 the type, try first merging with < high1 range. */
5617 if (low1
5618 && high1
5619 && TREE_CODE (low1) == INTEGER_CST
5620 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5621 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5622 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5623 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5624 && operand_equal_p (low1, high1, 0))
5626 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5627 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5628 !in1_p, NULL_TREE, range_predecessor (low1)))
5629 return true;
5630 /* Similarly for the second range != low1 where low1 is the type minimum
5631 of the type, try first merging with > low1 range. */
5632 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5633 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5634 !in1_p, range_successor (low1), NULL_TREE))
5635 return true;
5638 /* Now flag two cases, whether the ranges are disjoint or whether the
5639 second range is totally subsumed in the first. Note that the tests
5640 below are simplified by the ones above. */
5641 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5642 high0, 1, low1, 0));
5643 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5644 high1, 1, high0, 1));
5646 /* We now have four cases, depending on whether we are including or
5647 excluding the two ranges. */
5648 if (in0_p && in1_p)
5650 /* If they don't overlap, the result is false. If the second range
5651 is a subset it is the result. Otherwise, the range is from the start
5652 of the second to the end of the first. */
5653 if (no_overlap)
5654 in_p = 0, low = high = 0;
5655 else if (subset)
5656 in_p = 1, low = low1, high = high1;
5657 else
5658 in_p = 1, low = low1, high = high0;
5661 else if (in0_p && ! in1_p)
5663 /* If they don't overlap, the result is the first range. If they are
5664 equal, the result is false. If the second range is a subset of the
5665 first, and the ranges begin at the same place, we go from just after
5666 the end of the second range to the end of the first. If the second
5667 range is not a subset of the first, or if it is a subset and both
5668 ranges end at the same place, the range starts at the start of the
5669 first range and ends just before the second range.
5670 Otherwise, we can't describe this as a single range. */
5671 if (no_overlap)
5672 in_p = 1, low = low0, high = high0;
5673 else if (lowequal && highequal)
5674 in_p = 0, low = high = 0;
5675 else if (subset && lowequal)
5677 low = range_successor (high1);
5678 high = high0;
5679 in_p = 1;
5680 if (low == 0)
5682 /* We are in the weird situation where high0 > high1 but
5683 high1 has no successor. Punt. */
5684 return 0;
5687 else if (! subset || highequal)
5689 low = low0;
5690 high = range_predecessor (low1);
5691 in_p = 1;
5692 if (high == 0)
5694 /* low0 < low1 but low1 has no predecessor. Punt. */
5695 return 0;
5698 else
5699 return 0;
5702 else if (! in0_p && in1_p)
5704 /* If they don't overlap, the result is the second range. If the second
5705 is a subset of the first, the result is false. Otherwise,
5706 the range starts just after the first range and ends at the
5707 end of the second. */
5708 if (no_overlap)
5709 in_p = 1, low = low1, high = high1;
5710 else if (subset || highequal)
5711 in_p = 0, low = high = 0;
5712 else
5714 low = range_successor (high0);
5715 high = high1;
5716 in_p = 1;
5717 if (low == 0)
5719 /* high1 > high0 but high0 has no successor. Punt. */
5720 return 0;
5725 else
5727 /* The case where we are excluding both ranges. Here the complex case
5728 is if they don't overlap. In that case, the only time we have a
5729 range is if they are adjacent. If the second is a subset of the
5730 first, the result is the first. Otherwise, the range to exclude
5731 starts at the beginning of the first range and ends at the end of the
5732 second. */
5733 if (no_overlap)
5735 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5736 range_successor (high0),
5737 1, low1, 0)))
5738 in_p = 0, low = low0, high = high1;
5739 else
5741 /* Canonicalize - [min, x] into - [-, x]. */
5742 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5743 switch (TREE_CODE (TREE_TYPE (low0)))
5745 case ENUMERAL_TYPE:
5746 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5747 GET_MODE_BITSIZE
5748 (TYPE_MODE (TREE_TYPE (low0)))))
5749 break;
5750 /* FALLTHROUGH */
5751 case INTEGER_TYPE:
5752 if (tree_int_cst_equal (low0,
5753 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5754 low0 = 0;
5755 break;
5756 case POINTER_TYPE:
5757 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5758 && integer_zerop (low0))
5759 low0 = 0;
5760 break;
5761 default:
5762 break;
5765 /* Canonicalize - [x, max] into - [x, -]. */
5766 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5767 switch (TREE_CODE (TREE_TYPE (high1)))
5769 case ENUMERAL_TYPE:
5770 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5771 GET_MODE_BITSIZE
5772 (TYPE_MODE (TREE_TYPE (high1)))))
5773 break;
5774 /* FALLTHROUGH */
5775 case INTEGER_TYPE:
5776 if (tree_int_cst_equal (high1,
5777 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5778 high1 = 0;
5779 break;
5780 case POINTER_TYPE:
5781 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5782 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5783 high1, 1,
5784 build_int_cst (TREE_TYPE (high1), 1),
5785 1)))
5786 high1 = 0;
5787 break;
5788 default:
5789 break;
5792 /* The ranges might be also adjacent between the maximum and
5793 minimum values of the given type. For
5794 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5795 return + [x + 1, y - 1]. */
5796 if (low0 == 0 && high1 == 0)
5798 low = range_successor (high0);
5799 high = range_predecessor (low1);
5800 if (low == 0 || high == 0)
5801 return 0;
5803 in_p = 1;
5805 else
5806 return 0;
5809 else if (subset)
5810 in_p = 0, low = low0, high = high0;
5811 else
5812 in_p = 0, low = low0, high = high1;
5815 *pin_p = in_p, *plow = low, *phigh = high;
5816 return 1;
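/* A simplified sketch of the in0_p && in1_p case above on closed int
   intervals (hypothetical types and names, not compiled).  merge_ranges
   also handles open ends and excluded ranges; this shows only the
   intersection logic after the initial reordering swap.  */
#if 0
struct irange { int lo, hi; };

static int
intersect_ordered (struct irange a, struct irange b, struct irange *out)
{
  /* Precondition: A starts first, or ends last on a tied start.  */
  if (a.hi < b.lo)			/* no_overlap => empty result */
    return 0;
  out->lo = b.lo;			/* later start */
  out->hi = b.hi <= a.hi ? b.hi : a.hi;	/* earlier end (subset case) */
  return 1;
}
#endif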
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
   are the three operands of the COND_EXPR.  This function is also
   used to optimize A op B ? C : A by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				enum tree_code comp_code,
				tree arg00, tree arg01, tree arg1, tree arg2)
{
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0 ? A : -A    same as -A
     A != 0 ? A : -A    same as A
     A >= 0 ? A : -A    same as abs (A)
     A >  0 ? A : -A    same as abs (A)
     A <= 0 ? A : -A    same as -abs (A)
     A <  0 ? A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (type)
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return fold_convert_loc (loc, type, negate_expr (tem));
      case NE_EXPR:
      case LTGT_EXPR:
	return fold_convert_loc (loc, type, arg1);
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return fold_convert_loc (loc, type, tem);
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* FALLTHRU */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	    && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
	  {
	    /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
	       is not: it invokes undefined behavior both in abs and in the
	       negation of it.  So use ABSU_EXPR instead.  */
	    tree utype = unsigned_type_for (TREE_TYPE (arg1));
	    tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
	    tem = negate_expr (tem);
	    return fold_convert_loc (loc, type, tem);
	  }
	else
	  {
	    tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	    return negate_expr (fold_convert_loc (loc, type, tem));
	  }
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (type)
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return fold_convert_loc (loc, type, arg1);
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B ? A : B    same as B
     A != B ? A : B    same as A
     A >= B ? A : B    same as max (A, B)
     A >  B ? A : B    same as max (B, A)
     A <= B ? A : B    same as min (A, B)
     A <  B ? A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (type)
      && operand_equal_for_comparison_p (arg01, arg2)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      switch (comp_code)
	{
	case EQ_EXPR:
	  return fold_convert_loc (loc, type, arg2);
	case NE_EXPR:
	  return fold_convert_loc (loc, type, arg1);
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg2);
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg1);
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  return NULL_TREE;
}
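/* A sketch of the A op 0 ? A : -A family above (hypothetical helpers,
   not compiled).  The rewrite is only valid when signed zeros need not
   be honored: with x == -0.0f the conditional form yields -0.0f (since
   -0.0f >= 0.0f is true) while the abs form yields +0.0f.  */
#if 0
static float
cond_abs (float x)
{
  return x >= 0.0f ? x : -x;	/* folded to abs (x) when allowed */
}

static float
cond_abs_folded (float x)
{
  return __builtin_fabsf (x);
}
#endif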
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  /* If op0 is known true or false and this is a short-circuiting
     operation we must not merge with op1 since that makes side-effects
     unconditional.  So special-case this.  */
  if (!lhs
      && ((code == TRUTH_ORIF_EXPR && in0_p)
	  || (code == TRUTH_ANDIF_EXPR && !in0_p)))
    return op0;
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it, or its inversion.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && (tem = (build_range_check (loc, type,
				    lhs != 0 ? lhs
				    : rhs != 0 ? rhs : integer_zero_node,
				    in_p, low, high))) != 0)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (param_logical_op_non_short_circuit != -1)
    logical_op_non_short_circuit
      = param_logical_op_non_short_circuit;
  if (logical_op_non_short_circuit
      && !sanitize_coverage_p ()
      && lhs != 0 && rhs != 0
      && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
      && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if ((lhs = build_range_check (loc, type, common,
					or_op ? ! in0_p : in0_p,
					low0, high0)) != 0
	      && (rhs = build_range_check (loc, type, common,
					   or_op ? ! in1_p : in1_p,
					   low1, high1)) != 0)
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
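/* A sketch of a merged range test (hypothetical helpers, not compiled):
   the short-circuit "&&" collapses into one unsigned comparison, so no
   branch is needed between the two tests.  */
#if 0
static int
is_digit (unsigned char ch)
{
  return ch >= '0' && ch <= '9';
}

static int
is_digit_folded (unsigned char ch)
{
  return (unsigned int) ch - '0' <= 9u;
}
#endif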
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c),
			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
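/* A sketch of unextend for p = 8 within a 32-bit mode (hypothetical
   helper, not compiled).  EXT covers exactly the "extra" bits 8..31,
   so the XOR leaves them zero iff C was already sign-extended from
   bit 7.  */
#if 0
static unsigned int
unextend_8_in_32 (unsigned int c)
{
  unsigned int sign = (c >> 7) & 1u;	/* sign bit into the low bit */
  /* The code above builds this mask by shifting SIGN up to bit 31 and
     arithmetic-right-shifting it back down to bit 8.  */
  unsigned int ext = sign ? 0xffffff00u : 0u;
  return c ^ ext;
}
#endif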
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
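/* A sketch of the identity above on plain ints (hypothetical helpers,
   not compiled): whenever B is false the right arm already fires, so
   the inner B test is redundant.  The RHS_ONLY restriction keeps guard
   patterns such as (p != 0 && p->x) || p == 0 intact, where dropping
   the left comparison would make p->x unconditional.  */
#if 0
static int
truthop_before (int a, int b)
{
  return (a && b) || !b;
}

static int
truthop_after (int a, int b)
{
  return a || !b;
}
#endif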
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  scalar_int_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, &ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &ll_reversep, &volatilep,
				     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, &lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &lr_reversep, &volatilep,
				     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, &rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &rl_reversep, &volatilep,
				     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, &rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &rr_reversep, &volatilep,
				     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
	   || lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
		      volatilep, &lnmode))
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));
  if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
    return 0;

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (l_const == NULL_TREE)
	return 0;
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (r_const == NULL_TREE)
	return 0;
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size, signedness or storage order
     mismatch occurs between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  || ll_reversep != lr_reversep
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
			  volatilep, &rnmode))
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));
      if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
	return 0;

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize
	  && xll_bitpos == xlr_bitpos
	  && lnbitpos >= 0
	  && rnbitpos >= 0)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
				    lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp, ll_reversep);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
				    rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp, lr_reversep);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if (((ll_bitsize + ll_bitpos == rl_bitpos
	    && lr_bitsize + lr_bitpos == rr_bitpos)
	   || (ll_bitpos == rl_bitpos + rl_bitsize
	       && lr_bitpos == rr_bitpos + rr_bitsize))
	  && ll_bitpos >= 0
	  && rl_bitpos >= 0
	  && lr_bitpos >= 0
	  && rr_bitpos >= 0)
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos),
				    ll_unsignedp, ll_reversep);
	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos),
				    lr_unsignedp, lr_reversep);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
	  if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
	    return 0;

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  if (lnbitpos < 0)
    return 0;

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
			       lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
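/* A sketch of the merged-field comparison (hypothetical struct and
   helpers, not compiled).  The folder performs the equivalent at the
   tree level with one load, one mask and one compare, when alignment,
   layout and endianness permit; the merged constant below assumes
   little-endian byte order.  */
#if 0
struct two_fields { unsigned char a, b; };

static int
fields_before (const struct two_fields *p)
{
  return p->a == 2 && p->b == 4;
}

static int
fields_after (const struct two_fields *p)
{
  unsigned short word;
  __builtin_memcpy (&word, &p->a, sizeof word);	/* one 16-bit load */
  return word == (unsigned short) (2 | (4 << 8));
}
#endif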
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0
		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
				TYPE_SIGN (type)))
	{
	  tree tem = const_binop (code, fold_convert (ctype, t),
				  fold_convert (ctype, c));
	  /* If the multiplication overflowed, we lost information on it.
	     See PR68142 and PR69845.  */
	  if (TREE_OVERFLOW (tem))
	    return NULL_TREE;
	  return tem;
	}
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
	break;
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the type converted to
		 has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (t1 = extract_muldiv (op0, t2, code,
				   code == MULT_EXPR ? ctype : NULL_TREE,
				   strict_overflow_p)) != 0)
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
			wi::to_wide (op1))
	  && (t1 = fold_convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1))) != 0
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && TYPE_OVERFLOW_WRAPS (ctype)
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      std::swap (op0, op1);
	      std::swap (t1, t2);
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
				TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
	 But fold_plusminus_mult_expr would factor back any power-of-two
	 value so do not distribute in the first place in this case.  */
      if (code == MULT_EXPR
	  && TYPE_OVERFLOW_WRAPS (ctype)
	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
				TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && TYPE_OVERFLOW_WRAPS (ctype)
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && TYPE_OVERFLOW_WRAPS (ctype)
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  wi::overflow_type overflow_mul;
	  signop sign = TYPE_SIGN (ctype);
	  unsigned prec = TYPE_PRECISION (ctype);
	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
				  wi::to_wide (c, prec),
				  sign, &overflow_mul);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
				 TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
				      TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
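/* A sketch of the header-comment example for extract_muldiv
   (hypothetical helpers, not compiled).  Dividing each multiplier is
   valid here because signed overflow is undefined, so the compiler may
   assume the original sum does not overflow.  */
#if 0
static int
muldiv_before (int x, int y)
{
  return (x * 8 + y * 16) / 4;
}

static int
muldiv_after (int x, int y)
{
  return x * 2 + y * 4;
}
#endif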
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  /* Do not move possibly trapping operations into the conditional as this
     pessimizes code and causes gimplification issues when applied late.  */
  if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
			      ANY_INTEGRAL_TYPE_P (type)
			      && TYPE_OVERFLOW_TRAPS (type), op1))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If an arm has void type (e.g. it is a throw expression), it does
	 not make sense to try to perform a logical or arithmetic
	 operation involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else if (!(TREE_CODE (type) != VECTOR_TYPE
	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }
  else
    /* Detect the case of mixing vector and scalar types - bail out.  */
    return NULL_TREE;

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
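/* A sketch of the transform above (hypothetical helpers, not
   compiled), profitable because one branch folds to a constant:
   a + 0 simplifies back to a.  */
#if 0
static int
condarg_before (int a, int x)
{
  return a + (x < 0 ? 1 : 0);
}

static int
condarg_after (int a, int x)
{
  return x < 0 ? a + 1 : a;	/* a + 0 folded away */
}
#endif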
7222 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7224 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7225 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7226 if ARG - ZERO_ARG is the same as X.
7228 If ARG is NULL, check for any value of type TYPE.
7230 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7231 and finite. The problematic cases are when X is zero, and its mode
7232 has signed zeros. In the case of rounding towards -infinity,
7233 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7234 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7236 bool
7237 fold_real_zero_addition_p (const_tree type, const_tree arg,
7238 const_tree zero_arg, int negate)
7240 if (!real_zerop (zero_arg))
7241 return false;
7243 /* Don't allow the fold with -fsignaling-nans. */
7244 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7245 return false;
7247 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7248 if (!HONOR_SIGNED_ZEROS (type))
7249 return true;
7251 /* There is no case that is safe for all rounding modes. */
7252 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7253 return false;
7255 /* In a vector or complex, we would need to check the sign of all zeros. */
7256 if (TREE_CODE (zero_arg) == VECTOR_CST)
7257 zero_arg = uniform_vector_p (zero_arg);
7258 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7259 return false;
7261 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7262 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7263 negate = !negate;
7265 /* The mode has signed zeros, and we have to honor their sign.
7266 In this situation, there are only two cases we can return true for.
7267 (i) X - 0 is the same as X with default rounding.
7268 (ii) X + 0 is X when X can't possibly be -0.0. */
7269 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
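/* Worked example, assuming IEEE arithmetic with signed zeros honored and
   default rounding: `x + 0.0' cannot be folded to `x', because
   (-0.0) + 0.0 evaluates to +0.0.  `x - 0.0' can be folded, since
   (-0.0) - 0.0 is still -0.0 under round-to-nearest; this is case (i)
   above.  With -fno-signed-zeros both forms fold unconditionally.  */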
7272 /* Subroutine of match.pd that optimizes comparisons of a division by
7273 a nonzero integer constant against an integer constant, i.e.
7274 X/C1 op C2.
7276 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7277 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7279 enum tree_code
7280 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7281 tree *hi, bool *neg_overflow)
7283 tree prod, tmp, type = TREE_TYPE (c1);
7284 signop sign = TYPE_SIGN (type);
7285 wi::overflow_type overflow;
7287 /* We have to do this the hard way to detect unsigned overflow.
7288 prod = int_const_binop (MULT_EXPR, c1, c2); */
7289 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7290 prod = force_fit_type (type, val, -1, overflow);
7291 *neg_overflow = false;
7293 if (sign == UNSIGNED)
7295 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7296 *lo = prod;
7298 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7299 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7300 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7302 else if (tree_int_cst_sgn (c1) >= 0)
7304 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7305 switch (tree_int_cst_sgn (c2))
7307 case -1:
7308 *neg_overflow = true;
7309 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7310 *hi = prod;
7311 break;
7313 case 0:
7314 *lo = fold_negate_const (tmp, type);
7315 *hi = tmp;
7316 break;
7318 case 1:
7319 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7320 *lo = prod;
7321 break;
7323 default:
7324 gcc_unreachable ();
7327 else
7329 /* A negative divisor reverses the relational operators. */
7330 code = swap_tree_comparison (code);
7332 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7333 switch (tree_int_cst_sgn (c2))
7335 case -1:
7336 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7337 *lo = prod;
7338 break;
7340 case 0:
7341 *hi = fold_negate_const (tmp, type);
7342 *lo = tmp;
7343 break;
7345 case 1:
7346 *neg_overflow = true;
7347 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7348 *hi = prod;
7349 break;
7351 default:
7352 gcc_unreachable ();
7356 if (code != EQ_EXPR && code != NE_EXPR)
7357 return code;
7359 if (TREE_OVERFLOW (*lo)
7360 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7361 *lo = NULL_TREE;
7362 if (TREE_OVERFLOW (*hi)
7363 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7364 *hi = NULL_TREE;
7366 return code;
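/* Worked example: for unsigned X and the comparison X / 3 == 2 we have
   C1 == 3 and C2 == 2, so prod == 6 and tmp == 2, giving *lo == 6 and
   *hi == 8; the caller can rewrite the test as 6 <= X && X <= 8.  For a
   negative divisor, e.g. X / -3 < 2 in a signed type, CODE is first
   swapped to GT_EXPR, because dividing by a negative constant reverses
   the relational operators.  */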
7370 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7371 equality/inequality test, then return a simplified form of the test
7372 using a sign test. Otherwise return NULL. RESULT_TYPE is the
7373 desired result type. */
7375 static tree
7376 fold_single_bit_test_into_sign_test (location_t loc,
7377 enum tree_code code, tree arg0, tree arg1,
7378 tree result_type)
7380 /* If this is testing a single bit, we can optimize the test. */
7381 if ((code == NE_EXPR || code == EQ_EXPR)
7382 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7383 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7385 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7386 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7387 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7389 if (arg00 != NULL_TREE
7390 /* This is only a win if casting to a signed type is cheap,
7391 i.e. when arg00's type is not a partial mode. */
7392 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7394 tree stype = signed_type_for (TREE_TYPE (arg00));
7395 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7396 result_type,
7397 fold_convert_loc (loc, stype, arg00),
7398 build_int_cst (stype, 0));
7402 return NULL_TREE;
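/* Example: with an 8-bit unsigned A, the mask 0x80 selects exactly the
   sign bit of the corresponding signed type, so (A & 0x80) != 0 becomes
   (signed char) A < 0 and (A & 0x80) == 0 becomes (signed char) A >= 0,
   provided the signed type has full (non-partial) 8-bit precision.  */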
7405 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7406 equality/inequality test, then return a simplified form of
7407 the test using shifts and logical operations. Otherwise return
7408 NULL. RESULT_TYPE is the desired result type. */
7410 tree
7411 fold_single_bit_test (location_t loc, enum tree_code code,
7412 tree arg0, tree arg1, tree result_type)
7414 /* If this is testing a single bit, we can optimize the test. */
7415 if ((code == NE_EXPR || code == EQ_EXPR)
7416 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7417 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7419 tree inner = TREE_OPERAND (arg0, 0);
7420 tree type = TREE_TYPE (arg0);
7421 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7422 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7423 int ops_unsigned;
7424 tree signed_type, unsigned_type, intermediate_type;
7425 tree tem, one;
7427 /* First, see if we can fold the single bit test into a sign-bit
7428 test. */
7429 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7430 result_type);
7431 if (tem)
7432 return tem;
7434 /* Otherwise we have (A & C) != 0 where C is a single bit,
7435 convert that into ((A >> C2) & 1), where C2 = log2(C).
7436 Similarly for (A & C) == 0. */
7438 /* If INNER is a right shift by a constant and that shift count plus
7439 BITNUM stays within the type's precision, adjust BITNUM and INNER. */
7440 if (TREE_CODE (inner) == RSHIFT_EXPR
7441 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7442 && bitnum < TYPE_PRECISION (type)
7443 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7444 TYPE_PRECISION (type) - bitnum))
7446 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7447 inner = TREE_OPERAND (inner, 0);
7450 /* If we are going to be able to omit the AND below, we must do our
7451 operations as unsigned. If we must use the AND, we have a choice.
7452 Normally unsigned is faster, but for some machines signed is. */
7453 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7454 && !flag_syntax_only) ? 0 : 1;
7456 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7457 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7458 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7459 inner = fold_convert_loc (loc, intermediate_type, inner);
7461 if (bitnum != 0)
7462 inner = build2 (RSHIFT_EXPR, intermediate_type,
7463 inner, size_int (bitnum));
7465 one = build_int_cst (intermediate_type, 1);
7467 if (code == EQ_EXPR)
7468 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7470 /* Put the AND last so it can combine with more things. */
7471 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7473 /* Make sure to return the proper type. */
7474 inner = fold_convert_loc (loc, result_type, inner);
7476 return inner;
7478 return NULL_TREE;
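/* Example of the shift form, used when the tested bit is not the sign
   bit: (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0 gains an
   extra XOR, ((A >> 3) ^ 1) & 1, so that the AND stays outermost and
   can combine with surrounding code.  */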
7481 /* Test whether it is preferable to swap two operands, ARG0 and
7482 ARG1, for example because ARG0 is an integer constant and ARG1
7483 isn't. */
7485 bool
7486 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7488 if (CONSTANT_CLASS_P (arg1))
7489 return 0;
7490 if (CONSTANT_CLASS_P (arg0))
7491 return 1;
7493 STRIP_NOPS (arg0);
7494 STRIP_NOPS (arg1);
7496 if (TREE_CONSTANT (arg1))
7497 return 0;
7498 if (TREE_CONSTANT (arg0))
7499 return 1;
7501 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7502 for commutative and comparison operators. Ensuring a canonical
7503 form allows the optimizers to find additional redundancies without
7504 having to explicitly check for both orderings. */
7505 if (TREE_CODE (arg0) == SSA_NAME
7506 && TREE_CODE (arg1) == SSA_NAME
7507 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7508 return 1;
7510 /* Put SSA_NAMEs last. */
7511 if (TREE_CODE (arg1) == SSA_NAME)
7512 return 0;
7513 if (TREE_CODE (arg0) == SSA_NAME)
7514 return 1;
7516 /* Put variables last. */
7517 if (DECL_P (arg1))
7518 return 0;
7519 if (DECL_P (arg0))
7520 return 1;
7522 return 0;
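/* Example of the canonical order this produces: for `5 + a' the
   predicate returns true, so callers rebuild the expression as `a + 5';
   for two SSA names, _7 + _3 and _3 + _7 both become _3 + _7 (ordered
   by SSA_NAME_VERSION), letting value numbering treat them as a single
   expression.  */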
7526 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7527 means A >= Y && A != MAX, but in this case we know that
7528 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7530 static tree
7531 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7533 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7535 if (TREE_CODE (bound) == LT_EXPR)
7536 a = TREE_OPERAND (bound, 0);
7537 else if (TREE_CODE (bound) == GT_EXPR)
7538 a = TREE_OPERAND (bound, 1);
7539 else
7540 return NULL_TREE;
7542 typea = TREE_TYPE (a);
7543 if (!INTEGRAL_TYPE_P (typea)
7544 && !POINTER_TYPE_P (typea))
7545 return NULL_TREE;
7547 if (TREE_CODE (ineq) == LT_EXPR)
7549 a1 = TREE_OPERAND (ineq, 1);
7550 y = TREE_OPERAND (ineq, 0);
7552 else if (TREE_CODE (ineq) == GT_EXPR)
7554 a1 = TREE_OPERAND (ineq, 0);
7555 y = TREE_OPERAND (ineq, 1);
7557 else
7558 return NULL_TREE;
7560 if (TREE_TYPE (a1) != typea)
7561 return NULL_TREE;
7563 if (POINTER_TYPE_P (typea))
7565 /* Convert the pointers to signed integers before taking the difference. */
7566 tree ta = fold_convert_loc (loc, ssizetype, a);
7567 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7568 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7570 else
7571 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7573 if (!diff || !integer_onep (diff))
7574 return NULL_TREE;
7576 return fold_build2_loc (loc, GE_EXPR, type, a, y);
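/* Example: with BOUND `a < x' and INEQ `a + 1 > y' over the same
   integral type, diff == (a + 1) - a folds to 1, so the result is
   `a >= y'.  This is safe because the bound a < x implies a != MAX,
   ruling out wrap-around in a + 1.  */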
7579 /* Fold a sum or difference of at least one multiplication.
7580 Returns the folded tree or NULL if no simplification could be made. */
7582 static tree
7583 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7584 tree arg0, tree arg1)
7586 tree arg00, arg01, arg10, arg11;
7587 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7589 /* (A * C) +- (B * C) -> (A+-B) * C.
7590 (A * C) +- A -> A * (C+-1).
7591 We are most concerned about the case where C is a constant,
7592 but other combinations show up during loop reduction. Since
7593 it is not difficult, try all four possibilities. */
7595 if (TREE_CODE (arg0) == MULT_EXPR)
7597 arg00 = TREE_OPERAND (arg0, 0);
7598 arg01 = TREE_OPERAND (arg0, 1);
7600 else if (TREE_CODE (arg0) == INTEGER_CST)
7602 arg00 = build_one_cst (type);
7603 arg01 = arg0;
7605 else
7607 /* We cannot generate constant 1 for fract. */
7608 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7609 return NULL_TREE;
7610 arg00 = arg0;
7611 arg01 = build_one_cst (type);
7613 if (TREE_CODE (arg1) == MULT_EXPR)
7615 arg10 = TREE_OPERAND (arg1, 0);
7616 arg11 = TREE_OPERAND (arg1, 1);
7618 else if (TREE_CODE (arg1) == INTEGER_CST)
7620 arg10 = build_one_cst (type);
7621 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7622 the purpose of this canonicalization. */
7623 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7624 && negate_expr_p (arg1)
7625 && code == PLUS_EXPR)
7627 arg11 = negate_expr (arg1);
7628 code = MINUS_EXPR;
7630 else
7631 arg11 = arg1;
7633 else
7635 /* We cannot generate constant 1 for fract. */
7636 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7637 return NULL_TREE;
7638 arg10 = arg1;
7639 arg11 = build_one_cst (type);
7641 same = NULL_TREE;
7643 /* Prefer factoring a common non-constant. */
7644 if (operand_equal_p (arg00, arg10, 0))
7645 same = arg00, alt0 = arg01, alt1 = arg11;
7646 else if (operand_equal_p (arg01, arg11, 0))
7647 same = arg01, alt0 = arg00, alt1 = arg10;
7648 else if (operand_equal_p (arg00, arg11, 0))
7649 same = arg00, alt0 = arg01, alt1 = arg10;
7650 else if (operand_equal_p (arg01, arg10, 0))
7651 same = arg01, alt0 = arg00, alt1 = arg11;
7653 /* No identical multiplicands; see if we can find a common
7654 power-of-two factor in non-power-of-two multiplies. This
7655 can help in multi-dimensional array access. */
7656 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7658 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7659 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7660 HOST_WIDE_INT tmp;
7661 bool swap = false;
7662 tree maybe_same;
7664 /* Move min of absolute values to int11. */
7665 if (absu_hwi (int01) < absu_hwi (int11))
7667 tmp = int01, int01 = int11, int11 = tmp;
7668 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7669 maybe_same = arg01;
7670 swap = true;
7672 else
7673 maybe_same = arg11;
7675 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7676 if (factor > 1
7677 && pow2p_hwi (factor)
7678 && (int01 & (factor - 1)) == 0
7679 /* The remainder should not be a constant, otherwise we
7680 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7681 would increase the number of multiplications necessary. */
7682 && TREE_CODE (arg10) != INTEGER_CST)
7684 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7685 build_int_cst (TREE_TYPE (arg00),
7686 int01 / int11));
7687 alt1 = arg10;
7688 same = maybe_same;
7689 if (swap)
7690 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7694 if (!same)
7695 return NULL_TREE;
7697 if (! ANY_INTEGRAL_TYPE_P (type)
7698 || TYPE_OVERFLOW_WRAPS (type)
7699 /* We are neither factoring zero nor minus one. */
7700 || TREE_CODE (same) == INTEGER_CST)
7701 return fold_build2_loc (loc, MULT_EXPR, type,
7702 fold_build2_loc (loc, code, type,
7703 fold_convert_loc (loc, type, alt0),
7704 fold_convert_loc (loc, type, alt1)),
7705 fold_convert_loc (loc, type, same));
7707 /* SAME may be zero and thus the operation CODE may overflow. Likewise,
7708 SAME may be minus one and thus the multiplication may overflow. Perform
7709 the sum operation in an unsigned type. */
7710 tree utype = unsigned_type_for (type);
7711 tree tem = fold_build2_loc (loc, code, utype,
7712 fold_convert_loc (loc, utype, alt0),
7713 fold_convert_loc (loc, utype, alt1));
7714 /* If the sum evaluated to a constant that is not -INF, the multiplication
7715 cannot overflow. */
7716 if (TREE_CODE (tem) == INTEGER_CST
7717 && (wi::to_wide (tem)
7718 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7719 return fold_build2_loc (loc, MULT_EXPR, type,
7720 fold_convert (type, tem), same);
7722 /* Do not resort to unsigned multiplication because
7723 we lose the no-overflow property of the expression. */
7724 return NULL_TREE;
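/* Examples of the cases above: i*4 + j*4 factors to (i + j) * 4;
   i*4 + i becomes i * (4 + 1), i.e. i * 5; and the power-of-two path
   rewrites i*8 + j*4 as (i*2 + j) * 4.  The constant-remainder guard
   keeps i*4 + 2 from becoming (i*2 + 1) * 2, which would add a
   multiplication instead of removing one.  */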
7727 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7728 specified by EXPR into the buffer PTR of length LEN bytes.
7729 Return the number of bytes placed in the buffer, or zero
7730 upon failure. */
7732 static int
7733 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7735 tree type = TREE_TYPE (expr);
7736 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7737 int byte, offset, word, words;
7738 unsigned char value;
7740 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7741 return 0;
7742 if (off == -1)
7743 off = 0;
7745 if (ptr == NULL)
7746 /* Dry run. */
7747 return MIN (len, total_bytes - off);
7749 words = total_bytes / UNITS_PER_WORD;
7751 for (byte = 0; byte < total_bytes; byte++)
7753 int bitpos = byte * BITS_PER_UNIT;
7754 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7755 number of bytes. */
7756 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7758 if (total_bytes > UNITS_PER_WORD)
7760 word = byte / UNITS_PER_WORD;
7761 if (WORDS_BIG_ENDIAN)
7762 word = (words - 1) - word;
7763 offset = word * UNITS_PER_WORD;
7764 if (BYTES_BIG_ENDIAN)
7765 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7766 else
7767 offset += byte % UNITS_PER_WORD;
7769 else
7770 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7771 if (offset >= off && offset - off < len)
7772 ptr[offset - off] = value;
7774 return MIN (len, total_bytes - off);
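/* Example: encoding the 32-bit INTEGER_CST 0x01020304 on a little-endian
   target stores the bytes 04 03 02 01, on a big-endian target
   01 02 03 04.  With a non-negative OFF only the bytes from that offset
   onward are stored, and a NULL PTR performs a dry run that merely
   reports how many bytes would have been written.  */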
7778 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7779 specified by EXPR into the buffer PTR of length LEN bytes.
7780 Return the number of bytes placed in the buffer, or zero
7781 upon failure. */
7783 static int
7784 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7786 tree type = TREE_TYPE (expr);
7787 scalar_mode mode = SCALAR_TYPE_MODE (type);
7788 int total_bytes = GET_MODE_SIZE (mode);
7789 FIXED_VALUE_TYPE value;
7790 tree i_value, i_type;
7792 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7793 return 0;
7795 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7797 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7798 return 0;
7800 value = TREE_FIXED_CST (expr);
7801 i_value = double_int_to_tree (i_type, value.data);
7803 return native_encode_int (i_value, ptr, len, off);
7807 /* Subroutine of native_encode_expr. Encode the REAL_CST
7808 specified by EXPR into the buffer PTR of length LEN bytes.
7809 Return the number of bytes placed in the buffer, or zero
7810 upon failure. */
7812 static int
7813 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7815 tree type = TREE_TYPE (expr);
7816 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7817 int byte, offset, word, words, bitpos;
7818 unsigned char value;
7820 /* There are always 32 bits in each long, no matter the size of
7821 the host's long. We handle floating point representations with
7822 up to 192 bits. */
7823 long tmp[6];
7825 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7826 return 0;
7827 if (off == -1)
7828 off = 0;
7830 if (ptr == NULL)
7831 /* Dry run. */
7832 return MIN (len, total_bytes - off);
7834 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7836 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7838 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7839 bitpos += BITS_PER_UNIT)
7841 byte = (bitpos / BITS_PER_UNIT) & 3;
7842 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7844 if (UNITS_PER_WORD < 4)
7846 word = byte / UNITS_PER_WORD;
7847 if (WORDS_BIG_ENDIAN)
7848 word = (words - 1) - word;
7849 offset = word * UNITS_PER_WORD;
7850 if (BYTES_BIG_ENDIAN)
7851 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7852 else
7853 offset += byte % UNITS_PER_WORD;
7855 else
7857 offset = byte;
7858 if (BYTES_BIG_ENDIAN)
7860 /* Reverse bytes within each long, or within the entire float
7861 if it's smaller than a long (for HFmode). */
7862 offset = MIN (3, total_bytes - 1) - offset;
7863 gcc_assert (offset >= 0);
7866 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7867 if (offset >= off
7868 && offset - off < len)
7869 ptr[offset - off] = value;
7871 return MIN (len, total_bytes - off);
7874 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7875 specified by EXPR into the buffer PTR of length LEN bytes.
7876 Return the number of bytes placed in the buffer, or zero
7877 upon failure. */
7879 static int
7880 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7882 int rsize, isize;
7883 tree part;
7885 part = TREE_REALPART (expr);
7886 rsize = native_encode_expr (part, ptr, len, off);
7887 if (off == -1 && rsize == 0)
7888 return 0;
7889 part = TREE_IMAGPART (expr);
7890 if (off != -1)
7891 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7892 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7893 len - rsize, off);
7894 if (off == -1 && isize != rsize)
7895 return 0;
7896 return rsize + isize;
7899 /* Like native_encode_vector, but only encode the first COUNT elements.
7900 The other arguments are as for native_encode_vector. */
7902 static int
7903 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7904 int off, unsigned HOST_WIDE_INT count)
7906 tree itype = TREE_TYPE (TREE_TYPE (expr));
7907 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7908 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7910 /* This is the only case in which elements can be smaller than a byte.
7911 Element 0 is always in the lsb of the containing byte. */
7912 unsigned int elt_bits = TYPE_PRECISION (itype);
7913 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7914 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7915 return 0;
7917 if (off == -1)
7918 off = 0;
7920 /* Zero the buffer and then set bits later where necessary. */
7921 int extract_bytes = MIN (len, total_bytes - off);
7922 if (ptr)
7923 memset (ptr, 0, extract_bytes);
7925 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7926 unsigned int first_elt = off * elts_per_byte;
7927 unsigned int extract_elts = extract_bytes * elts_per_byte;
7928 for (unsigned int i = 0; i < extract_elts; ++i)
7930 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7931 if (TREE_CODE (elt) != INTEGER_CST)
7932 return 0;
7934 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7936 unsigned int bit = i * elt_bits;
7937 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7940 return extract_bytes;
7943 int offset = 0;
7944 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7945 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7947 if (off >= size)
7949 off -= size;
7950 continue;
7952 tree elem = VECTOR_CST_ELT (expr, i);
7953 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7954 len - offset, off);
7955 if ((off == -1 && res != size) || res == 0)
7956 return 0;
7957 offset += res;
7958 if (offset >= len)
7959 return (off == -1 && i < count - 1) ? 0 : offset;
7960 if (off != -1)
7961 off = 0;
7963 return offset;
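/* Example: for a vector-boolean type with 1-bit elements, the mask
   {1, 0, 1, 1} occupies a single byte with element 0 in the least
   significant bit, so the encoded value is 0x0d.  For all other element
   types, each element is encoded with native_encode_expr at consecutive
   byte offsets.  */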
7966 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7967 specified by EXPR into the buffer PTR of length LEN bytes.
7968 Return the number of bytes placed in the buffer, or zero
7969 upon failure. */
7971 static int
7972 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7974 unsigned HOST_WIDE_INT count;
7975 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7976 return 0;
7977 return native_encode_vector_part (expr, ptr, len, off, count);
7981 /* Subroutine of native_encode_expr. Encode the STRING_CST
7982 specified by EXPR into the buffer PTR of length LEN bytes.
7983 Return the number of bytes placed in the buffer, or zero
7984 upon failure. */
7986 static int
7987 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7989 tree type = TREE_TYPE (expr);
7991 /* Wide-char strings are encoded in target byte order, so natively
7992 encoding them is trivial. */
7993 if (BITS_PER_UNIT != CHAR_BIT
7994 || TREE_CODE (type) != ARRAY_TYPE
7995 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7996 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7997 return 0;
7999 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8000 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8001 return 0;
8002 if (off == -1)
8003 off = 0;
8004 len = MIN (total_bytes - off, len);
8005 if (ptr == NULL)
8006 /* Dry run. */;
8007 else
8009 int written = 0;
8010 if (off < TREE_STRING_LENGTH (expr))
8012 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8013 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8015 memset (ptr + written, 0, len - written);
8017 return len;
8021 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8022 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8023 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8024 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8025 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8026 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8027 Return the number of bytes placed in the buffer, or zero upon failure. */
8030 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8032 /* We don't support starting at negative offset and -1 is special. */
8033 if (off < -1)
8034 return 0;
8036 switch (TREE_CODE (expr))
8038 case INTEGER_CST:
8039 return native_encode_int (expr, ptr, len, off);
8041 case REAL_CST:
8042 return native_encode_real (expr, ptr, len, off);
8044 case FIXED_CST:
8045 return native_encode_fixed (expr, ptr, len, off);
8047 case COMPLEX_CST:
8048 return native_encode_complex (expr, ptr, len, off);
8050 case VECTOR_CST:
8051 return native_encode_vector (expr, ptr, len, off);
8053 case STRING_CST:
8054 return native_encode_string (expr, ptr, len, off);
8056 default:
8057 return 0;
8061 /* Try to find a type whose byte size is at least FIELDSIZE bytes and
8062 at most LEN bytes, with underlying mode precision/size a multiple
8063 of BITS_PER_UNIT. As native_{interpret,encode}_int work in terms of
8064 machine modes, we can't just use build_nonstandard_integer_type.
8066 tree
8067 find_bitfield_repr_type (int fieldsize, int len)
8069 machine_mode mode;
8070 for (int pass = 0; pass < 2; pass++)
8072 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8073 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8074 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8075 && known_eq (GET_MODE_PRECISION (mode),
8076 GET_MODE_BITSIZE (mode))
8077 && known_le (GET_MODE_SIZE (mode), len))
8079 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8080 if (ret && TYPE_MODE (ret) == mode)
8081 return ret;
8085 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8086 if (int_n_enabled_p[i]
8087 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8088 && int_n_trees[i].unsigned_type)
8090 tree ret = int_n_trees[i].unsigned_type;
8091 mode = TYPE_MODE (ret);
8092 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8093 && known_eq (GET_MODE_PRECISION (mode),
8094 GET_MODE_BITSIZE (mode))
8095 && known_le (GET_MODE_SIZE (mode), len))
8096 return ret;
8099 return NULL_TREE;
8102 /* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8103 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8104 must be non-NULL and OFF zero), then in addition to filling the
8105 bytes pointed to by PTR with the value, also clear any bits pointed
8106 to by MASK that are known to be initialized; the remaining bits, e.g.
8107 uninitialized padding bits or uninitialized fields, are kept as-is. */
8110 native_encode_initializer (tree init, unsigned char *ptr, int len,
8111 int off, unsigned char *mask)
8113 int r;
8115 /* We don't support starting at negative offset and -1 is special. */
8116 if (off < -1 || init == NULL_TREE)
8117 return 0;
8119 gcc_assert (mask == NULL || (off == 0 && ptr));
8121 STRIP_NOPS (init);
8122 switch (TREE_CODE (init))
8124 case VIEW_CONVERT_EXPR:
8125 case NON_LVALUE_EXPR:
8126 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8127 mask);
8128 default:
8129 r = native_encode_expr (init, ptr, len, off);
8130 if (mask)
8131 memset (mask, 0, r);
8132 return r;
8133 case CONSTRUCTOR:
8134 tree type = TREE_TYPE (init);
8135 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8136 if (total_bytes < 0)
8137 return 0;
8138 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8139 return 0;
8140 int o = off == -1 ? 0 : off;
8141 if (TREE_CODE (type) == ARRAY_TYPE)
8143 tree min_index;
8144 unsigned HOST_WIDE_INT cnt;
8145 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8146 constructor_elt *ce;
8148 if (!TYPE_DOMAIN (type)
8149 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8150 return 0;
8152 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8153 if (fieldsize <= 0)
8154 return 0;
8156 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8157 if (ptr)
8158 memset (ptr, '\0', MIN (total_bytes - off, len));
8160 for (cnt = 0; ; cnt++)
8162 tree val = NULL_TREE, index = NULL_TREE;
8163 HOST_WIDE_INT pos = curpos, count = 0;
8164 bool full = false;
8165 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8167 val = ce->value;
8168 index = ce->index;
8170 else if (mask == NULL
8171 || CONSTRUCTOR_NO_CLEARING (init)
8172 || curpos >= total_bytes)
8173 break;
8174 else
8175 pos = total_bytes;
8177 if (index && TREE_CODE (index) == RANGE_EXPR)
8179 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8180 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8181 return 0;
8182 offset_int wpos
8183 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8184 - wi::to_offset (min_index),
8185 TYPE_PRECISION (sizetype));
8186 wpos *= fieldsize;
8188 if (!wi::fits_shwi_p (wpos))
8188 return 0;
8189 pos = wpos.to_shwi ();
8190 offset_int wcount
8191 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8192 - wi::to_offset (TREE_OPERAND (index, 0)),
8193 TYPE_PRECISION (sizetype));
8194 if (!wi::fits_shwi_p (wcount))
8195 return 0;
8196 count = wcount.to_shwi ();
8198 else if (index)
8200 if (TREE_CODE (index) != INTEGER_CST)
8201 return 0;
8202 offset_int wpos
8203 = wi::sext (wi::to_offset (index)
8204 - wi::to_offset (min_index),
8205 TYPE_PRECISION (sizetype));
8206 wpos *= fieldsize;
8207 if (!wi::fits_shwi_p (wpos))
8208 return 0;
8209 pos = wpos.to_shwi ();
8212 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8214 if (valueinit == -1)
8216 tree zero = build_zero_cst (TREE_TYPE (type));
8217 r = native_encode_initializer (zero, ptr + curpos,
8218 fieldsize, 0,
8219 mask + curpos);
8220 if (TREE_CODE (zero) == CONSTRUCTOR)
8221 ggc_free (zero);
8222 if (!r)
8223 return 0;
8224 valueinit = curpos;
8225 curpos += fieldsize;
8227 while (curpos != pos)
8229 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8230 memcpy (mask + curpos, mask + valueinit, fieldsize);
8231 curpos += fieldsize;
8235 curpos = pos;
8236 if (val)
8239 if (off == -1
8240 || (curpos >= off
8241 && (curpos + fieldsize
8242 <= (HOST_WIDE_INT) off + len)))
8244 if (full)
8246 if (ptr)
8247 memcpy (ptr + (curpos - o), ptr + (pos - o),
8248 fieldsize);
8249 if (mask)
8250 memcpy (mask + curpos, mask + pos, fieldsize);
8252 else if (!native_encode_initializer (val,
8254 ? ptr + curpos - o
8255 : NULL,
8256 fieldsize,
8257 off == -1 ? -1
8258 : 0,
8259 mask
8260 ? mask + curpos
8261 : NULL))
8262 return 0;
8263 else
8265 full = true;
8266 pos = curpos;
8269 else if (curpos + fieldsize > off
8270 && curpos < (HOST_WIDE_INT) off + len)
8272 /* Partial overlap. */
8273 unsigned char *p = NULL;
8274 int no = 0;
8275 int l;
8276 gcc_assert (mask == NULL);
8277 if (curpos >= off)
8279 if (ptr)
8280 p = ptr + curpos - off;
8281 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8282 fieldsize);
8284 else
8286 p = ptr;
8287 no = off - curpos;
8288 l = len;
8290 if (!native_encode_initializer (val, p, l, no, NULL))
8291 return 0;
8293 curpos += fieldsize;
8295 while (count-- != 0);
8297 return MIN (total_bytes - off, len);
8299 else if (TREE_CODE (type) == RECORD_TYPE
8300 || TREE_CODE (type) == UNION_TYPE)
8302 unsigned HOST_WIDE_INT cnt;
8303 constructor_elt *ce;
8304 tree fld_base = TYPE_FIELDS (type);
8305 tree to_free = NULL_TREE;
8307 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8308 if (ptr != NULL)
8309 memset (ptr, '\0', MIN (total_bytes - o, len));
8310 for (cnt = 0; ; cnt++)
8312 tree val = NULL_TREE, field = NULL_TREE;
8313 HOST_WIDE_INT pos = 0, fieldsize;
8314 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8316 if (to_free)
8318 ggc_free (to_free);
8319 to_free = NULL_TREE;
8322 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8324 val = ce->value;
8325 field = ce->index;
8326 if (field == NULL_TREE)
8327 return 0;
8329 pos = int_byte_position (field);
8330 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8331 continue;
8333 else if (mask == NULL
8334 || CONSTRUCTOR_NO_CLEARING (init))
8335 break;
8336 else
8337 pos = total_bytes;
8339 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8341 tree fld;
8342 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8344 if (TREE_CODE (fld) != FIELD_DECL)
8345 continue;
8346 if (fld == field)
8347 break;
8348 if (DECL_PADDING_P (fld))
8349 continue;
8350 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8351 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8352 return 0;
8353 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8354 continue;
8355 break;
8357 if (fld == NULL_TREE)
8359 if (ce == NULL)
8360 break;
8361 return 0;
8363 fld_base = DECL_CHAIN (fld);
8364 if (fld != field)
8366 cnt--;
8367 field = fld;
8368 pos = int_byte_position (field);
8369 val = build_zero_cst (TREE_TYPE (fld));
8370 if (TREE_CODE (val) == CONSTRUCTOR)
8371 to_free = val;
8375 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8376 && TYPE_DOMAIN (TREE_TYPE (field))
8377 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8379 if (mask || off != -1)
8380 return 0;
8381 if (val == NULL_TREE)
8382 continue;
8383 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8384 return 0;
8385 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8386 if (fieldsize < 0
8387 || (int) fieldsize != fieldsize
8388 || (pos + fieldsize) > INT_MAX)
8389 return 0;
8390 if (pos + fieldsize > total_bytes)
8392 if (ptr != NULL && total_bytes < len)
8393 memset (ptr + total_bytes, '\0',
8394 MIN (pos + fieldsize, len) - total_bytes);
8395 total_bytes = pos + fieldsize;
8398 else
8400 if (DECL_SIZE_UNIT (field) == NULL_TREE
8401 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8402 return 0;
8403 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8405 if (fieldsize == 0)
8406 continue;
8408 if (DECL_BIT_FIELD (field))
8410 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8411 return 0;
8412 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8413 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8414 if (bpos % BITS_PER_UNIT)
8415 bpos %= BITS_PER_UNIT;
8416 else
8417 bpos = 0;
8418 fieldsize += bpos;
8419 epos = fieldsize % BITS_PER_UNIT;
8420 fieldsize += BITS_PER_UNIT - 1;
8421 fieldsize /= BITS_PER_UNIT;
8424 if (off != -1 && pos + fieldsize <= off)
8425 continue;
8427 if (val == NULL_TREE)
8428 continue;
8430 if (DECL_BIT_FIELD (field))
8432 /* FIXME: Handle PDP endian. */
8433 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8434 return 0;
8436 if (TREE_CODE (val) != INTEGER_CST)
8437 return 0;
8439 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8440 tree repr_type = NULL_TREE;
8441 HOST_WIDE_INT rpos = 0;
8442 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8444 rpos = int_byte_position (repr);
8445 repr_type = TREE_TYPE (repr);
8447 else
8449 repr_type = find_bitfield_repr_type (fieldsize, len);
8450 if (repr_type == NULL_TREE)
8451 return 0;
8452 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8453 gcc_assert (repr_size > 0 && repr_size <= len);
8454 if (pos + repr_size <= o + len)
8455 rpos = pos;
8456 else
8458 rpos = o + len - repr_size;
8459 gcc_assert (rpos <= pos);
8463 if (rpos > pos)
8464 return 0;
8465 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8466 int diff = (TYPE_PRECISION (repr_type)
8467 - TYPE_PRECISION (TREE_TYPE (field)));
8468 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8469 if (!BYTES_BIG_ENDIAN)
8470 w = wi::lshift (w, bitoff);
8471 else
8472 w = wi::lshift (w, diff - bitoff);
8473 val = wide_int_to_tree (repr_type, w);
8475 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8476 / BITS_PER_UNIT + 1];
8477 int l = native_encode_int (val, buf, sizeof buf, 0);
8478 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8479 return 0;
8481 if (ptr == NULL)
8482 continue;
8484 /* If the bitfield does not start at byte boundary, handle
8485 the partial byte at the start. */
8486 if (bpos
8487 && (off == -1 || (pos >= off && len >= 1)))
8489 if (!BYTES_BIG_ENDIAN)
8491 int msk = (1 << bpos) - 1;
8492 buf[pos - rpos] &= ~msk;
8493 buf[pos - rpos] |= ptr[pos - o] & msk;
8494 if (mask)
8496 if (fieldsize > 1 || epos == 0)
8497 mask[pos] &= msk;
8498 else
8499 mask[pos] &= (msk | ~((1 << epos) - 1));
8502 else
8504 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8505 buf[pos - rpos] &= msk;
8506 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8507 if (mask)
8509 if (fieldsize > 1 || epos == 0)
8510 mask[pos] &= ~msk;
8511 else
8512 mask[pos] &= (~msk
8513 | ((1 << (BITS_PER_UNIT - epos))
8514 - 1));
8518 /* If the bitfield does not end at byte boundary, handle
8519 the partial byte at the end. */
8520 if (epos
8521 && (off == -1
8522 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8524 if (!BYTES_BIG_ENDIAN)
8526 int msk = (1 << epos) - 1;
8527 buf[pos - rpos + fieldsize - 1] &= msk;
8528 buf[pos - rpos + fieldsize - 1]
8529 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8530 if (mask && (fieldsize > 1 || bpos == 0))
8531 mask[pos + fieldsize - 1] &= ~msk;
8533 else
8535 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8536 buf[pos - rpos + fieldsize - 1] &= ~msk;
8537 buf[pos - rpos + fieldsize - 1]
8538 |= ptr[pos + fieldsize - 1 - o] & msk;
8539 if (mask && (fieldsize > 1 || bpos == 0))
8540 mask[pos + fieldsize - 1] &= msk;
8543 if (off == -1
8544 || (pos >= off
8545 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8547 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8548 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8549 memset (mask + pos + (bpos != 0), 0,
8550 fieldsize - (bpos != 0) - (epos != 0));
8552 else
8554 /* Partial overlap. */
8555 HOST_WIDE_INT fsz = fieldsize;
8556 gcc_assert (mask == NULL);
8557 if (pos < off)
8559 fsz -= (off - pos);
8560 pos = off;
8562 if (pos + fsz > (HOST_WIDE_INT) off + len)
8563 fsz = (HOST_WIDE_INT) off + len - pos;
8564 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8566 continue;
8569 if (off == -1
8570 || (pos >= off
8571 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8573 int fldsize = fieldsize;
8574 if (off == -1)
8576 tree fld = DECL_CHAIN (field);
8577 while (fld)
8579 if (TREE_CODE (fld) == FIELD_DECL)
8580 break;
8581 fld = DECL_CHAIN (fld);
8583 if (fld == NULL_TREE)
8584 fldsize = len - pos;
8586 r = native_encode_initializer (val, ptr ? ptr + pos - o
8587 : NULL,
8588 fldsize,
8589 off == -1 ? -1 : 0,
8590 mask ? mask + pos : NULL);
8591 if (!r)
8592 return 0;
8593 if (off == -1
8594 && fldsize != fieldsize
8595 && r > fieldsize
8596 && pos + r > total_bytes)
8597 total_bytes = pos + r;
8599 else
8601 /* Partial overlap. */
8602 unsigned char *p = NULL;
8603 int no = 0;
8604 int l;
8605 gcc_assert (mask == NULL);
8606 if (pos >= off)
8608 if (ptr)
8609 p = ptr + pos - off;
8610 l = MIN ((HOST_WIDE_INT) off + len - pos,
8611 fieldsize);
8613 else
8615 p = ptr;
8616 no = off - pos;
8617 l = len;
8619 if (!native_encode_initializer (val, p, l, no, NULL))
8620 return 0;
8623 return MIN (total_bytes - off, len);
8625 return 0;
8630 /* Subroutine of native_interpret_expr. Interpret the contents of
8631 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8632 If the buffer cannot be interpreted, return NULL_TREE. */
8634 static tree
8635 native_interpret_int (tree type, const unsigned char *ptr, int len)
8637 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8639 if (total_bytes > len
8640 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8641 return NULL_TREE;
8643 wide_int result = wi::from_buffer (ptr, total_bytes);
8645 return wide_int_to_tree (type, result);
8649 /* Subroutine of native_interpret_expr. Interpret the contents of
8650 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8651 If the buffer cannot be interpreted, return NULL_TREE. */
8653 static tree
8654 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8656 scalar_mode mode = SCALAR_TYPE_MODE (type);
8657 int total_bytes = GET_MODE_SIZE (mode);
8658 double_int result;
8659 FIXED_VALUE_TYPE fixed_value;
8661 if (total_bytes > len
8662 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8663 return NULL_TREE;
8665 result = double_int::from_buffer (ptr, total_bytes);
8666 fixed_value = fixed_from_double_int (result, mode);
8668 return build_fixed (type, fixed_value);
8672 /* Subroutine of native_interpret_expr. Interpret the contents of
8673 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8674 If the buffer cannot be interpreted, return NULL_TREE. */
8676 tree
8677 native_interpret_real (tree type, const unsigned char *ptr, int len)
8679 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8680 int total_bytes = GET_MODE_SIZE (mode);
8681 unsigned char value;
8682 /* There are always 32 bits in each long, no matter the size of
8683 the host's long. We handle floating point representations with
8684 up to 192 bits. */
8685 REAL_VALUE_TYPE r;
8686 long tmp[6];
8688 if (total_bytes > len || total_bytes > 24)
8689 return NULL_TREE;
8690 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8692 memset (tmp, 0, sizeof (tmp));
8693 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8694 bitpos += BITS_PER_UNIT)
8696 /* Both OFFSET and BYTE index within a long;
8697 bitpos indexes the whole float. */
8698 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8699 if (UNITS_PER_WORD < 4)
8701 int word = byte / UNITS_PER_WORD;
8702 if (WORDS_BIG_ENDIAN)
8703 word = (words - 1) - word;
8704 offset = word * UNITS_PER_WORD;
8705 if (BYTES_BIG_ENDIAN)
8706 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8707 else
8708 offset += byte % UNITS_PER_WORD;
8710 else
8712 offset = byte;
8713 if (BYTES_BIG_ENDIAN)
8715 /* Reverse bytes within each long, or within the entire float
8716 if it's smaller than a long (for HFmode). */
8717 offset = MIN (3, total_bytes - 1) - offset;
8718 gcc_assert (offset >= 0);
8721 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8723 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8726 real_from_target (&r, tmp, mode);
8727 return build_real (type, r);
8731 /* Subroutine of native_interpret_expr. Interpret the contents of
8732 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8733 If the buffer cannot be interpreted, return NULL_TREE. */
8735 static tree
8736 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8738 tree etype, rpart, ipart;
8739 int size;
8741 etype = TREE_TYPE (type);
8742 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8743 if (size * 2 > len)
8744 return NULL_TREE;
8745 rpart = native_interpret_expr (etype, ptr, size);
8746 if (!rpart)
8747 return NULL_TREE;
8748 ipart = native_interpret_expr (etype, ptr+size, size);
8749 if (!ipart)
8750 return NULL_TREE;
8751 return build_complex (type, rpart, ipart);
8754 /* Read a vector of type TYPE from the target memory image given by BYTES,
8755 which contains LEN bytes. The vector is known to be encodable using
8756 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8758 Return the vector on success, otherwise return null. */
8760 static tree
8761 native_interpret_vector_part (tree type, const unsigned char *bytes,
8762 unsigned int len, unsigned int npatterns,
8763 unsigned int nelts_per_pattern)
8765 tree elt_type = TREE_TYPE (type);
8766 if (VECTOR_BOOLEAN_TYPE_P (type)
8767 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8769 /* This is the only case in which elements can be smaller than a byte.
8770 Element 0 is always in the lsb of the containing byte. */
8771 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8772 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8773 return NULL_TREE;
8775 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8776 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8778 unsigned int bit_index = i * elt_bits;
8779 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8780 unsigned int lsb = bit_index % BITS_PER_UNIT;
8781 builder.quick_push (bytes[byte_index] & (1 << lsb)
8782 ? build_all_ones_cst (elt_type)
8783 : build_zero_cst (elt_type));
8785 return builder.build ();
8788 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8789 if (elt_bytes * npatterns * nelts_per_pattern > len)
8790 return NULL_TREE;
8792 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8793 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8795 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8796 if (!elt)
8797 return NULL_TREE;
8798 builder.quick_push (elt);
8799 bytes += elt_bytes;
8801 return builder.build ();
8804 /* Subroutine of native_interpret_expr. Interpret the contents of
8805 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8806 If the buffer cannot be interpreted, return NULL_TREE. */
8808 static tree
8809 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8811 tree etype;
8812 unsigned int size;
8813 unsigned HOST_WIDE_INT count;
8815 etype = TREE_TYPE (type);
8816 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8817 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8818 || size * count > len)
8819 return NULL_TREE;
8821 return native_interpret_vector_part (type, ptr, len, count, 1);
8825 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8826 the buffer PTR of length LEN as a constant of type TYPE. For
8827 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8828 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8829 return NULL_TREE. */
8831 tree
8832 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8834 switch (TREE_CODE (type))
8836 case INTEGER_TYPE:
8837 case ENUMERAL_TYPE:
8838 case BOOLEAN_TYPE:
8839 case POINTER_TYPE:
8840 case REFERENCE_TYPE:
8841 case OFFSET_TYPE:
8842 return native_interpret_int (type, ptr, len);
8844 case REAL_TYPE:
8845 if (tree ret = native_interpret_real (type, ptr, len))
8847 /* For floating point values in composite modes, punt if this
8848 folding doesn't preserve bit representation. As the mode doesn't
8849 have fixed precision while GCC pretends it does, there could be
8850 valid values that GCC can't really represent accurately.
8851 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8852 bit combinations which GCC doesn't preserve. */
8853 unsigned char buf[24];
8854 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8855 int total_bytes = GET_MODE_SIZE (mode);
8856 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8857 || memcmp (ptr, buf, total_bytes) != 0)
8858 return NULL_TREE;
8859 return ret;
8861 return NULL_TREE;
8863 case FIXED_POINT_TYPE:
8864 return native_interpret_fixed (type, ptr, len);
8866 case COMPLEX_TYPE:
8867 return native_interpret_complex (type, ptr, len);
8869 case VECTOR_TYPE:
8870 return native_interpret_vector (type, ptr, len);
8872 default:
8873 return NULL_TREE;
8877 /* Returns true if we can interpret the contents of a native encoding
8878 as TYPE. */
8880 bool
8881 can_native_interpret_type_p (tree type)
8883 switch (TREE_CODE (type))
8885 case INTEGER_TYPE:
8886 case ENUMERAL_TYPE:
8887 case BOOLEAN_TYPE:
8888 case POINTER_TYPE:
8889 case REFERENCE_TYPE:
8890 case FIXED_POINT_TYPE:
8891 case REAL_TYPE:
8892 case COMPLEX_TYPE:
8893 case VECTOR_TYPE:
8894 case OFFSET_TYPE:
8895 return true;
8896 default:
8897 return false;
8901 /* Attempt to interpret an aggregate of TYPE from the LEN bytes encoded in
8902 target byte order at PTR + OFF. Does not handle unions. */
8904 tree
8905 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8906 int len)
8908 vec<constructor_elt, va_gc> *elts = NULL;
8909 if (TREE_CODE (type) == ARRAY_TYPE)
8911 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8912 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8913 return NULL_TREE;
8915 HOST_WIDE_INT cnt = 0;
8916 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8918 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8919 return NULL_TREE;
8920 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8922 if (eltsz == 0)
8923 cnt = 0;
8924 HOST_WIDE_INT pos = 0;
8925 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8927 tree v = NULL_TREE;
8928 if (pos >= len || pos + eltsz > len)
8929 return NULL_TREE;
8930 if (can_native_interpret_type_p (TREE_TYPE (type)))
8932 v = native_interpret_expr (TREE_TYPE (type),
8933 ptr + off + pos, eltsz);
8934 if (v == NULL_TREE)
8935 return NULL_TREE;
8937 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8938 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8939 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8940 eltsz);
8941 if (v == NULL_TREE)
8942 return NULL_TREE;
8943 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8945 return build_constructor (type, elts);
8947 if (TREE_CODE (type) != RECORD_TYPE)
8948 return NULL_TREE;
8949 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8951 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8952 continue;
8953 tree fld = field;
8954 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8955 int diff = 0;
8956 tree v = NULL_TREE;
8957 if (DECL_BIT_FIELD (field))
8959 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8960 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8962 poly_int64 bitoffset;
8963 poly_uint64 field_offset, fld_offset;
8964 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8965 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8966 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8967 else
8968 bitoffset = 0;
8969 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8970 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8971 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8972 - TYPE_PRECISION (TREE_TYPE (field)));
8973 if (!bitoffset.is_constant (&bitoff)
8974 || bitoff < 0
8975 || bitoff > diff)
8976 return NULL_TREE;
8978 else
8980 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8981 return NULL_TREE;
8982 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8983 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8984 bpos %= BITS_PER_UNIT;
8985 fieldsize += bpos;
8986 fieldsize += BITS_PER_UNIT - 1;
8987 fieldsize /= BITS_PER_UNIT;
8988 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8989 if (repr_type == NULL_TREE)
8990 return NULL_TREE;
8991 sz = int_size_in_bytes (repr_type);
8992 if (sz < 0 || sz > len)
8993 return NULL_TREE;
8994 pos = int_byte_position (field);
8995 if (pos < 0 || pos > len || pos + fieldsize > len)
8996 return NULL_TREE;
8997 HOST_WIDE_INT rpos;
8998 if (pos + sz <= len)
8999 rpos = pos;
9000 else
9002 rpos = len - sz;
9003 gcc_assert (rpos <= pos);
9005 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9006 pos = rpos;
9007 diff = (TYPE_PRECISION (repr_type)
9008 - TYPE_PRECISION (TREE_TYPE (field)));
9009 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9010 if (v == NULL_TREE)
9011 return NULL_TREE;
9012 fld = NULL_TREE;
9016 if (fld)
9018 sz = int_size_in_bytes (TREE_TYPE (fld));
9019 if (sz < 0 || sz > len)
9020 return NULL_TREE;
9021 tree byte_pos = byte_position (fld);
9022 if (!tree_fits_shwi_p (byte_pos))
9023 return NULL_TREE;
9024 pos = tree_to_shwi (byte_pos);
9025 if (pos < 0 || pos > len || pos + sz > len)
9026 return NULL_TREE;
9028 if (fld == NULL_TREE)
9029 /* Already handled above. */;
9030 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9032 v = native_interpret_expr (TREE_TYPE (fld),
9033 ptr + off + pos, sz);
9034 if (v == NULL_TREE)
9035 return NULL_TREE;
9037 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9038 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9039 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9040 if (v == NULL_TREE)
9041 return NULL_TREE;
9042 if (fld != field)
9044 if (TREE_CODE (v) != INTEGER_CST)
9045 return NULL_TREE;
9047 /* FIXME: Figure out how to handle PDP endian bitfields. */
9048 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9049 return NULL_TREE;
9050 if (!BYTES_BIG_ENDIAN)
9051 v = wide_int_to_tree (TREE_TYPE (field),
9052 wi::lrshift (wi::to_wide (v), bitoff));
9053 else
9054 v = wide_int_to_tree (TREE_TYPE (field),
9055 wi::lrshift (wi::to_wide (v),
9056 diff - bitoff));
9058 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9060 return build_constructor (type, elts);
9063 /* Routines for manipulating native_encode_expr encoded data when the encoded
9064 or extracted constant positions and/or sizes aren't byte-aligned. */
9066 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9067 bits between adjacent elements. AMNT should be within
9068 [0, BITS_PER_UNIT).
9069 Example, AMNT = 2:
9070 00011111|11100000 << 2 = 01111111|10000000
9071 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9073 void
9074 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9075 unsigned int amnt)
9077 if (amnt == 0)
9078 return;
9080 unsigned char carry_over = 0U;
9081 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9082 unsigned char clear_mask = (~0U) << amnt;
9084 for (unsigned int i = 0; i < sz; i++)
9086 unsigned prev_carry_over = carry_over;
9087 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9089 ptr[i] <<= amnt;
9090 if (i != 0)
9092 ptr[i] &= clear_mask;
9093 ptr[i] |= prev_carry_over;
9098 /* Like shift_bytes_in_array_left but for big-endian.
9099 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9100 bits between adjacent elements. AMNT should be within
9101 [0, BITS_PER_UNIT).
9102 Example, AMNT = 2:
9103 00011111|11100000 >> 2 = 00000111|11111000
9104 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9106 void
9107 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9108 unsigned int amnt)
9110 if (amnt == 0)
9111 return;
9113 unsigned char carry_over = 0U;
9114 unsigned char carry_mask = ~(~0U << amnt);
9116 for (unsigned int i = 0; i < sz; i++)
9118 unsigned prev_carry_over = carry_over;
9119 carry_over = ptr[i] & carry_mask;
9121 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9122 ptr[i] >>= amnt;
9123 ptr[i] |= prev_carry_over;
/* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
   directly on the VECTOR_CST encoding, in a way that works for variable-
   length vectors.  Return the resulting VECTOR_CST on success or null
   on failure.  */

static tree
fold_view_convert_vector_encoding (tree type, tree expr)
{
  tree expr_type = TREE_TYPE (expr);
  poly_uint64 type_bits, expr_bits;
  if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
      || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
    return NULL_TREE;

  poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
  poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
  unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
  unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);

  /* We can only preserve the semantics of a stepped pattern if the new
     vector element is an integer of the same size.  */
  if (VECTOR_CST_STEPPED_P (expr)
      && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
    return NULL_TREE;

  /* The number of bits needed to encode one element from every pattern
     of the original vector.  */
  unsigned int expr_sequence_bits
    = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;

  /* The number of bits needed to encode one element from every pattern
     of the result.  */
  unsigned int type_sequence_bits
    = least_common_multiple (expr_sequence_bits, type_elt_bits);

  /* Don't try to read more bytes than are available, which can happen
     for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
     The general VIEW_CONVERT handling can cope with that case, so there's
     no point complicating things here.  */
  unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
  unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
				    BITS_PER_UNIT);
  unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
  if (known_gt (buffer_bits, expr_bits))
    return NULL_TREE;

  /* Get enough bytes of EXPR to form the new encoding.  */
  auto_vec<unsigned char, 128> buffer (buffer_bytes);
  buffer.quick_grow (buffer_bytes);
  if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
				 buffer_bits / expr_elt_bits)
      != (int) buffer_bytes)
    return NULL_TREE;

  /* Reencode the bytes as TYPE.  */
  unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
  return native_interpret_vector_part (type, &buffer[0], buffer.length (),
				       type_npatterns, nelts_per_pattern);
}
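
/* Worked example (illustrative, not from the original source): view
   converting a V4SI duplicate { A, A, A, A } (NPATTERNS == 1,
   NELTS_PER_PATTERN == 1) to V8HI gives

     expr_sequence_bits = 1 * 32 = 32
     type_sequence_bits = least_common_multiple (32, 16) = 32
     buffer_bytes       = CEIL (1 * 32, 8) = 4

   so a single SI element is encoded and reinterpreted as
   type_npatterns = 32 / 16 = 2 HI patterns of one element each: the
   result duplicates the two target-order halves of A.  */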
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
    if (tree res = fold_view_convert_vector_encoding (type, expr))
      return res;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
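
/* Illustrative example (not from the original source): on a target
   where float is IEEE single precision, folding
   VIEW_CONVERT_EXPR <int> (1.0f) encodes the REAL_CST into the buffer
   and reinterprets the four bytes, yielding the INTEGER_CST
   0x3f800000.  */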
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
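
/* Illustrative examples (not from the original source) of the cases
   handled above, for a pointer P:

     &*P               -> P
     &MEM_REF[P, 0]    -> P
     &MEM_REF[CST, N]  -> CST p+ N

   so gimplification never sees an ADDR_EXPR wrapped around a plain
   indirection.  */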
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}

      if (CONSTANT_CLASS_P (arg0))
	{
	  tree tem = const_unop (code, type, arg0);
	  if (tem)
	    {
	      if (TREE_TYPE (tem) != type)
		tem = fold_convert_loc (loc, type, tem);
	      return tem;
	    }
	}
    }

  tem = generic_simplify (loc, code, type, op0);
  if (tem)
    return tem;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
    }
  switch (code)
    {
    case NON_LVALUE_EXPR:
      if (!maybe_lvalue_p (op0))
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     a non-integral type.
	     Do not fold the result, as that would not simplify further;
	     folding it again would also just recurse.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
		   && TREE_CODE (type) != VECTOR_TYPE)
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  poly_int64 bitsize, bitpos;
	  tree offset;
	  machine_mode mode;
	  int unsignedp, reversep, volatilep;
	  tree base
	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
				   &offset, &mode, &unsignedp, &reversep,
				   &volatilep);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (!offset
	      && known_eq (bitpos, 0)
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  suppress_warning (tem /* What warning? */);
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
	      if (change
		  && !flag_syntax_only
		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
	    }
	  if (change)
	    {
	      tem = force_fit_type (type, wi::to_widest (and1), 0,
				    TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
	 cast (T1)X will fold away.  We assume that this happens when X itself
	 is a cast.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  /* If -fsanitize=alignment, avoid this optimization in GENERIC
	     when the pointed type needs higher alignment than
	     the p+ first operand's pointed type.  */
	  if (!in_gimple_form
	      && sanitize_flags_p (SANITIZE_ALIGNMENT)
	      && (min_align_of_type (TREE_TYPE (type))
		  > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
	    return NULL_TREE;

	  /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
	     when type is a reference type and arg00's type is not,
	     because arg00 could be validly nullptr and if arg01 doesn't return,
	     we don't want false positive binding of reference to nullptr.  */
	  if (TREE_CODE (type) == REFERENCE_TYPE
	      && !in_gimple_form
	      && sanitize_flags_p (SANITIZE_NULL)
	      && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
	    return NULL_TREE;

	  arg00 = fold_convert_loc (loc, type, arg00);
	  return fold_build_pointer_plus_loc (loc, arg00, arg01);
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      return NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == MEM_REF)
	{
	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
	  tem = fold_build2_loc (loc, MEM_REF, type,
				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
	  return tem;
	}

      return NULL_TREE;

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      if (TREE_CODE (arg0) == NOP_EXPR
	  && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      return NULL_TREE;

    case BIT_NOT_EXPR:
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((VAR_P (op00)
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
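
/* Worked example (illustrative) for the (T)(x & c) case above: with
   SHORT_VAR of type short, (int) (SHORT_VAR & 0x7f) folds to
   (int) SHORT_VAR & 0x7f, because the mask 0x7f cannot set the sign
   bit of the short operand, so the widening conversion and the
   BIT_AND_EXPR commute.  */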
/* If the operation was a conversion, do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation-defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
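
/* Illustrative example (hypothetical values): folding (short) 0x12345
   produces the INTEGER_CST 0x2345, which force_fit_type may mark with
   TREE_OVERFLOW; since the original constant 0x12345 carried no
   overflow and the conversion is merely implementation-defined, this
   wrapper copies the clean overflow state back so VRP is not misled.  */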
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && (tem = fold_truth_andor_1 (loc, code, type,
				    TREE_OPERAND (arg0, 1), arg1)) != 0)
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (param_logical_op_non_short_circuit != -1)
    logical_op_non_short_circuit
      = param_logical_op_non_short_circuit;
  if (logical_op_non_short_circuit
      && !sanitize_coverage_p ()
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If the tree code of the left-hand operand isn't an AND/OR-IF code
	 and isn't equal to IF-CODE, then we don't want to add the right-hand
	 operand.  If the inner right-hand side of the left-hand operand has
	 side-effects, or isn't simple, then we can't add to it, as otherwise
	 we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
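
/* Worked example (illustrative): given (a || b) && (a || c) with
   side-effect-free B and C, the common-operand block above recognizes
   A in both operands and rebuilds the expression as a || (b && c),
   saving one test of A.  */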
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
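
/* Worked example (illustrative): for signed A in a type with undefined
   overflow, A - 5 < B is canonicalized to A - 4 <= B: LT_EXPR becomes
   LE_EXPR and the constant moves one step closer to zero, giving later
   folders a smaller magnitude to work with.  */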
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (maybe_lt (bitpos, 0))
    return true;

  poly_wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = wi::to_poly_wide (offset);

  wi::overflow_type overflow;
  poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
				  precision);
  poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  poly_uint64 total_hwi, size;
  if (!total.to_uhwi (&total_hwi)
      || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
			   &size)
      || known_eq (size, 0U))
    return true;

  if (known_le (total_hwi, size))
    return false;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR
      && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
			  &size)
      && maybe_ne (size, 0U)
      && known_le (total_hwi, size))
    return false;

  return true;
}
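
/* Worked example (illustrative): for &p->x with P of type struct S *,
   sizeof (struct S) == 8 and X at byte offset 4, OFFSET is NULL_TREE
   and BITPOS is 32, so TOTAL == 4 <= 8 and the function returns false:
   the address computation cannot wrap.  */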
/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.
   For function-local objects, always return a positive integer.  */
static int
maybe_nonzero_address (tree decl)
{
  /* Normally, don't do anything for variables and functions before symtab is
     built; it is quite possible that DECL will be declared weak later.
     But if folding_initializer, we need a constant answer now, so create
     the symtab entry and prevent later weak declaration.  */
  if (DECL_P (decl) && decl_in_symtab_p (decl))
    if (struct symtab_node *symbol
	= (folding_initializer
	   ? symtab_node::get_create (decl)
	   : symtab_node::get (decl)))
      return symbol->nonzero_address ();

  /* Function-local objects are never NULL.  */
  if (DECL_P (decl)
      && (DECL_CONTEXT (decl)
	  && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
	  && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
    return 1;

  return -1;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary itself should call fold_binary instead.  Fold a comparison
   with tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
      machine_mode mode;
      int volatilep, reversep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0
	    = get_inner_reference (TREE_OPERAND (arg0, 0),
				   &bitsize, &bitpos0, &offset0, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0
		= get_inner_reference (TREE_OPERAND (base0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base0) == INDIRECT_REF)
		base0 = TREE_OPERAND (base0, 0);
	      else
		indirect_base0 = true;
	    }
	  if (offset0 == NULL_TREE || integer_zerop (offset0))
	    offset0 = TREE_OPERAND (arg0, 1);
	  else
	    offset0 = size_binop (PLUS_EXPR, offset0,
				  TREE_OPERAND (arg0, 1));
	  if (poly_int_tree_p (offset0))
	    {
	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
					      TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos0;
	      if (tem.to_shwi (&bitpos0))
		offset0 = NULL_TREE;
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1
	    = get_inner_reference (TREE_OPERAND (arg1, 0),
				   &bitsize, &bitpos1, &offset1, &mode,
				   &unsignedp, &reversep, &volatilep);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1
		= get_inner_reference (TREE_OPERAND (base1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &reversep, &volatilep);
	      if (TREE_CODE (base1) == INDIRECT_REF)
		base1 = TREE_OPERAND (base1, 0);
	      else
		indirect_base1 = true;
	    }
	  if (offset1 == NULL_TREE || integer_zerop (offset1))
	    offset1 = TREE_OPERAND (arg1, 1);
	  else
	    offset1 = size_binop (PLUS_EXPR, offset1,
				  TREE_OPERAND (arg1, 1));
	  if (poly_int_tree_p (offset1))
	    {
	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
					      TYPE_PRECISION (sizetype));
	      tem <<= LOG2_BITS_PER_UNIT;
	      tem += bitpos1;
	      if (tem.to_shwi (&bitpos1))
		offset1 = NULL_TREE;
	    }
	}
      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1,
			      indirect_base0 ? OEP_ADDRESS_OF : 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0
		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	    {
	      if (!equality_code
		  && maybe_ne (bitpos0, bitpos1)
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  if (known_eq (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_ne (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case NE_EXPR:
		  if (known_ne (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_eq (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case LT_EXPR:
		  if (known_lt (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_ge (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case LE_EXPR:
		  if (known_le (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_gt (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case GE_EXPR:
		  if (known_ge (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_lt (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		case GT_EXPR:
		  if (known_gt (bitpos0, bitpos1))
		    return constant_boolean_node (true, type);
		  if (known_le (bitpos0, bitpos1))
		    return constant_boolean_node (false, type);
		  break;
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (known_eq (bitpos0, bitpos1)
		   && (equality_code
		       || (indirect_base0
			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	  /* For equal offsets we can simplify to a comparison of the
	     base addresses.  */
	  else if (known_eq (bitpos0, bitpos1)
		   && (indirect_base0
		       ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
		   && (indirect_base1
		       ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
		   && ((offset0 == offset1)
		       || (offset0 && offset1
			   && operand_equal_p (offset0, offset1, 0))))
	    {
	      if (indirect_base0)
		base0 = build_fold_addr_expr_loc (loc, base0);
	      if (indirect_base1)
		base1 = build_fold_addr_expr_loc (loc, base1);
	      return fold_build2_loc (loc, code, type, base0, base1);
	    }
	  /* Comparison between an ordinary (non-weak) symbol and a null
	     pointer can be eliminated since such symbols must have a non
	     null address.  In C, relational expressions between pointers
	     to objects and null pointers are undefined.  The results
	     below follow the C++ rules with the additional property that
	     every object pointer compares greater than a null pointer.  */
	  else if (((DECL_P (base0)
		     && maybe_nonzero_address (base0) > 0
		     /* Avoid folding references to struct members at offset 0 to
			prevent tests like '&ptr->firstmember == 0' from getting
			eliminated.  When ptr is null, although the -> expression
			is strictly speaking invalid, GCC retains it as a matter
			of QoI.  See PR c/44555.  */
		     && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
		    || CONSTANT_CLASS_P (base0))
		   && indirect_base0
		   /* The caller guarantees that when one of the arguments is
		      constant (i.e., null in this case) it is second.  */
		   && integer_zerop (arg1))
	    {
	      switch (code)
		{
		case EQ_EXPR:
		case LE_EXPR:
		case LT_EXPR:
		  return constant_boolean_node (false, type);
		case GE_EXPR:
		case GT_EXPR:
		case NE_EXPR:
		  return constant_boolean_node (true, type);
		default:
		  gcc_unreachable ();
		}
	    }
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  return NULL_TREE;
}
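
/* Worked example (illustrative): for ((x > y) - (y > x)) > 0 the three
   substitutions above evaluate to high_result == 1, equal_result == 0
   and low_result == 0, i.e. mask 4, so the whole expression folds back
   to x > y.  */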
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
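
/* Worked identity (illustrative): for z = a + b*i,

     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   which is why the imaginary part of the result above is simply the
   zero constant of the element type.  */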
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
   true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (TREE_CODE (arg) == VECTOR_CST
      && VECTOR_CST_NELTS (arg).is_constant (&nunits))
    {
      for (i = 0; i < nunits; ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

tree
fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
{
  unsigned int i;
  unsigned HOST_WIDE_INT nelts;
  bool need_ctor = false;

  if (!sel.length ().is_constant (&nelts))
    return NULL_TREE;
  gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  tree *in_elts = XALLOCAVEC (tree, nelts * 2);
  if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
      || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
    return NULL_TREE;

  tree_vector_builder out_elts (type, nelts, 1);
  for (i = 0; i < nelts; i++)
    {
      HOST_WIDE_INT index;
      if (!sel[i].is_constant (&index))
	return NULL_TREE;
      if (!CONSTANT_CLASS_P (in_elts[index]))
	need_ctor = true;
      out_elts.quick_push (unshare_expr (in_elts[index]));
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
      return build_constructor (type, v);
    }
  else
    return out_elts.build ();
}
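
/* Worked example (illustrative): with ARG0 = { a, b, c, d },
   ARG1 = { e, f, g, h } and SEL = { 0, 4, 1, 5 }, IN_ELTS is
   { a, b, c, d, e, f, g, h } and the result is { a, e, b, f }; a
   CONSTRUCTOR is built only if some selected element is itself
   non-constant.  */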
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1,
				   bool use_pointer_diff)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
						use_pointer_diff)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset
	      = use_pointer_diff
		? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
				   TREE_OPERAND (base0, 0),
				   TREE_OPERAND (base1, 0))
		: fold_binary_loc (loc, MINUS_EXPR, type,
				   fold_convert (type,
						 TREE_OPERAND (base0, 0)),
				   fold_convert (type,
						 TREE_OPERAND (base1, 0)))))
      || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
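
/* Worked example (illustrative): for &a[i] - &a[j] over the same array
   A with element size ESZ, the bases compare equal via operand_equal_p,
   so the difference folds to 0 + (i - j) * ESZ.  */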
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type;
  machine_mode mode;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      {
	unit_type = TREE_TYPE (type);
	mode = TYPE_MODE (unit_type);

	tree_vector_builder elts;
	if (!elts.new_unary_operation (type, cst, false))
	  return NULL_TREE;
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	    if (!exact_real_inverse (mode, &r))
	      return NULL_TREE;
	    elts.quick_push (build_real (unit_type, r));
	  }

	return elts.build ();
      }

    default:
      return NULL_TREE;
    }
}
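
/* Illustrative examples (not from the original source): for a binary
   floating-point TYPE, exact_inverse (type, 4.0) returns 0.25, since
   both are exact powers of two, while exact_inverse (type, 3.0)
   returns NULL_TREE because 1/3 has no exact binary representation.  */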
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
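
/* Worked example (illustrative): with X == 0x3f and Y == 8, Y has
   three trailing zero bits, so the result is 0x3f & ~7 == 0x38.  */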
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW_P (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true if T is known not to be equal to an integer W.  */

bool
expr_not_equal_to (tree t, const wide_int &w)
{
  int_range_max vr;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t) != w;

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return false;

      if (cfun)
	get_range_query (cfun)->range_of_expr (vr, t);
      else
	get_global_range_query ()->range_of_expr (vr, t);

      if (!vr.undefined_p ()
	  && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
	return true;
      /* If T has some known zero bits and W has any of those bits set,
	 then T is known not to be equal to W.  */
      if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
			      TYPE_PRECISION (TREE_TYPE (t))), 0))
	return true;
      return false;

    default:
      return false;
    }
}
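
/* Illustrative example (hypothetical values): if range information
   proves an SSA name T is even (its nonzero-bits mask has the low bit
   clear), then for W == 5 the value wi::bit_and_not (5, nonzero_bits)
   == 1 is nonzero, so T is known to differ from 5 even when its value
   range alone cannot exclude it.  */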
10830 /* Fold a binary expression of code CODE and type TYPE with operands
10831 OP0 and OP1. LOC is the location of the resulting expression.
10832 Return the folded expression if folding is successful. Otherwise,
10833 return NULL_TREE. */
10835 tree
10836 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10837 tree op0, tree op1)
10839 enum tree_code_class kind = TREE_CODE_CLASS (code);
10840 tree arg0, arg1, tem;
10841 tree t1 = NULL_TREE;
10842 bool strict_overflow_p;
10843 unsigned int prec;
10845 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10846 && TREE_CODE_LENGTH (code) == 2
10847 && op0 != NULL_TREE
10848 && op1 != NULL_TREE);
10850 arg0 = op0;
10851 arg1 = op1;
10853 /* Strip any conversions that don't change the mode. This is
10854 safe for every expression, except for a comparison expression
10855 because its signedness is derived from its operands. So, in
10856 the latter case, only strip conversions that don't change the
10857 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10858 preserved.
10860 Note that this is done as an internal manipulation within the
10861 constant folder, in order to find the simplest representation
10862 of the arguments so that their form can be studied. In any
10863 cases, the appropriate type conversions should be put back in
10864 the tree that will get out of the constant folder. */
10866 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10868 STRIP_SIGN_NOPS (arg0);
10869 STRIP_SIGN_NOPS (arg1);
10871 else
10873 STRIP_NOPS (arg0);
10874 STRIP_NOPS (arg1);
10877 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10878 constant but we can't do arithmetic on them. */
10879 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10881 tem = const_binop (code, type, arg0, arg1);
10882 if (tem != NULL_TREE)
10884 if (TREE_TYPE (tem) != type)
10885 tem = fold_convert_loc (loc, type, tem);
10886 return tem;
10890 /* If this is a commutative operation, and ARG0 is a constant, move it
10891 to ARG1 to reduce the number of tests below. */
10892 if (commutative_tree_code (code)
10893 && tree_swap_operands_p (arg0, arg1))
10894 return fold_build2_loc (loc, code, type, op1, op0);
10896 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10897 to ARG1 to reduce the number of tests below. */
10898 if (kind == tcc_comparison
10899 && tree_swap_operands_p (arg0, arg1))
10900 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10902 tem = generic_simplify (loc, code, type, op0, op1);
10903 if (tem)
10904 return tem;
10906 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10908 First check for cases where an arithmetic operation is applied to a
10909 compound, conditional, or comparison operation. Push the arithmetic
10910 operation inside the compound or conditional to see if any folding
10911 can then be done. Convert comparison to conditional for this purpose.
10912 This also optimizes non-constant cases that used to be done in
10913 expand_expr.
10915 Before we do that, see if this is a BIT_AND_EXPR, a BIT_IOR_EXPR,
10916 an EQ_EXPR or an NE_EXPR where one of the operands is a truth value
10917 (e.g. a comparison) and the other is a truth value or a BIT_AND_EXPR
10918 with the constant 1. In that case, the code below would make the
10919 expression more complex. Change it to a TRUTH_{AND,OR}_EXPR. Likewise,
10920 convert such an NE_EXPR to TRUTH_XOR_EXPR and such an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10922 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10923 || code == EQ_EXPR || code == NE_EXPR)
10924 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10925 && ((truth_value_p (TREE_CODE (arg0))
10926 && (truth_value_p (TREE_CODE (arg1))
10927 || (TREE_CODE (arg1) == BIT_AND_EXPR
10928 && integer_onep (TREE_OPERAND (arg1, 1)))))
10929 || (truth_value_p (TREE_CODE (arg1))
10930 && (truth_value_p (TREE_CODE (arg0))
10931 || (TREE_CODE (arg0) == BIT_AND_EXPR
10932 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10934 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10935 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10936 : TRUTH_XOR_EXPR,
10937 boolean_type_node,
10938 fold_convert_loc (loc, boolean_type_node, arg0),
10939 fold_convert_loc (loc, boolean_type_node, arg1));
10941 if (code == EQ_EXPR)
10942 tem = invert_truthvalue_loc (loc, tem);
10944 return fold_convert_loc (loc, type, tem);
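/* Illustrative example (editorial addition): for truth values a and b,
   a | b becomes TRUTH_OR_EXPR (a, b), a != b becomes
   TRUTH_XOR_EXPR (a, b), and a == b, which on truth values is the
   negation of XOR, becomes the inverted TRUTH_XOR_EXPR.  */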
10947 if (TREE_CODE_CLASS (code) == tcc_binary
10948 || TREE_CODE_CLASS (code) == tcc_comparison)
10950 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10952 tem = fold_build2_loc (loc, code, type,
10953 fold_convert_loc (loc, TREE_TYPE (op0),
10954 TREE_OPERAND (arg0, 1)), op1);
10955 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10956 tem);
10958 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10960 tem = fold_build2_loc (loc, code, type, op0,
10961 fold_convert_loc (loc, TREE_TYPE (op1),
10962 TREE_OPERAND (arg1, 1)));
10963 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10964 tem);
10967 if (TREE_CODE (arg0) == COND_EXPR
10968 || TREE_CODE (arg0) == VEC_COND_EXPR
10969 || COMPARISON_CLASS_P (arg0))
10971 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10972 arg0, arg1,
10973 /*cond_first_p=*/1);
10974 if (tem != NULL_TREE)
10975 return tem;
10978 if (TREE_CODE (arg1) == COND_EXPR
10979 || TREE_CODE (arg1) == VEC_COND_EXPR
10980 || COMPARISON_CLASS_P (arg1))
10982 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10983 arg1, arg0,
10984 /*cond_first_p=*/0);
10985 if (tem != NULL_TREE)
10986 return tem;
10990 switch (code)
10992 case MEM_REF:
10993 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10994 if (TREE_CODE (arg0) == ADDR_EXPR
10995 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10997 tree iref = TREE_OPERAND (arg0, 0);
10998 return fold_build2 (MEM_REF, type,
10999 TREE_OPERAND (iref, 0),
11000 int_const_binop (PLUS_EXPR, arg1,
11001 TREE_OPERAND (iref, 1)));
11004 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11005 if (TREE_CODE (arg0) == ADDR_EXPR
11006 && handled_component_p (TREE_OPERAND (arg0, 0)))
11008 tree base;
11009 poly_int64 coffset;
11010 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11011 &coffset);
11012 if (!base)
11013 return NULL_TREE;
11014 return fold_build2 (MEM_REF, type,
11015 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11016 int_const_binop (PLUS_EXPR, arg1,
11017 size_int (coffset)));
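/* Illustrative example (editorial addition): MEM[&MEM[p, 4], 8] folds
   to MEM[p, 12] by the first pattern; and with
   struct S { int a; int b; } s, an access MEM[&s.b, 8] is rebuilt by
   the second pattern as MEM[&s, offsetof (S, b) + 8].  */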
11020 return NULL_TREE;
11022 case POINTER_PLUS_EXPR:
11023 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11024 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11025 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11026 return fold_convert_loc (loc, type,
11027 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11028 fold_convert_loc (loc, sizetype,
11029 arg1),
11030 fold_convert_loc (loc, sizetype,
11031 arg0)));
11033 return NULL_TREE;
11035 case PLUS_EXPR:
11036 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11038 /* X + (X / CST) * -CST is X % CST. */
11039 if (TREE_CODE (arg1) == MULT_EXPR
11040 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11041 && operand_equal_p (arg0,
11042 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11044 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11045 tree cst1 = TREE_OPERAND (arg1, 1);
11046 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11047 cst1, cst0);
11048 if (sum && integer_zerop (sum))
11049 return fold_convert_loc (loc, type,
11050 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11051 TREE_TYPE (arg0), arg0,
11052 cst0));
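/* Illustrative example (editorial addition): x + (x / 16) * -16
   matches with cst0 == 16 and cst1 == -16, whose sum is zero, so it
   folds to x % 16 -- the identity x % d == x - (x / d) * d for
   truncating division.  */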
11056 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11057 one. Make sure the type is not saturating and has the signedness of
11058 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11059 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11060 if ((TREE_CODE (arg0) == MULT_EXPR
11061 || TREE_CODE (arg1) == MULT_EXPR)
11062 && !TYPE_SATURATING (type)
11063 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11064 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11065 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11067 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11068 if (tem)
11069 return tem;
11072 if (! FLOAT_TYPE_P (type))
11074 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11075 (plus (plus (mult) (mult)) (foo)) so that we can
11076 take advantage of the factoring cases below. */
11077 if (ANY_INTEGRAL_TYPE_P (type)
11078 && TYPE_OVERFLOW_WRAPS (type)
11079 && (((TREE_CODE (arg0) == PLUS_EXPR
11080 || TREE_CODE (arg0) == MINUS_EXPR)
11081 && TREE_CODE (arg1) == MULT_EXPR)
11082 || ((TREE_CODE (arg1) == PLUS_EXPR
11083 || TREE_CODE (arg1) == MINUS_EXPR)
11084 && TREE_CODE (arg0) == MULT_EXPR)))
11086 tree parg0, parg1, parg, marg;
11087 enum tree_code pcode;
11089 if (TREE_CODE (arg1) == MULT_EXPR)
11090 parg = arg0, marg = arg1;
11091 else
11092 parg = arg1, marg = arg0;
11093 pcode = TREE_CODE (parg);
11094 parg0 = TREE_OPERAND (parg, 0);
11095 parg1 = TREE_OPERAND (parg, 1);
11096 STRIP_NOPS (parg0);
11097 STRIP_NOPS (parg1);
11099 if (TREE_CODE (parg0) == MULT_EXPR
11100 && TREE_CODE (parg1) != MULT_EXPR)
11101 return fold_build2_loc (loc, pcode, type,
11102 fold_build2_loc (loc, PLUS_EXPR, type,
11103 fold_convert_loc (loc, type,
11104 parg0),
11105 fold_convert_loc (loc, type,
11106 marg)),
11107 fold_convert_loc (loc, type, parg1));
11108 if (TREE_CODE (parg0) != MULT_EXPR
11109 && TREE_CODE (parg1) == MULT_EXPR)
11110 return
11111 fold_build2_loc (loc, PLUS_EXPR, type,
11112 fold_convert_loc (loc, type, parg0),
11113 fold_build2_loc (loc, pcode, type,
11114 fold_convert_loc (loc, type, marg),
11115 fold_convert_loc (loc, type,
11116 parg1)));
11119 else
11121 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11122 to __complex__ ( x, y ). This is not the same for SNaNs or
11123 if signed zeros are involved. */
11124 if (!HONOR_SNANS (arg0)
11125 && !HONOR_SIGNED_ZEROS (arg0)
11126 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11128 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11129 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11130 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11131 bool arg0rz = false, arg0iz = false;
11132 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11133 || (arg0i && (arg0iz = real_zerop (arg0i))))
11135 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11136 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11137 if (arg0rz && arg1i && real_zerop (arg1i))
11139 tree rp = arg1r ? arg1r
11140 : build1 (REALPART_EXPR, rtype, arg1);
11141 tree ip = arg0i ? arg0i
11142 : build1 (IMAGPART_EXPR, rtype, arg0);
11143 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11145 else if (arg0iz && arg1r && real_zerop (arg1r))
11147 tree rp = arg0r ? arg0r
11148 : build1 (REALPART_EXPR, rtype, arg0);
11149 tree ip = arg1i ? arg1i
11150 : build1 (IMAGPART_EXPR, rtype, arg1);
11151 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11156 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11157 We associate floats only if the user has specified
11158 -fassociative-math. */
11159 if (flag_associative_math
11160 && TREE_CODE (arg1) == PLUS_EXPR
11161 && TREE_CODE (arg0) != MULT_EXPR)
11163 tree tree10 = TREE_OPERAND (arg1, 0);
11164 tree tree11 = TREE_OPERAND (arg1, 1);
11165 if (TREE_CODE (tree11) == MULT_EXPR
11166 && TREE_CODE (tree10) == MULT_EXPR)
11168 tree tree0;
11169 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11170 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11173 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11174 We associate floats only if the user has specified
11175 -fassociative-math. */
11176 if (flag_associative_math
11177 && TREE_CODE (arg0) == PLUS_EXPR
11178 && TREE_CODE (arg1) != MULT_EXPR)
11180 tree tree00 = TREE_OPERAND (arg0, 0);
11181 tree tree01 = TREE_OPERAND (arg0, 1);
11182 if (TREE_CODE (tree01) == MULT_EXPR
11183 && TREE_CODE (tree00) == MULT_EXPR)
11185 tree tree0;
11186 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11187 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11192 bit_rotate:
11193 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11194 is a rotate of A by C1 bits. */
11195 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11196 is a rotate of A by B bits.
11197 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11198 though in this case CODE must be | and not + or ^, otherwise
11199 it doesn't return A when B is 0. */
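/* Illustrative example (editorial addition): for a 32-bit unsigned x,
   (x << 3) + (x >> 29) is recognized here as x rotated left by 3, and
   (x << b) | (x >> (32 - b)) as a rotate by b.  The
   (x << b) | (x >> (-b & 31)) form is accepted only for |, because
   with + or ^ it would not reduce to x when b == 0.  */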
11201 enum tree_code code0, code1;
11202 tree rtype;
11203 code0 = TREE_CODE (arg0);
11204 code1 = TREE_CODE (arg1);
11205 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11206 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11207 && operand_equal_p (TREE_OPERAND (arg0, 0),
11208 TREE_OPERAND (arg1, 0), 0)
11209 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11210 TYPE_UNSIGNED (rtype))
11211 /* Only create rotates in complete modes. Other cases are not
11212 expanded properly. */
11213 && (element_precision (rtype)
11214 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11216 tree tree01, tree11;
11217 tree orig_tree01, orig_tree11;
11218 enum tree_code code01, code11;
11220 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11221 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11222 STRIP_NOPS (tree01);
11223 STRIP_NOPS (tree11);
11224 code01 = TREE_CODE (tree01);
11225 code11 = TREE_CODE (tree11);
11226 if (code11 != MINUS_EXPR
11227 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11229 std::swap (code0, code1);
11230 std::swap (code01, code11);
11231 std::swap (tree01, tree11);
11232 std::swap (orig_tree01, orig_tree11);
11234 if (code01 == INTEGER_CST
11235 && code11 == INTEGER_CST
11236 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11237 == element_precision (rtype)))
11239 tem = build2_loc (loc, LROTATE_EXPR,
11240 rtype, TREE_OPERAND (arg0, 0),
11241 code0 == LSHIFT_EXPR
11242 ? orig_tree01 : orig_tree11);
11243 return fold_convert_loc (loc, type, tem);
11245 else if (code11 == MINUS_EXPR)
11247 tree tree110, tree111;
11248 tree110 = TREE_OPERAND (tree11, 0);
11249 tree111 = TREE_OPERAND (tree11, 1);
11250 STRIP_NOPS (tree110);
11251 STRIP_NOPS (tree111);
11252 if (TREE_CODE (tree110) == INTEGER_CST
11253 && compare_tree_int (tree110,
11254 element_precision (rtype)) == 0
11255 && operand_equal_p (tree01, tree111, 0))
11257 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11258 ? LROTATE_EXPR : RROTATE_EXPR),
11259 rtype, TREE_OPERAND (arg0, 0),
11260 orig_tree01);
11261 return fold_convert_loc (loc, type, tem);
11264 else if (code == BIT_IOR_EXPR
11265 && code11 == BIT_AND_EXPR
11266 && pow2p_hwi (element_precision (rtype)))
11268 tree tree110, tree111;
11269 tree110 = TREE_OPERAND (tree11, 0);
11270 tree111 = TREE_OPERAND (tree11, 1);
11271 STRIP_NOPS (tree110);
11272 STRIP_NOPS (tree111);
11273 if (TREE_CODE (tree110) == NEGATE_EXPR
11274 && TREE_CODE (tree111) == INTEGER_CST
11275 && compare_tree_int (tree111,
11276 element_precision (rtype) - 1) == 0
11277 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11279 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11280 ? LROTATE_EXPR : RROTATE_EXPR),
11281 rtype, TREE_OPERAND (arg0, 0),
11282 orig_tree01);
11283 return fold_convert_loc (loc, type, tem);
11289 associate:
11290 /* In most languages, we can't associate operations on floats through
11291 parentheses. Rather than remember where the parentheses were, we
11292 don't associate floats at all, unless the user has specified
11293 -fassociative-math.
11294 And we need to make sure the type is not saturating. */
11296 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11297 && !TYPE_SATURATING (type))
11299 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11300 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11301 tree atype = type;
11302 bool ok = true;
11304 /* Split both trees into variables, constants, and literals. Then
11305 associate each group together, the constants with literals,
11306 then the result with variables. This increases the chances of
11307 literals being recombined later and of generating relocatable
11308 expressions for the sum of a constant and literal. */
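/* Illustrative sketch (editorial addition): for (x + 4) + (y + 2) the
   splits give var0 = x, lit0 = 4, var1 = y, lit1 = 2; reassociating
   group by group produces (x + y) + 6, combining the two literals even
   though they were not adjacent originally.  */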
11309 var0 = split_tree (arg0, type, code,
11310 &minus_var0, &con0, &minus_con0,
11311 &lit0, &minus_lit0, 0);
11312 var1 = split_tree (arg1, type, code,
11313 &minus_var1, &con1, &minus_con1,
11314 &lit1, &minus_lit1, code == MINUS_EXPR);
11316 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11317 if (code == MINUS_EXPR)
11318 code = PLUS_EXPR;
11320 /* With undefined overflow prefer doing association in a type
11321 which wraps on overflow, if that is one of the operand types. */
11322 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11323 && !TYPE_OVERFLOW_WRAPS (type))
11325 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11326 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11327 atype = TREE_TYPE (arg0);
11328 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11329 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11330 atype = TREE_TYPE (arg1);
11331 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11334 /* With undefined overflow we can only associate constants with one
11335 variable, and constants whose association doesn't overflow. */
11336 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11337 && !TYPE_OVERFLOW_WRAPS (atype))
11339 if ((var0 && var1) || (minus_var0 && minus_var1))
11341 /* ??? If split_tree would handle NEGATE_EXPR we could
11342 simply reject these cases and the allowed cases would
11343 be the var0/minus_var1 ones. */
11344 tree tmp0 = var0 ? var0 : minus_var0;
11345 tree tmp1 = var1 ? var1 : minus_var1;
11346 bool one_neg = false;
11348 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11350 tmp0 = TREE_OPERAND (tmp0, 0);
11351 one_neg = !one_neg;
11353 if (CONVERT_EXPR_P (tmp0)
11354 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11355 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11356 <= TYPE_PRECISION (atype)))
11357 tmp0 = TREE_OPERAND (tmp0, 0);
11358 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11360 tmp1 = TREE_OPERAND (tmp1, 0);
11361 one_neg = !one_neg;
11363 if (CONVERT_EXPR_P (tmp1)
11364 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11365 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11366 <= TYPE_PRECISION (atype)))
11367 tmp1 = TREE_OPERAND (tmp1, 0);
11368 /* The only case we can still associate with two variables
11369 is if they cancel out. */
11370 if (!one_neg
11371 || !operand_equal_p (tmp0, tmp1, 0))
11372 ok = false;
11374 else if ((var0 && minus_var1
11375 && ! operand_equal_p (var0, minus_var1, 0))
11376 || (minus_var0 && var1
11377 && ! operand_equal_p (minus_var0, var1, 0)))
11378 ok = false;
11381 /* Only do something if we found more than two objects. Otherwise,
11382 nothing has changed and we risk infinite recursion. */
11383 if (ok
11384 && ((var0 != 0) + (var1 != 0)
11385 + (minus_var0 != 0) + (minus_var1 != 0)
11386 + (con0 != 0) + (con1 != 0)
11387 + (minus_con0 != 0) + (minus_con1 != 0)
11388 + (lit0 != 0) + (lit1 != 0)
11389 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11391 var0 = associate_trees (loc, var0, var1, code, atype);
11392 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11393 code, atype);
11394 con0 = associate_trees (loc, con0, con1, code, atype);
11395 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11396 code, atype);
11397 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11398 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11399 code, atype);
11401 if (minus_var0 && var0)
11403 var0 = associate_trees (loc, var0, minus_var0,
11404 MINUS_EXPR, atype);
11405 minus_var0 = 0;
11407 if (minus_con0 && con0)
11409 con0 = associate_trees (loc, con0, minus_con0,
11410 MINUS_EXPR, atype);
11411 minus_con0 = 0;
11414 /* Preserve the MINUS_EXPR if the negative part of the literal is
11415 greater than the positive part. Otherwise, the multiplicative
11416 folding code (i.e. extract_muldiv) may be fooled when
11417 unsigned constants are subtracted, like in the following
11418 example: ((X*2 + 4) - 8U)/2. */
11419 if (minus_lit0 && lit0)
11421 if (TREE_CODE (lit0) == INTEGER_CST
11422 && TREE_CODE (minus_lit0) == INTEGER_CST
11423 && tree_int_cst_lt (lit0, minus_lit0)
11424 /* But avoid ending up with only negated parts. */
11425 && (var0 || con0))
11427 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11428 MINUS_EXPR, atype);
11429 lit0 = 0;
11431 else
11433 lit0 = associate_trees (loc, lit0, minus_lit0,
11434 MINUS_EXPR, atype);
11435 minus_lit0 = 0;
11439 /* Don't introduce overflows through reassociation. */
11440 if ((lit0 && TREE_OVERFLOW_P (lit0))
11441 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11442 return NULL_TREE;
11444 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11445 con0 = associate_trees (loc, con0, lit0, code, atype);
11446 lit0 = 0;
11447 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11448 code, atype);
11449 minus_lit0 = 0;
11451 /* Eliminate minus_con0. */
11452 if (minus_con0)
11454 if (con0)
11455 con0 = associate_trees (loc, con0, minus_con0,
11456 MINUS_EXPR, atype);
11457 else if (var0)
11458 var0 = associate_trees (loc, var0, minus_con0,
11459 MINUS_EXPR, atype);
11460 else
11461 gcc_unreachable ();
11462 minus_con0 = 0;
11465 /* Eliminate minus_var0. */
11466 if (minus_var0)
11468 if (con0)
11469 con0 = associate_trees (loc, con0, minus_var0,
11470 MINUS_EXPR, atype);
11471 else
11472 gcc_unreachable ();
11473 minus_var0 = 0;
11476 return
11477 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11478 code, atype));
11482 return NULL_TREE;
11484 case POINTER_DIFF_EXPR:
11485 case MINUS_EXPR:
11486 /* Fold &a[i] - &a[j] to i-j. */
11487 if (TREE_CODE (arg0) == ADDR_EXPR
11488 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11489 && TREE_CODE (arg1) == ADDR_EXPR
11490 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11492 tree tem = fold_addr_of_array_ref_difference (loc, type,
11493 TREE_OPERAND (arg0, 0),
11494 TREE_OPERAND (arg1, 0),
11495 code
11496 == POINTER_DIFF_EXPR);
11497 if (tem)
11498 return tem;
11501 /* Further transformations are not for pointers. */
11502 if (code == POINTER_DIFF_EXPR)
11503 return NULL_TREE;
11505 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11506 if (TREE_CODE (arg0) == NEGATE_EXPR
11507 && negate_expr_p (op1)
11508 /* If arg0 is e.g. unsigned int and type is int, then this could
11509 introduce UB, because if A is INT_MIN at runtime, the original
11510 expression can be well defined while the latter is not.
11511 See PR83269. */
11512 && !(ANY_INTEGRAL_TYPE_P (type)
11513 && TYPE_OVERFLOW_UNDEFINED (type)
11514 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11515 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11516 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11517 fold_convert_loc (loc, type,
11518 TREE_OPERAND (arg0, 0)));
11520 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11521 __complex__ ( x, -y ). This is not the same for SNaNs or if
11522 signed zeros are involved. */
11523 if (!HONOR_SNANS (arg0)
11524 && !HONOR_SIGNED_ZEROS (arg0)
11525 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11527 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11528 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11529 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11530 bool arg0rz = false, arg0iz = false;
11531 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11532 || (arg0i && (arg0iz = real_zerop (arg0i))))
11534 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11535 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11536 if (arg0rz && arg1i && real_zerop (arg1i))
11538 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11539 arg1r ? arg1r
11540 : build1 (REALPART_EXPR, rtype, arg1));
11541 tree ip = arg0i ? arg0i
11542 : build1 (IMAGPART_EXPR, rtype, arg0);
11543 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11545 else if (arg0iz && arg1r && real_zerop (arg1r))
11547 tree rp = arg0r ? arg0r
11548 : build1 (REALPART_EXPR, rtype, arg0);
11549 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11550 arg1i ? arg1i
11551 : build1 (IMAGPART_EXPR, rtype, arg1));
11552 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11557 /* A - B -> A + (-B) if B is easily negatable. */
11558 if (negate_expr_p (op1)
11559 && ! TYPE_OVERFLOW_SANITIZED (type)
11560 && ((FLOAT_TYPE_P (type)
11561 /* Avoid this transformation if B is a positive REAL_CST. */
11562 && (TREE_CODE (op1) != REAL_CST
11563 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11564 || INTEGRAL_TYPE_P (type)))
11565 return fold_build2_loc (loc, PLUS_EXPR, type,
11566 fold_convert_loc (loc, type, arg0),
11567 negate_expr (op1));
11569 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11570 one. Make sure the type is not saturating and has the signedness of
11571 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11572 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11573 if ((TREE_CODE (arg0) == MULT_EXPR
11574 || TREE_CODE (arg1) == MULT_EXPR)
11575 && !TYPE_SATURATING (type)
11576 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11577 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11578 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11580 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11581 if (tem)
11582 return tem;
11585 goto associate;
11587 case MULT_EXPR:
11588 if (! FLOAT_TYPE_P (type))
11590 /* Transform x * -C into -x * C if x is easily negatable. */
11591 if (TREE_CODE (op1) == INTEGER_CST
11592 && tree_int_cst_sgn (op1) == -1
11593 && negate_expr_p (op0)
11594 && negate_expr_p (op1)
11595 && (tem = negate_expr (op1)) != op1
11596 && ! TREE_OVERFLOW (tem))
11597 return fold_build2_loc (loc, MULT_EXPR, type,
11598 fold_convert_loc (loc, type,
11599 negate_expr (op0)), tem);
11601 strict_overflow_p = false;
11602 if (TREE_CODE (arg1) == INTEGER_CST
11603 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11604 &strict_overflow_p)) != 0)
11606 if (strict_overflow_p)
11607 fold_overflow_warning (("assuming signed overflow does not "
11608 "occur when simplifying "
11609 "multiplication"),
11610 WARN_STRICT_OVERFLOW_MISC);
11611 return fold_convert_loc (loc, type, tem);
11614 /* Optimize z * conj(z) for integer complex numbers. */
11615 if (TREE_CODE (arg0) == CONJ_EXPR
11616 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11617 return fold_mult_zconjz (loc, type, arg1);
11618 if (TREE_CODE (arg1) == CONJ_EXPR
11619 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11620 return fold_mult_zconjz (loc, type, arg0);
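/* Illustrative example (editorial addition): for z = a + b*i,
   z * conj (z) = (a + b*i) * (a - b*i) = a*a + b*b, so
   fold_mult_zconjz can build the result as a COMPLEX_EXPR whose
   imaginary part is zero.  */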
11622 else
11624 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11625 This is not the same for NaNs or if signed zeros are
11626 involved. */
11627 if (!HONOR_NANS (arg0)
11628 && !HONOR_SIGNED_ZEROS (arg0)
11629 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11630 && TREE_CODE (arg1) == COMPLEX_CST
11631 && real_zerop (TREE_REALPART (arg1)))
11633 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11634 if (real_onep (TREE_IMAGPART (arg1)))
11635 return
11636 fold_build2_loc (loc, COMPLEX_EXPR, type,
11637 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11638 rtype, arg0)),
11639 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11640 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11641 return
11642 fold_build2_loc (loc, COMPLEX_EXPR, type,
11643 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11644 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11645 rtype, arg0)));
11648 /* Optimize z * conj(z) for floating point complex numbers.
11649 Guarded by flag_unsafe_math_optimizations as non-finite
11650 imaginary components don't produce scalar results. */
11651 if (flag_unsafe_math_optimizations
11652 && TREE_CODE (arg0) == CONJ_EXPR
11653 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11654 return fold_mult_zconjz (loc, type, arg1);
11655 if (flag_unsafe_math_optimizations
11656 && TREE_CODE (arg1) == CONJ_EXPR
11657 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11658 return fold_mult_zconjz (loc, type, arg0);
11660 goto associate;
11662 case BIT_IOR_EXPR:
11663 /* Canonicalize (X & C1) | C2. */
11664 if (TREE_CODE (arg0) == BIT_AND_EXPR
11665 && TREE_CODE (arg1) == INTEGER_CST
11666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11668 int width = TYPE_PRECISION (type), w;
11669 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11670 wide_int c2 = wi::to_wide (arg1);
11672 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11673 if ((c1 & c2) == c1)
11674 return omit_one_operand_loc (loc, type, arg1,
11675 TREE_OPERAND (arg0, 0));
11677 wide_int msk = wi::mask (width, false,
11678 TYPE_PRECISION (TREE_TYPE (arg1)));
11680 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11681 if (wi::bit_and_not (msk, c1 | c2) == 0)
11683 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11684 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11687 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11688 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11689 mode which allows further optimizations. */
11690 c1 &= msk;
11691 c2 &= msk;
11692 wide_int c3 = wi::bit_and_not (c1, c2);
11693 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11695 wide_int mask = wi::mask (w, false,
11696 TYPE_PRECISION (type));
11697 if (((c1 | c2) & mask) == mask
11698 && wi::bit_and_not (c1, mask) == 0)
11700 c3 = mask;
11701 break;
11705 if (c3 != c1)
11707 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11708 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11709 wide_int_to_tree (type, c3));
11710 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
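/* Illustrative example (editorial addition): (x & 0xF0) | 0xF0
   collapses to 0xF0 by the (C1 & C2) == C1 rule, while in
   (x & 0x0F) | 0x03 the bits 0x03 are forced by the |, so C1 is
   minimized and the expression is rebuilt as (x & 0x0C) | 0x03.  */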
11714 /* See if this can be simplified into a rotate first. If that
11715 is unsuccessful continue in the association code. */
11716 goto bit_rotate;
11718 case BIT_XOR_EXPR:
11719 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11720 if (TREE_CODE (arg0) == BIT_AND_EXPR
11721 && INTEGRAL_TYPE_P (type)
11722 && integer_onep (TREE_OPERAND (arg0, 1))
11723 && integer_onep (arg1))
11724 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11725 build_zero_cst (TREE_TYPE (arg0)));
11727 /* See if this can be simplified into a rotate first. If that
11728 is unsuccessful continue in the association code. */
11729 goto bit_rotate;
11731 case BIT_AND_EXPR:
11732 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11733 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11734 && INTEGRAL_TYPE_P (type)
11735 && integer_onep (TREE_OPERAND (arg0, 1))
11736 && integer_onep (arg1))
11738 tree tem2;
11739 tem = TREE_OPERAND (arg0, 0);
11740 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11741 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11742 tem, tem2);
11743 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11744 build_zero_cst (TREE_TYPE (tem)));
11746 /* Fold ~X & 1 as (X & 1) == 0. */
11747 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11748 && INTEGRAL_TYPE_P (type)
11749 && integer_onep (arg1))
11751 tree tem2;
11752 tem = TREE_OPERAND (arg0, 0);
11753 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11754 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11755 tem, tem2);
11756 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11757 build_zero_cst (TREE_TYPE (tem)));
11759 /* Fold !X & 1 as X == 0. */
11760 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11761 && integer_onep (arg1))
11763 tem = TREE_OPERAND (arg0, 0);
11764 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11765 build_zero_cst (TREE_TYPE (tem)));
11768 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11769 multiple of 1 << CST. */
11770 if (TREE_CODE (arg1) == INTEGER_CST)
11772 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11773 wide_int ncst1 = -cst1;
11774 if ((cst1 & ncst1) == ncst1
11775 && multiple_of_p (type, arg0,
11776 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11777 return fold_convert_loc (loc, type, arg0);
11780 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11781 bits from CST2. */
11782 if (TREE_CODE (arg1) == INTEGER_CST
11783 && TREE_CODE (arg0) == MULT_EXPR
11784 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11786 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11787 wide_int masked
11788 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11790 if (masked == 0)
11791 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11792 arg0, arg1);
11793 else if (masked != warg1)
11795 /* Avoid the transform if arg1 is a mask of some
11796 mode which allows further optimizations. */
11797 int pop = wi::popcount (warg1);
11798 if (!(pop >= BITS_PER_UNIT
11799 && pow2p_hwi (pop)
11800 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11801 return fold_build2_loc (loc, code, type, op0,
11802 wide_int_to_tree (type, masked));
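/* Illustrative example (editorial addition): (x * 8) & 7 folds to 0,
   because x * 8 always has its low three bits clear; and in
   (x * 4) & 7 the mask bits below the two trailing zeros of 4 are
   known to be zero in x * 4, so the expression becomes (x * 4) & 4.  */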
11806 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11807 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11808 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11810 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11812 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11813 if (mask == -1)
11814 return
11815 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11818 goto associate;
11820 case RDIV_EXPR:
11821 /* Don't touch a floating-point divide by zero unless the mode
11822 of the constant can represent infinity. */
11823 if (TREE_CODE (arg1) == REAL_CST
11824 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11825 && real_zerop (arg1))
11826 return NULL_TREE;
11828 /* (-A) / (-B) -> A / B */
11829 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11830 return fold_build2_loc (loc, RDIV_EXPR, type,
11831 TREE_OPERAND (arg0, 0),
11832 negate_expr (arg1));
11833 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11834 return fold_build2_loc (loc, RDIV_EXPR, type,
11835 negate_expr (arg0),
11836 TREE_OPERAND (arg1, 0));
11837 return NULL_TREE;
11839 case TRUNC_DIV_EXPR:
11840 /* Fall through */
11842 case FLOOR_DIV_EXPR:
11843 /* Simplify A / (B << N) where A and B are positive and B is
11844 a power of 2, to A >> (N + log2(B)). */
11845 strict_overflow_p = false;
11846 if (TREE_CODE (arg1) == LSHIFT_EXPR
11847 && (TYPE_UNSIGNED (type)
11848 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11850 tree sval = TREE_OPERAND (arg1, 0);
11851 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11853 tree sh_cnt = TREE_OPERAND (arg1, 1);
11854 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11855 wi::exact_log2 (wi::to_wide (sval)));
11857 if (strict_overflow_p)
11858 fold_overflow_warning (("assuming signed overflow does not "
11859 "occur when simplifying A / (B << N)"),
11860 WARN_STRICT_OVERFLOW_MISC);
11862 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11863 sh_cnt, pow2);
11864 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11865 fold_convert_loc (loc, type, arg0), sh_cnt);
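/* Illustrative example (editorial addition): for unsigned x,
   x / (4 << n) is rewritten as x >> (n + 2), folding the log2 of the
   power-of-two factor into the shift count.  */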
11869 /* Fall through */
11871 case ROUND_DIV_EXPR:
11872 case CEIL_DIV_EXPR:
11873 case EXACT_DIV_EXPR:
11874 if (integer_zerop (arg1))
11875 return NULL_TREE;
11877 /* Convert -A / -B to A / B when the type is signed and overflow is
11878 undefined. */
11879 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11880 && TREE_CODE (op0) == NEGATE_EXPR
11881 && negate_expr_p (op1))
11883 if (ANY_INTEGRAL_TYPE_P (type))
11884 fold_overflow_warning (("assuming signed overflow does not occur "
11885 "when distributing negation across "
11886 "division"),
11887 WARN_STRICT_OVERFLOW_MISC);
11888 return fold_build2_loc (loc, code, type,
11889 fold_convert_loc (loc, type,
11890 TREE_OPERAND (arg0, 0)),
11891 negate_expr (op1));
11893 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11894 && TREE_CODE (arg1) == NEGATE_EXPR
11895 && negate_expr_p (op0))
11897 if (ANY_INTEGRAL_TYPE_P (type))
11898 fold_overflow_warning (("assuming signed overflow does not occur "
11899 "when distributing negation across "
11900 "division"),
11901 WARN_STRICT_OVERFLOW_MISC);
11902 return fold_build2_loc (loc, code, type,
11903 negate_expr (op0),
11904 fold_convert_loc (loc, type,
11905 TREE_OPERAND (arg1, 0)));
11908 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11909 operation, EXACT_DIV_EXPR.
11911 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11912 At one time others generated faster code, but it's not clear if they
11913 do after the last round of changes to the DIV code in expmed.cc. */
11914 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11915 && multiple_of_p (type, arg0, arg1))
11916 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11917 fold_convert (type, arg0),
11918 fold_convert (type, arg1));
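/* Illustrative example (editorial addition): if arg0 is x * 4 and
   arg1 is 4, multiple_of_p holds, so a CEIL_DIV_EXPR or FLOOR_DIV_EXPR
   here is rewritten as EXACT_DIV_EXPR, telling the expander the
   division is remainder-free.  */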
11920 strict_overflow_p = false;
11921 if (TREE_CODE (arg1) == INTEGER_CST
11922 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11923 &strict_overflow_p)) != 0)
11925 if (strict_overflow_p)
11926 fold_overflow_warning (("assuming signed overflow does not occur "
11927 "when simplifying division"),
11928 WARN_STRICT_OVERFLOW_MISC);
11929 return fold_convert_loc (loc, type, tem);
11932 return NULL_TREE;
11934 case CEIL_MOD_EXPR:
11935 case FLOOR_MOD_EXPR:
11936 case ROUND_MOD_EXPR:
11937 case TRUNC_MOD_EXPR:
11938 strict_overflow_p = false;
11939 if (TREE_CODE (arg1) == INTEGER_CST
11940 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11941 &strict_overflow_p)) != 0)
11943 if (strict_overflow_p)
11944 fold_overflow_warning (("assuming signed overflow does not occur "
11945 "when simplifying modulus"),
11946 WARN_STRICT_OVERFLOW_MISC);
11947 return fold_convert_loc (loc, type, tem);
11950 return NULL_TREE;
11952 case LROTATE_EXPR:
11953 case RROTATE_EXPR:
11954 case RSHIFT_EXPR:
11955 case LSHIFT_EXPR:
11956 /* Since a negative shift count is not well-defined,
11957 don't try to compute it in the compiler. */
11958 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11959 return NULL_TREE;
11961 prec = element_precision (type);
11963 /* If we have a rotate of a bit operation with the rotate count and
11964 the second operand of the bit operation both constant,
11965 permute the two operations. */
11966 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11967 && (TREE_CODE (arg0) == BIT_AND_EXPR
11968 || TREE_CODE (arg0) == BIT_IOR_EXPR
11969 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11970 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11972 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11973 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11974 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11975 fold_build2_loc (loc, code, type,
11976 arg00, arg1),
11977 fold_build2_loc (loc, code, type,
11978 arg01, arg1));
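/* Illustrative example (editorial addition): for a 16-bit unsigned x,
   (x & 0x00F0) r>> 4 is rebuilt as (x r>> 4) & (0x00F0 r>> 4), i.e.
   (x r>> 4) & 0x000F, rotating the constant mask at compile time.  */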
11981 /* Two consecutive rotates adding up to some integer
11982 multiple of the precision of the type can be ignored. */
11983 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11984 && TREE_CODE (arg0) == RROTATE_EXPR
11985 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11986 && wi::umod_trunc (wi::to_wide (arg1)
11987 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11988 prec) == 0)
11989 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
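/* Illustrative example (editorial addition): for a 32-bit x,
   (x r>> 10) r>> 22 rotates by 32 bits in total, a whole multiple of
   the precision, so the pair folds back to plain x.  */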
11991 return NULL_TREE;
11993 case MIN_EXPR:
11994 case MAX_EXPR:
11995 goto associate;
11997 case TRUTH_ANDIF_EXPR:
11998 /* Note that the operands of this must be ints
11999 and their values must be 0 or 1.
12000 ("true" is a fixed value perhaps depending on the language.) */
12001 /* If first arg is constant zero, return it. */
12002 if (integer_zerop (arg0))
12003 return fold_convert_loc (loc, type, arg0);
12004 /* FALLTHRU */
12005 case TRUTH_AND_EXPR:
12006 /* If either arg is constant true, drop it. */
12007 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12008 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12009 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12010 /* Preserve sequence points. */
12011 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12013 /* If second arg is constant zero, result is zero, but first arg
12014 must be evaluated. */
12015 if (integer_zerop (arg1))
12016 return omit_one_operand_loc (loc, type, arg1, arg0);
12017 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12018 case will be handled here. */
12019 if (integer_zerop (arg0))
12020 return omit_one_operand_loc (loc, type, arg0, arg1);
12022 /* !X && X is always false. */
12023 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12024 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12025 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12026 /* X && !X is always false. */
12027 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12028 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12029 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12031 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12032 means A >= Y && A != MAX, but in this case we know that
12033 A < X <= MAX. */
12035 if (!TREE_SIDE_EFFECTS (arg0)
12036 && !TREE_SIDE_EFFECTS (arg1))
12038 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12039 if (tem && !operand_equal_p (tem, arg0, 0))
12040 return fold_convert (type,
12041 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12042 tem, arg1));
12044 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12045 if (tem && !operand_equal_p (tem, arg1, 0))
12046 return fold_convert (type,
12047 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12048 arg0, tem));
12051 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12052 != NULL_TREE)
12053 return tem;
12055 return NULL_TREE;
12057 case TRUTH_ORIF_EXPR:
12058 /* Note that the operands of this must be ints
12059 and their values must be 0 or 1.
12060 ("true" is a fixed value perhaps depending on the language.) */
12061 /* If first arg is constant true, return it. */
12062 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12063 return fold_convert_loc (loc, type, arg0);
12064 /* FALLTHRU */
12065 case TRUTH_OR_EXPR:
12066 /* If either arg is constant zero, drop it. */
12067 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12068 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12069 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12070 /* Preserve sequence points. */
12071 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12072 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12073 /* If second arg is constant true, result is true, but we must
12074 evaluate first arg. */
12075 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12076 return omit_one_operand_loc (loc, type, arg1, arg0);
12077 /* Likewise for first arg, but note this only occurs here for
12078 TRUTH_OR_EXPR. */
12079 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12080 return omit_one_operand_loc (loc, type, arg0, arg1);
12082 /* !X || X is always true. */
12083 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12084 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12085 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12086 /* X || !X is always true. */
12087 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12088 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12089 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12091 /* (X && !Y) || (!X && Y) is X ^ Y */
12092 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12093 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12095 tree a0, a1, l0, l1, n0, n1;
12097 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12098 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12100 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12101 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12103 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12104 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12106 if ((operand_equal_p (n0, a0, 0)
12107 && operand_equal_p (n1, a1, 0))
12108 || (operand_equal_p (n0, a1, 0)
12109 && operand_equal_p (n1, a0, 0)))
12110 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12113 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12114 != NULL_TREE)
12115 return tem;
12117 return NULL_TREE;
12119 case TRUTH_XOR_EXPR:
12120 /* If the second arg is constant zero, drop it. */
12121 if (integer_zerop (arg1))
12122 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12123 /* If the second arg is constant true, this is a logical inversion. */
12124 if (integer_onep (arg1))
12126 tem = invert_truthvalue_loc (loc, arg0);
12127 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12129 /* Identical arguments cancel to zero. */
12130 if (operand_equal_p (arg0, arg1, 0))
12131 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12133 /* !X ^ X is always true. */
12134 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12135 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12136 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12138 /* X ^ !X is always true. */
12139 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12140 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12141 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12143 return NULL_TREE;
12145 case EQ_EXPR:
12146 case NE_EXPR:
12147 STRIP_NOPS (arg0);
12148 STRIP_NOPS (arg1);
12150 tem = fold_comparison (loc, code, type, op0, op1);
12151 if (tem != NULL_TREE)
12152 return tem;
12154 /* bool_var != 1 becomes !bool_var. */
12155 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12156 && code == NE_EXPR)
12157 return fold_convert_loc (loc, type,
12158 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12159 TREE_TYPE (arg0), arg0));
12161 /* bool_var == 0 becomes !bool_var. */
12162 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12163 && code == EQ_EXPR)
12164 return fold_convert_loc (loc, type,
12165 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12166 TREE_TYPE (arg0), arg0));
12168 /* !exp != 0 becomes !exp */
12169 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12170 && code == NE_EXPR)
12171 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12173 /* If this is an EQ or NE comparison with zero and ARG0 is
12174 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12175 two operations, but the latter can be done in one less insn
12176 on machines that have only two-operand insns or on which a
12177 constant cannot be the first operand. */
12178 if (TREE_CODE (arg0) == BIT_AND_EXPR
12179 && integer_zerop (arg1))
12181 tree arg00 = TREE_OPERAND (arg0, 0);
12182 tree arg01 = TREE_OPERAND (arg0, 1);
12183 if (TREE_CODE (arg00) == LSHIFT_EXPR
12184 && integer_onep (TREE_OPERAND (arg00, 0)))
12186 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12187 arg01, TREE_OPERAND (arg00, 1));
12188 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12189 build_one_cst (TREE_TYPE (arg0)));
12190 return fold_build2_loc (loc, code, type,
12191 fold_convert_loc (loc, TREE_TYPE (arg1),
12192 tem), arg1);
12194 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12195 && integer_onep (TREE_OPERAND (arg01, 0)))
12197 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12198 arg00, TREE_OPERAND (arg01, 1));
12199 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12200 build_one_cst (TREE_TYPE (arg0)));
12201 return fold_build2_loc (loc, code, type,
12202 fold_convert_loc (loc, TREE_TYPE (arg1),
12203 tem), arg1);
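/* Illustrative example (editorial addition): ((1 << n) & x) != 0 is
   rewritten as ((x >> n) & 1) != 0, shifting the variable instead of
   the constant 1; per the comment above, this saves an instruction on
   two-operand targets.  */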
12207 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12208 C1 is a valid shift constant, and C2 is a power of two, i.e.
12209 a single bit. */
12210 if (TREE_CODE (arg0) == BIT_AND_EXPR
12211 && integer_pow2p (TREE_OPERAND (arg0, 1))
12212 && integer_zerop (arg1))
12214 tree arg00 = TREE_OPERAND (arg0, 0);
12215 STRIP_NOPS (arg00);
12216 if (TREE_CODE (arg00) == RSHIFT_EXPR
12217 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12219 tree itype = TREE_TYPE (arg00);
12220 tree arg001 = TREE_OPERAND (arg00, 1);
12221 prec = TYPE_PRECISION (itype);
12223 /* Check for a valid shift count. */
12224 if (wi::ltu_p (wi::to_wide (arg001), prec))
12226 tree arg01 = TREE_OPERAND (arg0, 1);
12227 tree arg000 = TREE_OPERAND (arg00, 0);
12228 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12229 /* If (C2 << C1) doesn't overflow, then
12230 ((X >> C1) & C2) != 0 can be rewritten as
12231 (X & (C2 << C1)) != 0. */
12232 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12234 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12235 arg01, arg001);
12236 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12237 arg000, tem);
12238 return fold_build2_loc (loc, code, type, tem,
12239 fold_convert_loc (loc, itype, arg1));
12241 /* Otherwise, for signed (arithmetic) shifts,
12242 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12243 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12244 else if (!TYPE_UNSIGNED (itype))
12245 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12246 : LT_EXPR,
12247 type, arg000,
12248 build_int_cst (itype, 0));
12249 /* Otherwise, for unsigned (logical) shifts,
12250 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12251 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12252 else
12253 return omit_one_operand_loc (loc, type,
12254 code == EQ_EXPR ? integer_one_node
12255 : integer_zero_node,
12256 arg000);
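/* Illustrative examples (editorial addition): ((x >> 2) & 4) != 0
   becomes (x & 16) != 0, since 4 << 2 does not overflow.  For signed
   32-bit x, ((x >> 31) & 2) != 0 would overflow the shift, but the
   arithmetic shift replicates the sign bit, so it is rewritten as
   x < 0.  */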
12261 /* If this is a comparison of a field, we may be able to simplify it. */
12262 if ((TREE_CODE (arg0) == COMPONENT_REF
12263 || TREE_CODE (arg0) == BIT_FIELD_REF)
12264 /* Handle the constant case even without -O
12265 to make sure the warnings are given. */
12266 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12268 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12269 if (t1)
12270 return t1;
12273 /* Optimize comparisons of strlen vs zero to a compare of the
12274 first character of the string vs zero. To wit,
12275 strlen(ptr) == 0 => *ptr == 0
12276 strlen(ptr) != 0 => *ptr != 0
12277 Other cases should reduce to one of these two (or a constant)
12278 due to the return value of strlen being unsigned. */
12279 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12281 tree fndecl = get_callee_fndecl (arg0);
12283 if (fndecl
12284 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12285 && call_expr_nargs (arg0) == 1
12286 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12287 == POINTER_TYPE))
12289 tree ptrtype
12290 = build_pointer_type (build_qualified_type (char_type_node,
12291 TYPE_QUAL_CONST));
12292 tree ptr = fold_convert_loc (loc, ptrtype,
12293 CALL_EXPR_ARG (arg0, 0));
12294 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12295 return fold_build2_loc (loc, code, type, iref,
12296 build_int_cst (TREE_TYPE (iref), 0));
12300 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12301 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12302 if (TREE_CODE (arg0) == RSHIFT_EXPR
12303 && integer_zerop (arg1)
12304 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12306 tree arg00 = TREE_OPERAND (arg0, 0);
12307 tree arg01 = TREE_OPERAND (arg0, 1);
12308 tree itype = TREE_TYPE (arg00);
12309 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12311 if (TYPE_UNSIGNED (itype))
12313 itype = signed_type_for (itype);
12314 arg00 = fold_convert_loc (loc, itype, arg00);
12316 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12317 type, arg00, build_zero_cst (itype));
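/* Illustrative example (editorial addition): for 32-bit int x,
   (x >> 31) != 0 is exactly the sign test x < 0 and (x >> 31) == 0 is
   x >= 0; an unsigned x is first converted to the corresponding signed
   type so that the comparison against zero is meaningful.  */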
12321 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12322 (X & C) == 0 when C is a single bit. */
12323 if (TREE_CODE (arg0) == BIT_AND_EXPR
12324 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12325 && integer_zerop (arg1)
12326 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12328 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12329 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12330 TREE_OPERAND (arg0, 1));
12331 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12332 type, tem,
12333 fold_convert_loc (loc, TREE_TYPE (arg0),
12334 arg1));
12337 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12338 constant C is a power of two, i.e. a single bit. */
12339 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12340 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12341 && integer_zerop (arg1)
12342 && integer_pow2p (TREE_OPERAND (arg0, 1))
12343 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12344 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12346 tree arg00 = TREE_OPERAND (arg0, 0);
12347 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12348 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12351 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12352 when C is a power of two, i.e. a single bit. */
12353 if (TREE_CODE (arg0) == BIT_AND_EXPR
12354 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12355 && integer_zerop (arg1)
12356 && integer_pow2p (TREE_OPERAND (arg0, 1))
12357 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12358 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12360 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12361 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12362 arg000, TREE_OPERAND (arg0, 1));
12363 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12364 tem, build_int_cst (TREE_TYPE (tem), 0));
12367 if (integer_zerop (arg1)
12368 && tree_expr_nonzero_p (arg0))
12370 tree res = constant_boolean_node (code == NE_EXPR, type);
12371 return omit_one_operand_loc (loc, type, res, arg0);
12374 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12375 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12377 tree arg00 = TREE_OPERAND (arg0, 0);
12378 tree arg01 = TREE_OPERAND (arg0, 1);
12379 tree arg10 = TREE_OPERAND (arg1, 0);
12380 tree arg11 = TREE_OPERAND (arg1, 1);
12381 tree itype = TREE_TYPE (arg0);
12383 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12384 operand_equal_p guarantees no side-effects so we don't need
12385 to use omit_one_operand on Z. */
12386 if (operand_equal_p (arg01, arg11, 0))
12387 return fold_build2_loc (loc, code, type, arg00,
12388 fold_convert_loc (loc, TREE_TYPE (arg00),
12389 arg10));
12390 if (operand_equal_p (arg01, arg10, 0))
12391 return fold_build2_loc (loc, code, type, arg00,
12392 fold_convert_loc (loc, TREE_TYPE (arg00),
12393 arg11));
12394 if (operand_equal_p (arg00, arg11, 0))
12395 return fold_build2_loc (loc, code, type, arg01,
12396 fold_convert_loc (loc, TREE_TYPE (arg01),
12397 arg10));
12398 if (operand_equal_p (arg00, arg10, 0))
12399 return fold_build2_loc (loc, code, type, arg01,
12400 fold_convert_loc (loc, TREE_TYPE (arg01),
12401 arg11));
12403 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12404 if (TREE_CODE (arg01) == INTEGER_CST
12405 && TREE_CODE (arg11) == INTEGER_CST)
12407 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12408 fold_convert_loc (loc, itype, arg11));
12409 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12410 return fold_build2_loc (loc, code, type, tem,
12411 fold_convert_loc (loc, itype, arg10));
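/* Illustrative examples (editorial addition): (x ^ z) == (y ^ z)
   simplifies to x == y, since XOR by a common term is invertible; and
   (x ^ 5) == (y ^ 3) becomes (x ^ (5 ^ 3)) == y, i.e. (x ^ 6) == y,
   merging both constants.  */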
12415 /* Attempt to simplify equality/inequality comparisons of complex
12416 values. Only lower the comparison if the result is known or
12417 can be simplified to a single scalar comparison. */
12418 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12419 || TREE_CODE (arg0) == COMPLEX_CST)
12420 && (TREE_CODE (arg1) == COMPLEX_EXPR
12421 || TREE_CODE (arg1) == COMPLEX_CST))
12423 tree real0, imag0, real1, imag1;
12424 tree rcond, icond;
12426 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12428 real0 = TREE_OPERAND (arg0, 0);
12429 imag0 = TREE_OPERAND (arg0, 1);
12431 else
12433 real0 = TREE_REALPART (arg0);
12434 imag0 = TREE_IMAGPART (arg0);
12437 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12439 real1 = TREE_OPERAND (arg1, 0);
12440 imag1 = TREE_OPERAND (arg1, 1);
12442 else
12444 real1 = TREE_REALPART (arg1);
12445 imag1 = TREE_IMAGPART (arg1);
12448 rcond = fold_binary_loc (loc, code, type, real0, real1);
12449 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12451 if (integer_zerop (rcond))
12453 if (code == EQ_EXPR)
12454 return omit_two_operands_loc (loc, type, boolean_false_node,
12455 imag0, imag1);
12456 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12458 else
12460 if (code == NE_EXPR)
12461 return omit_two_operands_loc (loc, type, boolean_true_node,
12462 imag0, imag1);
12463 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12467 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12468 if (icond && TREE_CODE (icond) == INTEGER_CST)
12470 if (integer_zerop (icond))
12472 if (code == EQ_EXPR)
12473 return omit_two_operands_loc (loc, type, boolean_false_node,
12474 real0, real1);
12475 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12477 else
12479 if (code == NE_EXPR)
12480 return omit_two_operands_loc (loc, type, boolean_true_node,
12481 real0, real1);
12482 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
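/* Illustrative example (editorial addition): comparing the
   COMPLEX_EXPRs x + 2i and y + 3i, the imaginary comparison 2 == 3
   folds to constant false, so the EQ_EXPR is false (and NE_EXPR true)
   regardless of x and y, which are retained only for their side
   effects via omit_two_operands_loc.  */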
12487 return NULL_TREE;
12489 case LT_EXPR:
12490 case GT_EXPR:
12491 case LE_EXPR:
12492 case GE_EXPR:
12493 tem = fold_comparison (loc, code, type, op0, op1);
12494 if (tem != NULL_TREE)
12495 return tem;
12497 /* Transform comparisons of the form X +- C CMP X. */
12498 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12499 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12501 && !HONOR_SNANS (arg0))
12503 tree arg01 = TREE_OPERAND (arg0, 1);
12504 enum tree_code code0 = TREE_CODE (arg0);
12505 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12507 /* (X - c) > X becomes false. */
12508 if (code == GT_EXPR
12509 && ((code0 == MINUS_EXPR && is_positive >= 0)
12510 || (code0 == PLUS_EXPR && is_positive <= 0)))
12511 return constant_boolean_node (0, type);
12513 /* Likewise (X + c) < X becomes false. */
12514 if (code == LT_EXPR
12515 && ((code0 == PLUS_EXPR && is_positive >= 0)
12516 || (code0 == MINUS_EXPR && is_positive <= 0)))
12517 return constant_boolean_node (0, type);
12519 /* Convert (X - c) <= X to true. */
12520 if (!HONOR_NANS (arg1)
12521 && code == LE_EXPR
12522 && ((code0 == MINUS_EXPR && is_positive >= 0)
12523 || (code0 == PLUS_EXPR && is_positive <= 0)))
12524 return constant_boolean_node (1, type);
12526 /* Convert (X + c) >= X to true. */
12527 if (!HONOR_NANS (arg1)
12528 && code == GE_EXPR
12529 && ((code0 == PLUS_EXPR && is_positive >= 0)
12530 || (code0 == MINUS_EXPR && is_positive <= 0)))
12531 return constant_boolean_node (1, type);
12534 /* If we are comparing an ABS_EXPR with a constant, we can
12535 convert all the cases into explicit comparisons, but they may
12536 well not be faster than doing the ABS and one comparison.
12537 But ABS (X) <= C is a range comparison, which becomes a subtraction
12538 and a comparison, and is probably faster. */
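/* E.g. ABS (X) <= 7 becomes X >= -7 && X <= 7, which the range-test
   machinery can then reduce to a single unsigned comparison.  */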
12539 if (code == LE_EXPR
12540 && TREE_CODE (arg1) == INTEGER_CST
12541 && TREE_CODE (arg0) == ABS_EXPR
12542 && ! TREE_SIDE_EFFECTS (arg0)
12543 && (tem = negate_expr (arg1)) != 0
12544 && TREE_CODE (tem) == INTEGER_CST
12545 && !TREE_OVERFLOW (tem))
12546 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12547 build2 (GE_EXPR, type,
12548 TREE_OPERAND (arg0, 0), tem),
12549 build2 (LE_EXPR, type,
12550 TREE_OPERAND (arg0, 0), arg1));
12552 /* Convert ABS_EXPR<x> >= 0 to true. */
12553 strict_overflow_p = false;
12554 if (code == GE_EXPR
12555 && (integer_zerop (arg1)
12556 || (! HONOR_NANS (arg0)
12557 && real_zerop (arg1)))
12558 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12560 if (strict_overflow_p)
12561 fold_overflow_warning (("assuming signed overflow does not occur "
12562 "when simplifying comparison of "
12563 "absolute value and zero"),
12564 WARN_STRICT_OVERFLOW_CONDITIONAL);
12565 return omit_one_operand_loc (loc, type,
12566 constant_boolean_node (true, type),
12567 arg0);
12570 /* Convert ABS_EXPR<x> < 0 to false. */
12571 strict_overflow_p = false;
12572 if (code == LT_EXPR
12573 && (integer_zerop (arg1) || real_zerop (arg1))
12574 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12576 if (strict_overflow_p)
12577 fold_overflow_warning (("assuming signed overflow does not occur "
12578 "when simplifying comparison of "
12579 "absolute value and zero"),
12580 WARN_STRICT_OVERFLOW_CONDITIONAL);
12581 return omit_one_operand_loc (loc, type,
12582 constant_boolean_node (false, type),
12583 arg0);
12586 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12587 and similarly for >= into !=. */
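/* E.g. for unsigned X, X < (1 << Y) becomes (X >> Y) == 0 and
   X >= (1 << Y) becomes (X >> Y) != 0: X is below 1 << Y exactly
   when it has no bits set at position Y or above.  */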
12588 if ((code == LT_EXPR || code == GE_EXPR)
12589 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12590 && TREE_CODE (arg1) == LSHIFT_EXPR
12591 && integer_onep (TREE_OPERAND (arg1, 0)))
12592 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12593 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12594 TREE_OPERAND (arg1, 1)),
12595 build_zero_cst (TREE_TYPE (arg0)));
12597 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
12598 otherwise Y might be >= # of bits in X's type and thus e.g.
12599 (unsigned char) (1 << Y) for Y == 15 might be 0.
12600 If the cast is widening, then 1 << Y should have unsigned type,
12601 otherwise if Y is the number of bits in the signed shift type minus 1,
12602 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
12603 might be 0xffffffff80000000. */
12604 if ((code == LT_EXPR || code == GE_EXPR)
12605 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12606 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12607 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12608 && CONVERT_EXPR_P (arg1)
12609 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12610 && (element_precision (TREE_TYPE (arg1))
12611 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12612 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12613 || (element_precision (TREE_TYPE (arg1))
12614 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12615 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12617 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12618 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12619 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12620 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12621 build_zero_cst (TREE_TYPE (arg0)));
12624 return NULL_TREE;
12626 case UNORDERED_EXPR:
12627 case ORDERED_EXPR:
12628 case UNLT_EXPR:
12629 case UNLE_EXPR:
12630 case UNGT_EXPR:
12631 case UNGE_EXPR:
12632 case UNEQ_EXPR:
12633 case LTGT_EXPR:
12634 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12636 tree targ0 = strip_float_extensions (arg0);
12637 tree targ1 = strip_float_extensions (arg1);
12638 tree newtype = TREE_TYPE (targ0);
12640 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12641 newtype = TREE_TYPE (targ1);
12643 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12644 return fold_build2_loc (loc, code, type,
12645 fold_convert_loc (loc, newtype, targ0),
12646 fold_convert_loc (loc, newtype, targ1));
12649 return NULL_TREE;
12651 case COMPOUND_EXPR:
12652 /* When pedantic, a compound expression can be neither an lvalue
12653 nor an integer constant expression. */
12654 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12655 return NULL_TREE;
12656 /* Don't let (0, 0) be a null pointer constant. */
12657 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12658 : fold_convert_loc (loc, type, arg1);
12659 return tem;
12661 case ASSERT_EXPR:
12662 /* An ASSERT_EXPR should never be passed to fold_binary. */
12663 gcc_unreachable ();
12665 default:
12666 return NULL_TREE;
12667 } /* switch (code) */
12670 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12671 ((A & N) + B) & M -> (A + B) & M
12672 Similarly if (N & M) == 0,
12673 ((A | N) + B) & M -> (A + B) & M
12674 and for - instead of + (or unary - instead of +)
12675 and/or ^ instead of |.
12676 If B is constant and (B & M) == 0, fold into A & M.
12678 This function is a helper for match.pd patterns. Return the non-NULL
12679 type in which the simplified operation should be performed, but only
12680 if some optimization is possible; otherwise return NULL_TREE.
12682 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12683 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12684 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12685 +/-. */
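/* A worked instance: with M == 7 (i.e. (1LL << 3) - 1) and N == 15,
   (N & M) == M, so ((A & 15) + B) & 7 simplifies to (A + B) & 7;
   with N == 8, (N & M) == 0, so ((A | 8) + B) & 7 simplifies the same
   way.  Carries only propagate upwards, so bits of N outside M cannot
   influence the low three bits of the sum.  */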
12686 tree
12687 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12688 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12689 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12690 tree *pmop)
12692 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12693 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12694 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12695 if (~cst1 == 0
12696 || (cst1 & (cst1 + 1)) != 0
12697 || !INTEGRAL_TYPE_P (type)
12698 || (!TYPE_OVERFLOW_WRAPS (type)
12699 && TREE_CODE (type) != INTEGER_TYPE)
12700 || (wi::max_value (type) & cst1) != cst1)
12701 return NULL_TREE;
12703 enum tree_code codes[2] = { code00, code01 };
12704 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12705 int which = 0;
12706 wide_int cst0;
12708 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12709 arg1 (M) is == (1LL << cst) - 1.
12710 Store C into PMOP[0] and D into PMOP[1]. */
12711 pmop[0] = arg00;
12712 pmop[1] = arg01;
12713 which = code != NEGATE_EXPR;
12715 for (; which >= 0; which--)
12716 switch (codes[which])
12718 case BIT_AND_EXPR:
12719 case BIT_IOR_EXPR:
12720 case BIT_XOR_EXPR:
12721 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12722 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12723 if (codes[which] == BIT_AND_EXPR)
12725 if (cst0 != cst1)
12726 break;
12728 else if (cst0 != 0)
12729 break;
12730 /* If C or D is of the form (A & N) where
12731 (N & M) == M, or of the form (A | N) or
12732 (A ^ N) where (N & M) == 0, replace it with A. */
12733 pmop[which] = arg0xx[2 * which];
12734 break;
12735 case ERROR_MARK:
12736 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12737 break;
12738 /* If C or D is a constant N where (N & M) == 0, it can be
12739 omitted (replaced with 0). */
12740 if ((code == PLUS_EXPR
12741 || (code == MINUS_EXPR && which == 0))
12742 && (cst1 & wi::to_wide (pmop[which])) == 0)
12743 pmop[which] = build_int_cst (type, 0);
12744 /* Similarly, with C - N where (-N & M) == 0. */
12745 if (code == MINUS_EXPR
12746 && which == 1
12747 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12748 pmop[which] = build_int_cst (type, 0);
12749 break;
12750 default:
12751 gcc_unreachable ();
12754 /* Only build anything new if we optimized one or both arguments above. */
12755 if (pmop[0] == arg00 && pmop[1] == arg01)
12756 return NULL_TREE;
12758 if (TYPE_OVERFLOW_WRAPS (type))
12759 return type;
12760 else
12761 return unsigned_type_for (type);
12764 /* Used by contains_label_1 and contains_label_p. */
12766 struct contains_label_data
12768 hash_set<tree> *pset;
12769 bool inside_switch_p;
12772 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12773 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12774 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12776 static tree
12777 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12779 contains_label_data *d = (contains_label_data *) data;
12780 switch (TREE_CODE (*tp))
12782 case LABEL_EXPR:
12783 return *tp;
12785 case CASE_LABEL_EXPR:
12786 if (!d->inside_switch_p)
12787 return *tp;
12788 return NULL_TREE;
12790 case SWITCH_EXPR:
12791 if (!d->inside_switch_p)
12793 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12794 return *tp;
12795 d->inside_switch_p = true;
12796 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12797 return *tp;
12798 d->inside_switch_p = false;
12799 *walk_subtrees = 0;
12801 return NULL_TREE;
12803 case GOTO_EXPR:
12804 *walk_subtrees = 0;
12805 return NULL_TREE;
12807 default:
12808 return NULL_TREE;
12812 /* Return whether the sub-tree ST contains a label which is accessible from
12813 outside the sub-tree. */
12815 static bool
12816 contains_label_p (tree st)
12818 hash_set<tree> pset;
12819 contains_label_data data = { &pset, false };
12820 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12823 /* Fold a ternary expression of code CODE and type TYPE with operands
12824 OP0, OP1, and OP2. Return the folded expression if folding is
12825 successful. Otherwise, return NULL_TREE. */
12827 tree
12828 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12829 tree op0, tree op1, tree op2)
12831 tree tem;
12832 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12833 enum tree_code_class kind = TREE_CODE_CLASS (code);
12835 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12836 && TREE_CODE_LENGTH (code) == 3);
12838 /* If this is a commutative operation, and OP0 is a constant, move it
12839 to OP1 to reduce the number of tests below. */
12840 if (commutative_ternary_tree_code (code)
12841 && tree_swap_operands_p (op0, op1))
12842 return fold_build3_loc (loc, code, type, op1, op0, op2);
12844 tem = generic_simplify (loc, code, type, op0, op1, op2);
12845 if (tem)
12846 return tem;
12848 /* Strip any conversions that don't change the mode. This is safe
12849 for every expression, except for a comparison expression because
12850 its signedness is derived from its operands. So, in the latter
12851 case, only strip conversions that don't change the signedness.
12853 Note that this is done as an internal manipulation within the
12854 constant folder, in order to find the simplest representation of
12855 the arguments so that their form can be studied. In any case,
12856 the appropriate type conversions should be put back in the tree
12857 that will get out of the constant folder. */
12858 if (op0)
12860 arg0 = op0;
12861 STRIP_NOPS (arg0);
12864 if (op1)
12866 arg1 = op1;
12867 STRIP_NOPS (arg1);
12870 if (op2)
12872 arg2 = op2;
12873 STRIP_NOPS (arg2);
12876 switch (code)
12878 case COMPONENT_REF:
12879 if (TREE_CODE (arg0) == CONSTRUCTOR
12880 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12882 unsigned HOST_WIDE_INT idx;
12883 tree field, value;
12884 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12885 if (field == arg1)
12886 return value;
12888 return NULL_TREE;
12890 case COND_EXPR:
12891 case VEC_COND_EXPR:
12892 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12893 so all simple results must be passed through pedantic_non_lvalue. */
12894 if (TREE_CODE (arg0) == INTEGER_CST)
12896 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12897 tem = integer_zerop (arg0) ? op2 : op1;
12898 /* Only optimize constant conditions when the selected branch
12899 has the same type as the COND_EXPR. This avoids optimizing
12900 away "c ? x : throw", where the throw has a void type.
12901 Also avoid throwing away an operand that contains a label. */
12902 if ((!TREE_SIDE_EFFECTS (unused_op)
12903 || !contains_label_p (unused_op))
12904 && (! VOID_TYPE_P (TREE_TYPE (tem))
12905 || VOID_TYPE_P (type)))
12906 return protected_set_expr_location_unshare (tem, loc);
12907 return NULL_TREE;
12909 else if (TREE_CODE (arg0) == VECTOR_CST)
12911 unsigned HOST_WIDE_INT nelts;
12912 if ((TREE_CODE (arg1) == VECTOR_CST
12913 || TREE_CODE (arg1) == CONSTRUCTOR)
12914 && (TREE_CODE (arg2) == VECTOR_CST
12915 || TREE_CODE (arg2) == CONSTRUCTOR)
12916 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12918 vec_perm_builder sel (nelts, nelts, 1);
12919 for (unsigned int i = 0; i < nelts; i++)
12921 tree val = VECTOR_CST_ELT (arg0, i);
12922 if (integer_all_onesp (val))
12923 sel.quick_push (i);
12924 else if (integer_zerop (val))
12925 sel.quick_push (nelts + i);
12926 else /* Currently unreachable. */
12927 return NULL_TREE;
12929 vec_perm_indices indices (sel, 2, nelts);
12930 tree t = fold_vec_perm (type, arg1, arg2, indices);
12931 if (t != NULL_TREE)
12932 return t;
12936 /* If we have A op B ? A : C, we may be able to convert this to a
12937 simpler expression, depending on the operation and the values
12938 of B and C. Signed zeros prevent all of these transformations,
12939 for reasons given above each one.
12941 Also try swapping the arguments and inverting the conditional. */
12942 if (COMPARISON_CLASS_P (arg0)
12943 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12944 && !HONOR_SIGNED_ZEROS (op1))
12946 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12947 TREE_OPERAND (arg0, 0),
12948 TREE_OPERAND (arg0, 1),
12949 op1, op2);
12950 if (tem)
12951 return tem;
12954 if (COMPARISON_CLASS_P (arg0)
12955 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12956 && !HONOR_SIGNED_ZEROS (op2))
12958 enum tree_code comp_code = TREE_CODE (arg0);
12959 tree arg00 = TREE_OPERAND (arg0, 0);
12960 tree arg01 = TREE_OPERAND (arg0, 1);
12961 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12962 if (comp_code != ERROR_MARK)
12963 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12964 arg00,
12965 arg01,
12966 op2, op1);
12967 if (tem)
12968 return tem;
12971 /* If the second operand is simpler than the third, swap them
12972 since that produces better jump optimization results. */
12973 if (truth_value_p (TREE_CODE (arg0))
12974 && tree_swap_operands_p (op1, op2))
12976 location_t loc0 = expr_location_or (arg0, loc);
12977 /* See if this can be inverted. If it can't, possibly because
12978 it was a floating-point inequality comparison, don't do
12979 anything. */
12980 tem = fold_invert_truthvalue (loc0, arg0);
12981 if (tem)
12982 return fold_build3_loc (loc, code, type, tem, op2, op1);
12985 /* Convert A ? 1 : 0 to simply A. */
12986 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12987 : (integer_onep (op1)
12988 && !VECTOR_TYPE_P (type)))
12989 && integer_zerop (op2)
12990 /* If we try to convert OP0 to our type, the
12991 call to fold will try to move the conversion inside
12992 a COND, which will recurse. In that case, the COND_EXPR
12993 is probably the best choice, so leave it alone. */
12994 && type == TREE_TYPE (arg0))
12995 return protected_set_expr_location_unshare (arg0, loc);
12997 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12998 over COND_EXPR in cases such as floating point comparisons. */
12999 if (integer_zerop (op1)
13000 && code == COND_EXPR
13001 && integer_onep (op2)
13002 && !VECTOR_TYPE_P (type)
13003 && truth_value_p (TREE_CODE (arg0)))
13004 return fold_convert_loc (loc, type,
13005 invert_truthvalue_loc (loc, arg0));
13007 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
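/* E.g. for 32-bit signed A, A < 0 ? 0x80000000 : 0 is just
   A & 0x80000000, as the mask selects exactly A's sign bit.  */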
13008 if (TREE_CODE (arg0) == LT_EXPR
13009 && integer_zerop (TREE_OPERAND (arg0, 1))
13010 && integer_zerop (op2)
13011 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13013 /* sign_bit_p looks through both zero and sign extensions,
13014 but for this optimization only sign extensions are
13015 usable. */
13016 tree tem2 = TREE_OPERAND (arg0, 0);
13017 while (tem != tem2)
13019 if (TREE_CODE (tem2) != NOP_EXPR
13020 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13022 tem = NULL_TREE;
13023 break;
13025 tem2 = TREE_OPERAND (tem2, 0);
13027 /* sign_bit_p only checks ARG1 bits within A's precision.
13028 If <sign bit of A> has wider type than A, bits outside
13029 of A's precision in <sign bit of A> need to be checked.
13030 If they are all 0, this optimization needs to be done
13031 in unsigned A's type; if they are all 1, in signed A's type;
13032 otherwise this can't be done. */
13033 if (tem
13034 && TYPE_PRECISION (TREE_TYPE (tem))
13035 < TYPE_PRECISION (TREE_TYPE (arg1))
13036 && TYPE_PRECISION (TREE_TYPE (tem))
13037 < TYPE_PRECISION (type))
13039 int inner_width, outer_width;
13040 tree tem_type;
13042 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13043 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13044 if (outer_width > TYPE_PRECISION (type))
13045 outer_width = TYPE_PRECISION (type);
13047 wide_int mask = wi::shifted_mask
13048 (inner_width, outer_width - inner_width, false,
13049 TYPE_PRECISION (TREE_TYPE (arg1)));
13051 wide_int common = mask & wi::to_wide (arg1);
13052 if (common == mask)
13054 tem_type = signed_type_for (TREE_TYPE (tem));
13055 tem = fold_convert_loc (loc, tem_type, tem);
13057 else if (common == 0)
13059 tem_type = unsigned_type_for (TREE_TYPE (tem));
13060 tem = fold_convert_loc (loc, tem_type, tem);
13062 else
13063 tem = NULL;
13066 if (tem)
13067 return
13068 fold_convert_loc (loc, type,
13069 fold_build2_loc (loc, BIT_AND_EXPR,
13070 TREE_TYPE (tem), tem,
13071 fold_convert_loc (loc,
13072 TREE_TYPE (tem),
13073 arg1)));
13076 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13077 already handled above. */
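/* E.g. with N == 3, (A >> 3) & 1 ? 8 : 0 becomes A & 8: the bit being
   tested is exactly the bit the replacement mask selects.  */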
13078 if (TREE_CODE (arg0) == BIT_AND_EXPR
13079 && integer_onep (TREE_OPERAND (arg0, 1))
13080 && integer_zerop (op2)
13081 && integer_pow2p (arg1))
13083 tree tem = TREE_OPERAND (arg0, 0);
13084 STRIP_NOPS (tem);
13085 if (TREE_CODE (tem) == RSHIFT_EXPR
13086 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13087 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13088 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13089 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13090 fold_convert_loc (loc, type,
13091 TREE_OPERAND (tem, 0)),
13092 op1);
13095 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13096 is probably obsolete because the first operand should be a
13097 truth value (that's why we have the two cases above), but let's
13098 leave it in until we can confirm this for all front-ends. */
13099 if (integer_zerop (op2)
13100 && TREE_CODE (arg0) == NE_EXPR
13101 && integer_zerop (TREE_OPERAND (arg0, 1))
13102 && integer_pow2p (arg1)
13103 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13104 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13105 arg1, OEP_ONLY_CONST)
13106 /* operand_equal_p compares just the value, not the precision, so e.g.
13107 arg1 could be 8-bit -128 and be a power of two, while the BIT_AND_EXPR's
13108 second operand is 32-bit -128, which is not a power of two (or vice
13109 versa). */
13110 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13111 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13113 /* Disable the transformations below for vectors, since
13114 fold_binary_op_with_conditional_arg may undo them immediately,
13115 yielding an infinite loop. */
13116 if (code == VEC_COND_EXPR)
13117 return NULL_TREE;
13119 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13120 if (integer_zerop (op2)
13121 && truth_value_p (TREE_CODE (arg0))
13122 && truth_value_p (TREE_CODE (arg1))
13123 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13124 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13125 : TRUTH_ANDIF_EXPR,
13126 type, fold_convert_loc (loc, type, arg0), op1);
13128 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13129 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13130 && truth_value_p (TREE_CODE (arg0))
13131 && truth_value_p (TREE_CODE (arg1))
13132 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13134 location_t loc0 = expr_location_or (arg0, loc);
13135 /* Only perform transformation if ARG0 is easily inverted. */
13136 tem = fold_invert_truthvalue (loc0, arg0);
13137 if (tem)
13138 return fold_build2_loc (loc, code == VEC_COND_EXPR
13139 ? BIT_IOR_EXPR
13140 : TRUTH_ORIF_EXPR,
13141 type, fold_convert_loc (loc, type, tem),
13142 op1);
13145 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13146 if (integer_zerop (arg1)
13147 && truth_value_p (TREE_CODE (arg0))
13148 && truth_value_p (TREE_CODE (op2))
13149 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13151 location_t loc0 = expr_location_or (arg0, loc);
13152 /* Only perform transformation if ARG0 is easily inverted. */
13153 tem = fold_invert_truthvalue (loc0, arg0);
13154 if (tem)
13155 return fold_build2_loc (loc, code == VEC_COND_EXPR
13156 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13157 type, fold_convert_loc (loc, type, tem),
13158 op2);
13161 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13162 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13163 && truth_value_p (TREE_CODE (arg0))
13164 && truth_value_p (TREE_CODE (op2))
13165 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13166 return fold_build2_loc (loc, code == VEC_COND_EXPR
13167 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13168 type, fold_convert_loc (loc, type, arg0), op2);
13170 return NULL_TREE;
13172 case CALL_EXPR:
13173 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13174 of fold_ternary on them. */
13175 gcc_unreachable ();
13177 case BIT_FIELD_REF:
13178 if (TREE_CODE (arg0) == VECTOR_CST
13179 && (type == TREE_TYPE (TREE_TYPE (arg0))
13180 || (VECTOR_TYPE_P (type)
13181 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13182 && tree_fits_uhwi_p (op1)
13183 && tree_fits_uhwi_p (op2))
13185 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13186 unsigned HOST_WIDE_INT width
13187 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13188 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13189 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13190 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13192 if (n != 0
13193 && (idx % width) == 0
13194 && (n % width) == 0
13195 && known_le ((idx + n) / width,
13196 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13198 idx = idx / width;
13199 n = n / width;
13201 if (TREE_CODE (arg0) == VECTOR_CST)
13203 if (n == 1)
13205 tem = VECTOR_CST_ELT (arg0, idx);
13206 if (VECTOR_TYPE_P (type))
13207 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13208 return tem;
13211 tree_vector_builder vals (type, n, 1);
13212 for (unsigned i = 0; i < n; ++i)
13213 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13214 return vals.build ();
13219 /* On constants we can use native encode/interpret to constant
13220 fold (nearly) all BIT_FIELD_REFs. */
13221 if (CONSTANT_CLASS_P (arg0)
13222 && can_native_interpret_type_p (type)
13223 && BITS_PER_UNIT == 8
13224 && tree_fits_uhwi_p (op1)
13225 && tree_fits_uhwi_p (op2))
13227 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13228 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13229 /* Limit us to a reasonable amount of work. To relax the
13230 other limitations we need bit-shifting of the buffer
13231 and rounding up the size. */
13232 if (bitpos % BITS_PER_UNIT == 0
13233 && bitsize % BITS_PER_UNIT == 0
13234 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13236 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13237 unsigned HOST_WIDE_INT len
13238 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13239 bitpos / BITS_PER_UNIT);
13240 if (len > 0
13241 && len * BITS_PER_UNIT >= bitsize)
13243 tree v = native_interpret_expr (type, b,
13244 bitsize / BITS_PER_UNIT);
13245 if (v)
13246 return v;
13251 return NULL_TREE;
13253 case VEC_PERM_EXPR:
13254 /* Perform constant folding of VEC_PERM_EXPR. */
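/* E.g. permuting { 1, 2, 3, 4 } and { 5, 6, 7, 8 } with the mask
   { 0, 5, 2, 7 }, which indexes into the concatenation of the two
   input vectors, yields { 1, 6, 3, 8 }.  */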
13255 if (TREE_CODE (arg2) == VECTOR_CST
13256 && TREE_CODE (op0) == VECTOR_CST
13257 && TREE_CODE (op1) == VECTOR_CST)
13259 /* Build a vector of integers from the tree mask. */
13260 vec_perm_builder builder;
13261 if (!tree_to_vec_perm_builder (&builder, arg2))
13262 return NULL_TREE;
13264 /* Create a vec_perm_indices for the integer vector. */
13265 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13266 bool single_arg = (op0 == op1);
13267 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13268 return fold_vec_perm (type, op0, op1, sel);
13270 return NULL_TREE;
13272 case BIT_INSERT_EXPR:
13273 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
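/* For the integer case below: inserting the 8-bit value 0xee into
   0xaabbccdd at bit position 8 first masks out the old byte
   (giving 0xaabb00dd) and then ORs in 0xee << 8, for 0xaabbeedd.  */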
13274 if (TREE_CODE (arg0) == INTEGER_CST
13275 && TREE_CODE (arg1) == INTEGER_CST)
13277 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13278 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13279 wide_int tem = (wi::to_wide (arg0)
13280 & wi::shifted_mask (bitpos, bitsize, true,
13281 TYPE_PRECISION (type)));
13282 wide_int tem2
13283 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13284 bitsize), bitpos);
13285 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13287 else if (TREE_CODE (arg0) == VECTOR_CST
13288 && CONSTANT_CLASS_P (arg1)
13289 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13290 TREE_TYPE (arg1)))
13292 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13293 unsigned HOST_WIDE_INT elsize
13294 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13295 if (bitpos % elsize == 0)
13297 unsigned k = bitpos / elsize;
13298 unsigned HOST_WIDE_INT nelts;
13299 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13300 return arg0;
13301 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13303 tree_vector_builder elts (type, nelts, 1);
13304 elts.quick_grow (nelts);
13305 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13306 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13307 return elts.build ();
13311 return NULL_TREE;
13313 default:
13314 return NULL_TREE;
13315 } /* switch (code) */
13318 /* Get the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13319 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13320 constructor element index of the value returned. If the element is
13321 not found NULL_TREE is returned and *CTOR_IDX is updated to
13322 the index of the element after the ACCESS_INDEX position (which
13323 may be outside of the CTOR array). */
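/* E.g. for a constructor such as { [1] = 10, [3 ... 5] = 20 },
   ACCESS_INDEX 4 falls within the RANGE_EXPR element, so 20 is
   returned and *CTOR_IDX, if non-NULL, is set to 1.  */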
13325 tree
13326 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13327 unsigned *ctor_idx)
13329 tree index_type = NULL_TREE;
13330 signop index_sgn = UNSIGNED;
13331 offset_int low_bound = 0;
13333 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13335 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13336 if (domain_type && TYPE_MIN_VALUE (domain_type))
13338 /* Static constructors for variably sized objects make no sense. */
13339 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13340 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13341 /* ??? When it is obvious that the range is signed, treat it so. */
13342 if (TYPE_UNSIGNED (index_type)
13343 && TYPE_MAX_VALUE (domain_type)
13344 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13345 TYPE_MIN_VALUE (domain_type)))
13347 index_sgn = SIGNED;
13348 low_bound
13349 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13350 SIGNED);
13352 else
13354 index_sgn = TYPE_SIGN (index_type);
13355 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13360 if (index_type)
13361 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13362 index_sgn);
13364 offset_int index = low_bound;
13365 if (index_type)
13366 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13368 offset_int max_index = index;
13369 unsigned cnt;
13370 tree cfield, cval;
13371 bool first_p = true;
13373 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13375 /* An array constructor element might explicitly set its index, specify
13376 a range, or leave the index NULL, meaning that it is the next index
13377 after the previous one. */
13378 if (cfield)
13380 if (TREE_CODE (cfield) == INTEGER_CST)
13381 max_index = index
13382 = offset_int::from (wi::to_wide (cfield), index_sgn);
13383 else
13385 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13386 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13387 index_sgn);
13388 max_index
13389 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13390 index_sgn);
13391 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13394 else if (!first_p)
13396 index = max_index + 1;
13397 if (index_type)
13398 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13399 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13400 max_index = index;
13402 else
13403 first_p = false;
13405 /* Do we have a match? */
13406 if (wi::cmp (access_index, index, index_sgn) >= 0)
13408 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13410 if (ctor_idx)
13411 *ctor_idx = cnt;
13412 return cval;
13415 else if (in_gimple_form)
13416 /* We're past the element we are searching for. Note that during
13417 parsing the elements might not be sorted.
13418 ??? We should use a binary search and a flag on the
13419 CONSTRUCTOR as to whether elements are sorted in declaration
13420 order. */
13421 break;
13423 if (ctor_idx)
13424 *ctor_idx = cnt;
13425 return NULL_TREE;
13428 /* Perform constant folding and related simplification of EXPR.
13429 The related simplifications include x*1 => x, x*0 => 0, etc.,
13430 and application of the associative law.
13431 NOP_EXPR conversions may be removed freely (as long as we
13432 are careful not to change the type of the overall expression).
13433 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13434 but we can constant-fold them if they have constant operands. */
13436 #ifdef ENABLE_FOLD_CHECKING
13437 # define fold(x) fold_1 (x)
13438 static tree fold_1 (tree);
13439 static
13440 #endif
13441 tree
13442 fold (tree expr)
13444 const tree t = expr;
13445 enum tree_code code = TREE_CODE (t);
13446 enum tree_code_class kind = TREE_CODE_CLASS (code);
13447 tree tem;
13448 location_t loc = EXPR_LOCATION (expr);
13450 /* Return right away if a constant. */
13451 if (kind == tcc_constant)
13452 return t;
13454 /* CALL_EXPR-like objects with variable numbers of operands are
13455 treated specially. */
13456 if (kind == tcc_vl_exp)
13458 if (code == CALL_EXPR)
13460 tem = fold_call_expr (loc, expr, false);
13461 return tem ? tem : expr;
13463 return expr;
13466 if (IS_EXPR_CODE_CLASS (kind))
13468 tree type = TREE_TYPE (t);
13469 tree op0, op1, op2;
13471 switch (TREE_CODE_LENGTH (code))
13473 case 1:
13474 op0 = TREE_OPERAND (t, 0);
13475 tem = fold_unary_loc (loc, code, type, op0);
13476 return tem ? tem : expr;
13477 case 2:
13478 op0 = TREE_OPERAND (t, 0);
13479 op1 = TREE_OPERAND (t, 1);
13480 tem = fold_binary_loc (loc, code, type, op0, op1);
13481 return tem ? tem : expr;
13482 case 3:
13483 op0 = TREE_OPERAND (t, 0);
13484 op1 = TREE_OPERAND (t, 1);
13485 op2 = TREE_OPERAND (t, 2);
13486 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13487 return tem ? tem : expr;
13488 default:
13489 break;
13493 switch (code)
13495 case ARRAY_REF:
13497 tree op0 = TREE_OPERAND (t, 0);
13498 tree op1 = TREE_OPERAND (t, 1);
13500 if (TREE_CODE (op1) == INTEGER_CST
13501 && TREE_CODE (op0) == CONSTRUCTOR
13502 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13504 tree val = get_array_ctor_element_at_index (op0,
13505 wi::to_offset (op1));
13506 if (val)
13507 return val;
13510 return t;
13513 /* Return a VECTOR_CST if possible. */
13514 case CONSTRUCTOR:
13516 tree type = TREE_TYPE (t);
13517 if (TREE_CODE (type) != VECTOR_TYPE)
13518 return t;
13520 unsigned i;
13521 tree val;
13522 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13523 if (! CONSTANT_CLASS_P (val))
13524 return t;
13526 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13529 case CONST_DECL:
13530 return fold (DECL_INITIAL (t));
13532 default:
13533 return t;
13534 } /* switch (code) */
13537 #ifdef ENABLE_FOLD_CHECKING
13538 #undef fold
13540 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13541 hash_table<nofree_ptr_hash<const tree_node> > *);
13542 static void fold_check_failed (const_tree, const_tree);
13543 void print_fold_checksum (const_tree);
13545 /* When configured with --enable-checking=fold, compute a digest of EXPR
13546 before and after the actual fold call, to verify that fold did not
13547 accidentally change the original expr. */
13549 tree
13550 fold (tree expr)
13552 tree ret;
13553 struct md5_ctx ctx;
13554 unsigned char checksum_before[16], checksum_after[16];
13555 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13557 md5_init_ctx (&ctx);
13558 fold_checksum_tree (expr, &ctx, &ht);
13559 md5_finish_ctx (&ctx, checksum_before);
13560 ht.empty ();
13562 ret = fold_1 (expr);
13564 md5_init_ctx (&ctx);
13565 fold_checksum_tree (expr, &ctx, &ht);
13566 md5_finish_ctx (&ctx, checksum_after);
13568 if (memcmp (checksum_before, checksum_after, 16))
13569 fold_check_failed (expr, ret);
13571 return ret;
13574 void
13575 print_fold_checksum (const_tree expr)
13577 struct md5_ctx ctx;
13578 unsigned char checksum[16], cnt;
13579 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13581 md5_init_ctx (&ctx);
13582 fold_checksum_tree (expr, &ctx, &ht);
13583 md5_finish_ctx (&ctx, checksum);
13584 for (cnt = 0; cnt < 16; ++cnt)
13585 fprintf (stderr, "%02x", checksum[cnt]);
13586 putc ('\n', stderr);
13589 static void
13590 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13592 internal_error ("fold check: original tree changed by fold");
13595 static void
13596 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13597 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13599 const tree_node **slot;
13600 enum tree_code code;
13601 union tree_node *buf;
13602 int i, len;
13604 recursive_label:
13605 if (expr == NULL)
13606 return;
13607 slot = ht->find_slot (expr, INSERT);
13608 if (*slot != NULL)
13609 return;
13610 *slot = expr;
13611 code = TREE_CODE (expr);
13612 if (TREE_CODE_CLASS (code) == tcc_declaration
13613 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13615 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13616 size_t sz = tree_size (expr);
13617 buf = XALLOCAVAR (union tree_node, sz);
13618 memcpy ((char *) buf, expr, sz);
13619 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13620 buf->decl_with_vis.symtab_node = NULL;
13621 buf->base.nowarning_flag = 0;
13622 expr = (tree) buf;
13624 else if (TREE_CODE_CLASS (code) == tcc_type
13625 && (TYPE_POINTER_TO (expr)
13626 || TYPE_REFERENCE_TO (expr)
13627 || TYPE_CACHED_VALUES_P (expr)
13628 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13629 || TYPE_NEXT_VARIANT (expr)
13630 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13632 /* Allow these fields to be modified. */
13633 tree tmp;
13634 size_t sz = tree_size (expr);
13635 buf = XALLOCAVAR (union tree_node, sz);
13636 memcpy ((char *) buf, expr, sz);
13637 expr = tmp = (tree) buf;
13638 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13639 TYPE_POINTER_TO (tmp) = NULL;
13640 TYPE_REFERENCE_TO (tmp) = NULL;
13641 TYPE_NEXT_VARIANT (tmp) = NULL;
13642 TYPE_ALIAS_SET (tmp) = -1;
13643 if (TYPE_CACHED_VALUES_P (tmp))
13645 TYPE_CACHED_VALUES_P (tmp) = 0;
13646 TYPE_CACHED_VALUES (tmp) = NULL;
13649 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13651 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13652 that and change builtins.cc etc. instead - see PR89543. */
13653 size_t sz = tree_size (expr);
13654 buf = XALLOCAVAR (union tree_node, sz);
13655 memcpy ((char *) buf, expr, sz);
13656 buf->base.nowarning_flag = 0;
13657 expr = (tree) buf;
13659 md5_process_bytes (expr, tree_size (expr), ctx);
13660 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13661 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13662 if (TREE_CODE_CLASS (code) != tcc_type
13663 && TREE_CODE_CLASS (code) != tcc_declaration
13664 && code != TREE_LIST
13665 && code != SSA_NAME
13666 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13667 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13668 switch (TREE_CODE_CLASS (code))
13670 case tcc_constant:
13671 switch (code)
13673 case STRING_CST:
13674 md5_process_bytes (TREE_STRING_POINTER (expr),
13675 TREE_STRING_LENGTH (expr), ctx);
13676 break;
13677 case COMPLEX_CST:
13678 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13679 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13680 break;
13681 case VECTOR_CST:
13682 len = vector_cst_encoded_nelts (expr);
13683 for (i = 0; i < len; ++i)
13684 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13685 break;
13686 default:
13687 break;
13689 break;
13690 case tcc_exceptional:
13691 switch (code)
13693 case TREE_LIST:
13694 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13695 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13696 expr = TREE_CHAIN (expr);
13697 goto recursive_label;
13698 break;
13699 case TREE_VEC:
13700 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13701 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13702 break;
13703 default:
13704 break;
13706 break;
13707 case tcc_expression:
13708 case tcc_reference:
13709 case tcc_comparison:
13710 case tcc_unary:
13711 case tcc_binary:
13712 case tcc_statement:
13713 case tcc_vl_exp:
13714 len = TREE_OPERAND_LENGTH (expr);
13715 for (i = 0; i < len; ++i)
13716 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13717 break;
13718 case tcc_declaration:
13719 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13720 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13721 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13723 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13724 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13725 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13726 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13727 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13730 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13732 if (TREE_CODE (expr) == FUNCTION_DECL)
13734 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13735 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13737 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13739 break;
13740 case tcc_type:
13741 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13742 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13743 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13744 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13745 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13746 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13747 if (INTEGRAL_TYPE_P (expr)
13748 || SCALAR_FLOAT_TYPE_P (expr))
13750 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13751 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13753 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13754 if (TREE_CODE (expr) == RECORD_TYPE
13755 || TREE_CODE (expr) == UNION_TYPE
13756 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13757 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13758 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13759 break;
13760 default:
13761 break;
13765 /* Helper function for outputting the checksum of a tree T. When
13766 debugging with gdb, you can "define mynext" to be "next" followed
13767 by "call debug_fold_checksum (op0)", then just trace down till the
13768 outputs differ. */
13770 DEBUG_FUNCTION void
13771 debug_fold_checksum (const_tree t)
13773 int i;
13774 unsigned char checksum[16];
13775 struct md5_ctx ctx;
13776 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13778 md5_init_ctx (&ctx);
13779 fold_checksum_tree (t, &ctx, &ht);
13780 md5_finish_ctx (&ctx, checksum);
13781 ht.empty ();
13783 for (i = 0; i < 16; i++)
13784 fprintf (stderr, "%d ", checksum[i]);
13786 fprintf (stderr, "\n");
13789 #endif
13791 /* Fold a unary tree expression with code CODE of type TYPE with an
13792 operand OP0. LOC is the location of the resulting expression.
13793 Return a folded expression if successful. Otherwise, return a tree
13794 expression with code CODE of type TYPE with an operand OP0. */
13796 tree
13797 fold_build1_loc (location_t loc,
13798 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13800 tree tem;
13801 #ifdef ENABLE_FOLD_CHECKING
13802 unsigned char checksum_before[16], checksum_after[16];
13803 struct md5_ctx ctx;
13804 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13806 md5_init_ctx (&ctx);
13807 fold_checksum_tree (op0, &ctx, &ht);
13808 md5_finish_ctx (&ctx, checksum_before);
13809 ht.empty ();
13810 #endif
13812 tem = fold_unary_loc (loc, code, type, op0);
13813 if (!tem)
13814 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13816 #ifdef ENABLE_FOLD_CHECKING
13817 md5_init_ctx (&ctx);
13818 fold_checksum_tree (op0, &ctx, &ht);
13819 md5_finish_ctx (&ctx, checksum_after);
13821 if (memcmp (checksum_before, checksum_after, 16))
13822 fold_check_failed (op0, tem);
13823 #endif
13824 return tem;
13827 /* Fold a binary tree expression with code CODE of type TYPE with
13828 operands OP0 and OP1. LOC is the location of the resulting
13829 expression. Return a folded expression if successful. Otherwise,
13830 return a tree expression with code CODE of type TYPE with operands
13831 OP0 and OP1. */
13833 tree
13834 fold_build2_loc (location_t loc,
13835 enum tree_code code, tree type, tree op0, tree op1
13836 MEM_STAT_DECL)
13838 tree tem;
13839 #ifdef ENABLE_FOLD_CHECKING
13840 unsigned char checksum_before_op0[16],
13841 checksum_before_op1[16],
13842 checksum_after_op0[16],
13843 checksum_after_op1[16];
13844 struct md5_ctx ctx;
13845 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13847 md5_init_ctx (&ctx);
13848 fold_checksum_tree (op0, &ctx, &ht);
13849 md5_finish_ctx (&ctx, checksum_before_op0);
13850 ht.empty ();
13852 md5_init_ctx (&ctx);
13853 fold_checksum_tree (op1, &ctx, &ht);
13854 md5_finish_ctx (&ctx, checksum_before_op1);
13855 ht.empty ();
13856 #endif
13858 tem = fold_binary_loc (loc, code, type, op0, op1);
13859 if (!tem)
13860 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13862 #ifdef ENABLE_FOLD_CHECKING
13863 md5_init_ctx (&ctx);
13864 fold_checksum_tree (op0, &ctx, &ht);
13865 md5_finish_ctx (&ctx, checksum_after_op0);
13866 ht.empty ();
13868 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13869 fold_check_failed (op0, tem);
13871 md5_init_ctx (&ctx);
13872 fold_checksum_tree (op1, &ctx, &ht);
13873 md5_finish_ctx (&ctx, checksum_after_op1);
13875 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13876 fold_check_failed (op1, tem);
13877 #endif
13878 return tem;
13881 /* Fold a ternary tree expression with code CODE of type TYPE with
13882 operands OP0, OP1, and OP2. Return a folded expression if
13883 successful. Otherwise, return a tree expression with code CODE of
13884 type TYPE with operands OP0, OP1, and OP2. */
13886 tree
13887 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13888 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13890 tree tem;
13891 #ifdef ENABLE_FOLD_CHECKING
13892 unsigned char checksum_before_op0[16],
13893 checksum_before_op1[16],
13894 checksum_before_op2[16],
13895 checksum_after_op0[16],
13896 checksum_after_op1[16],
13897 checksum_after_op2[16];
13898 struct md5_ctx ctx;
13899 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13901 md5_init_ctx (&ctx);
13902 fold_checksum_tree (op0, &ctx, &ht);
13903 md5_finish_ctx (&ctx, checksum_before_op0);
13904 ht.empty ();
13906 md5_init_ctx (&ctx);
13907 fold_checksum_tree (op1, &ctx, &ht);
13908 md5_finish_ctx (&ctx, checksum_before_op1);
13909 ht.empty ();
13911 md5_init_ctx (&ctx);
13912 fold_checksum_tree (op2, &ctx, &ht);
13913 md5_finish_ctx (&ctx, checksum_before_op2);
13914 ht.empty ();
13915 #endif
13917 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13918 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13919 if (!tem)
13920 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13922 #ifdef ENABLE_FOLD_CHECKING
13923 md5_init_ctx (&ctx);
13924 fold_checksum_tree (op0, &ctx, &ht);
13925 md5_finish_ctx (&ctx, checksum_after_op0);
13926 ht.empty ();
13928 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13929 fold_check_failed (op0, tem);
13931 md5_init_ctx (&ctx);
13932 fold_checksum_tree (op1, &ctx, &ht);
13933 md5_finish_ctx (&ctx, checksum_after_op1);
13934 ht.empty ();
13936 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13937 fold_check_failed (op1, tem);
13939 md5_init_ctx (&ctx);
13940 fold_checksum_tree (op2, &ctx, &ht);
13941 md5_finish_ctx (&ctx, checksum_after_op2);
13943 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13944 fold_check_failed (op2, tem);
13945 #endif
13946 return tem;
13949 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
13950 arguments in ARGARRAY, and a null static chain.
13951 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13952 of type TYPE from the given operands as constructed by build_call_array. */
13954 tree
13955 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13956 int nargs, tree *argarray)
13958 tree tem;
13959 #ifdef ENABLE_FOLD_CHECKING
13960 unsigned char checksum_before_fn[16],
13961 checksum_before_arglist[16],
13962 checksum_after_fn[16],
13963 checksum_after_arglist[16];
13964 struct md5_ctx ctx;
13965 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13966 int i;
13968 md5_init_ctx (&ctx);
13969 fold_checksum_tree (fn, &ctx, &ht);
13970 md5_finish_ctx (&ctx, checksum_before_fn);
13971 ht.empty ();
13973 md5_init_ctx (&ctx);
13974 for (i = 0; i < nargs; i++)
13975 fold_checksum_tree (argarray[i], &ctx, &ht);
13976 md5_finish_ctx (&ctx, checksum_before_arglist);
13977 ht.empty ();
13978 #endif
13980 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13981 if (!tem)
13982 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13984 #ifdef ENABLE_FOLD_CHECKING
13985 md5_init_ctx (&ctx);
13986 fold_checksum_tree (fn, &ctx, &ht);
13987 md5_finish_ctx (&ctx, checksum_after_fn);
13988 ht.empty ();
13990 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13991 fold_check_failed (fn, tem);
13993 md5_init_ctx (&ctx);
13994 for (i = 0; i < nargs; i++)
13995 fold_checksum_tree (argarray[i], &ctx, &ht);
13996 md5_finish_ctx (&ctx, checksum_after_arglist);
13998 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13999 fold_check_failed (NULL_TREE, tem);
14000 #endif
14001 return tem;
14004 /* Perform constant folding and related simplification of initializer
14005 expression EXPR. These behave identically to "fold_buildN" but ignore
14006 potential run-time traps and exceptions that fold must preserve. */
14008 #define START_FOLD_INIT \
14009 int saved_signaling_nans = flag_signaling_nans;\
14010 int saved_trapping_math = flag_trapping_math;\
14011 int saved_rounding_math = flag_rounding_math;\
14012 int saved_trapv = flag_trapv;\
14013 int saved_folding_initializer = folding_initializer;\
14014 flag_signaling_nans = 0;\
14015 flag_trapping_math = 0;\
14016 flag_rounding_math = 0;\
14017 flag_trapv = 0;\
14018 folding_initializer = 1;
14020 #define END_FOLD_INIT \
14021 flag_signaling_nans = saved_signaling_nans;\
14022 flag_trapping_math = saved_trapping_math;\
14023 flag_rounding_math = saved_rounding_math;\
14024 flag_trapv = saved_trapv;\
14025 folding_initializer = saved_folding_initializer;
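/* E.g. with -frounding-math, fold declines to fold the inexact
   division 1.0 / 3.0 because the result depends on the run-time
   rounding mode; a static initializer is evaluated at compile time
   anyway, so the wrappers below clear flag_rounding_math and the
   related flags around the call.  */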
14027 tree
14028 fold_init (tree expr)
14030 tree result;
14031 START_FOLD_INIT;
14033 result = fold (expr);
14035 END_FOLD_INIT;
14036 return result;
14039 tree
14040 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14041 tree type, tree op)
14043 tree result;
14044 START_FOLD_INIT;
14046 result = fold_build1_loc (loc, code, type, op);
14048 END_FOLD_INIT;
14049 return result;
14052 tree
14053 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14054 tree type, tree op0, tree op1)
14056 tree result;
14057 START_FOLD_INIT;
14059 result = fold_build2_loc (loc, code, type, op0, op1);
14061 END_FOLD_INIT;
14062 return result;
14065 tree
14066 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14067 int nargs, tree *argarray)
14069 tree result;
14070 START_FOLD_INIT;
14072 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14074 END_FOLD_INIT;
14075 return result;
14078 tree
14079 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14080 tree lhs, tree rhs)
14082 tree result;
14083 START_FOLD_INIT;
14085 result = fold_binary_loc (loc, code, type, lhs, rhs);
14087 END_FOLD_INIT;
14088 return result;
14091 #undef START_FOLD_INIT
14092 #undef END_FOLD_INIT
14094 /* Determine whether the first argument is a multiple of the second
14095 argument. Return 0 if it is not, or if we cannot easily determine it to be.
14097 An example of the sort of thing we care about (at this point; this routine
14098 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14099 fold cases do now) is discovering that
14101 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14103 is a multiple of
14105 SAVE_EXPR (J * 8)
14107 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14109 This code also handles discovering that
14111 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14113 is a multiple of 8 so we don't have to worry about dealing with a
14114 possible remainder.
14116 Note that we *look* inside a SAVE_EXPR only to determine how it was
14117 calculated; it is not safe for fold to do much of anything else with the
14118 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14119 at run time. For example, the latter example above *cannot* be implemented
14120 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14121 evaluation time of the original SAVE_EXPR is not necessarily the same at
14122 the time the new expression is evaluated. The only optimization of this
14123 sort that would be valid is changing
14125 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14127 divided by 8 to
14129 SAVE_EXPR (I) * SAVE_EXPR (J)
14131 (where the same SAVE_EXPR (J) is used in the original and the
14132 transformed version).
14134 NOWRAP specifies whether all outer operations in TYPE should
14135 be considered not wrapping. Any type conversion within TOP acts
14136 as a barrier and we will fall back to NOWRAP being false.
14137 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14138 as not wrapping even though they are generally using unsigned arithmetic. */
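/* E.g. for TOP == X * 6 + 12 and BOTTOM == 3, the answer is 1 when
   NOWRAP is true: 12 and the multiplier 6 are both multiples of 3.
   With NOWRAP false and a wrapping unsigned TYPE the query is
   conservatively rejected, as 3 is not a power of two.  */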
14140 int
14141 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14143 gimple *stmt;
14144 tree op1, op2;
14146 if (operand_equal_p (top, bottom, 0))
14147 return 1;
14149 if (TREE_CODE (type) != INTEGER_TYPE)
14150 return 0;
14152 switch (TREE_CODE (top))
14154 case BIT_AND_EXPR:
14155 /* Bitwise and provides a power of two multiple. If the mask is
14156 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14157 if (!integer_pow2p (bottom))
14158 return 0;
14159 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14160 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14162 case MULT_EXPR:
14163 /* If the multiplication can wrap we cannot recurse further unless
14164 the bottom is a power of two, in which case wrapping does not
14165 matter. */
14166 if (!nowrap
14167 && !TYPE_OVERFLOW_UNDEFINED (type)
14168 && !integer_pow2p (bottom))
14169 return 0;
14170 if (TREE_CODE (bottom) == INTEGER_CST)
14172 op1 = TREE_OPERAND (top, 0);
14173 op2 = TREE_OPERAND (top, 1);
14174 if (TREE_CODE (op1) == INTEGER_CST)
14175 std::swap (op1, op2);
14176 if (TREE_CODE (op2) == INTEGER_CST)
14178 if (multiple_of_p (type, op2, bottom, nowrap))
14179 return 1;
14180 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14181 if (multiple_of_p (type, bottom, op2, nowrap))
14183 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14184 wi::to_widest (op2));
14185 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14187 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14188 return multiple_of_p (type, op1, op2, nowrap);
14191 return multiple_of_p (type, op1, bottom, nowrap);
14194 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14195 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14197 case LSHIFT_EXPR:
14198 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14199 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14201 op1 = TREE_OPERAND (top, 1);
14202 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14204 wide_int mul_op
14205 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14206 return multiple_of_p (type,
14207 wide_int_to_tree (type, mul_op), bottom,
14208 nowrap);
14211 return 0;
14213 case MINUS_EXPR:
14214 case PLUS_EXPR:
14215 /* If the addition or subtraction can wrap we cannot recurse further
14216 unless bottom is a power of two, in which case wrapping does not
14217 matter. */
14218 if (!nowrap
14219 && !TYPE_OVERFLOW_UNDEFINED (type)
14220 && !integer_pow2p (bottom))
14221 return 0;
14223 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14224 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14225 but 0xfffffffd is not. */
14226 op1 = TREE_OPERAND (top, 1);
14227 if (TREE_CODE (top) == PLUS_EXPR
14228 && nowrap
14229 && TYPE_UNSIGNED (type)
14230 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14231 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14233 /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14234 of bottom, so be conservative here and check whether both op0 and op1
14235 are multiples of bottom. Note we check the second operand first
14236 since it's usually simpler. */
14237 return (multiple_of_p (type, op1, bottom, nowrap)
14238 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14240 CASE_CONVERT:
14241 /* Can't handle conversions from non-integral or wider integral type. */
14242 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14243 || (TYPE_PRECISION (type)
14244 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14245 return 0;
14246 /* NOWRAP only extends to operations in the outermost type so
14247 make sure to strip it off here. */
14248 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14249 TREE_OPERAND (top, 0), bottom, false);
14251 case SAVE_EXPR:
14252 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14254 case COND_EXPR:
14255 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14256 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14258 case INTEGER_CST:
14259 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14260 return 0;
14261 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14262 SIGNED);
14264 case SSA_NAME:
14265 if (TREE_CODE (bottom) == INTEGER_CST
14266 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14267 && gimple_code (stmt) == GIMPLE_ASSIGN)
14269 enum tree_code code = gimple_assign_rhs_code (stmt);
14271 /* Check for special cases to see if top is defined as a multiple
14272 of bottom:
14274 top = (X & ~(bottom - 1)) ; bottom is power of 2
14278 Y = X % bottom
14279 top = X - Y. */
14280 if (code == BIT_AND_EXPR
14281 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14282 && TREE_CODE (op2) == INTEGER_CST
14283 && integer_pow2p (bottom)
14284 && wi::multiple_of_p (wi::to_widest (op2),
14285 wi::to_widest (bottom), UNSIGNED))
14286 return 1;
14288 op1 = gimple_assign_rhs1 (stmt);
14289 if (code == MINUS_EXPR
14290 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14291 && TREE_CODE (op2) == SSA_NAME
14292 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14293 && gimple_code (stmt) == GIMPLE_ASSIGN
14294 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14295 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14296 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14297 return 1;
14300 /* fall through */
14302 default:
14303 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14304 return multiple_p (wi::to_poly_widest (top),
14305 wi::to_poly_widest (bottom));
14307 return 0;
14311 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14312 This function returns true for integer expressions, and returns
14313 false if uncertain. */
14315 bool
14316 tree_expr_finite_p (const_tree x)
14318 machine_mode mode = element_mode (x);
14319 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14320 return true;
14321 switch (TREE_CODE (x))
14323 case REAL_CST:
14324 return real_isfinite (TREE_REAL_CST_PTR (x));
14325 case COMPLEX_CST:
14326 return tree_expr_finite_p (TREE_REALPART (x))
14327 && tree_expr_finite_p (TREE_IMAGPART (x));
14328 case FLOAT_EXPR:
14329 return true;
14330 case ABS_EXPR:
14331 case CONVERT_EXPR:
14332 case NON_LVALUE_EXPR:
14333 case NEGATE_EXPR:
14334 case SAVE_EXPR:
14335 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14336 case MIN_EXPR:
14337 case MAX_EXPR:
14338 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14339 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14340 case COND_EXPR:
14341 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14342 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14343 case CALL_EXPR:
14344 switch (get_call_combined_fn (x))
14346 CASE_CFN_FABS:
14347 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14348 CASE_CFN_FMAX:
14349 CASE_CFN_FMIN:
14350 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14351 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14352 default:
14353 return false;
14356 default:
14357 return false;
14361 /* Return true if expression X evaluates to an infinity.
14362 This function returns false for integer expressions. */
14364 bool
14365 tree_expr_infinite_p (const_tree x)
14367 if (!HONOR_INFINITIES (x))
14368 return false;
14369 switch (TREE_CODE (x))
14371 case REAL_CST:
14372 return real_isinf (TREE_REAL_CST_PTR (x));
14373 case ABS_EXPR:
14374 case NEGATE_EXPR:
14375 case NON_LVALUE_EXPR:
14376 case SAVE_EXPR:
14377 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14378 case COND_EXPR:
14379 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14380 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14381 default:
14382 return false;
14386 /* Return true if expression X could evaluate to an infinity.
14387 This function returns false for integer expressions, and returns
14388 true if uncertain. */
14390 bool
14391 tree_expr_maybe_infinite_p (const_tree x)
14393 if (!HONOR_INFINITIES (x))
14394 return false;
14395 switch (TREE_CODE (x))
14397 case REAL_CST:
14398 return real_isinf (TREE_REAL_CST_PTR (x));
14399 case FLOAT_EXPR:
14400 return false;
14401 case ABS_EXPR:
14402 case NEGATE_EXPR:
14403 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14404 case COND_EXPR:
14405 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14406 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14407 default:
14408 return true;
14412 /* Return true if expression X evaluates to a signaling NaN.
14413 This function returns false for integer expressions. */
14415 bool
14416 tree_expr_signaling_nan_p (const_tree x)
14418 if (!HONOR_SNANS (x))
14419 return false;
14420 switch (TREE_CODE (x))
14422 case REAL_CST:
14423 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14424 case NON_LVALUE_EXPR:
14425 case SAVE_EXPR:
14426 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14427 case COND_EXPR:
14428 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14429 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14430 default:
14431 return false;
14435 /* Return true if expression X could evaluate to a signaling NaN.
14436 This function returns false for integer expressions, and returns
14437 true if uncertain. */
14439 bool
14440 tree_expr_maybe_signaling_nan_p (const_tree x)
14442 if (!HONOR_SNANS (x))
14443 return false;
14444 switch (TREE_CODE (x))
14446 case REAL_CST:
14447 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14448 case FLOAT_EXPR:
14449 return false;
14450 case ABS_EXPR:
14451 case CONVERT_EXPR:
14452 case NEGATE_EXPR:
14453 case NON_LVALUE_EXPR:
14454 case SAVE_EXPR:
14455 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14456 case MIN_EXPR:
14457 case MAX_EXPR:
14458 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14459 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14460 case COND_EXPR:
14461 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14462 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14463 case CALL_EXPR:
14464 switch (get_call_combined_fn (x))
14466 CASE_CFN_FABS:
14467 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14468 CASE_CFN_FMAX:
14469 CASE_CFN_FMIN:
14470 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14471 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14472 default:
14473 return true;
14475 default:
14476 return true;
14480 /* Return true if expression X evaluates to a NaN.
14481 This function returns false for integer expressions. */
14483 bool
14484 tree_expr_nan_p (const_tree x)
14486 if (!HONOR_NANS (x))
14487 return false;
14488 switch (TREE_CODE (x))
14490 case REAL_CST:
14491 return real_isnan (TREE_REAL_CST_PTR (x));
14492 case NON_LVALUE_EXPR:
14493 case SAVE_EXPR:
14494 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14495 case COND_EXPR:
14496 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14497 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14498 default:
14499 return false;
14503 /* Return true if expression X could evaluate to a NaN.
14504 This function returns false for integer expressions, and returns
14505 true if uncertain. */
14507 bool
14508 tree_expr_maybe_nan_p (const_tree x)
14510 if (!HONOR_NANS (x))
14511 return false;
14512 switch (TREE_CODE (x))
14514 case REAL_CST:
14515 return real_isnan (TREE_REAL_CST_PTR (x));
14516 case FLOAT_EXPR:
14517 return false;
14518 case PLUS_EXPR:
14519 case MINUS_EXPR:
14520 case MULT_EXPR:
14521 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14522 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14523 case ABS_EXPR:
14524 case CONVERT_EXPR:
14525 case NEGATE_EXPR:
14526 case NON_LVALUE_EXPR:
14527 case SAVE_EXPR:
14528 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14529 case MIN_EXPR:
14530 case MAX_EXPR:
14531 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14532 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14533 case COND_EXPR:
14534 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14535 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14536 case CALL_EXPR:
14537 switch (get_call_combined_fn (x))
14539 CASE_CFN_FABS:
14540 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14541 CASE_CFN_FMAX:
14542 CASE_CFN_FMIN:
14543 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14544 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14545 default:
14546 return true;
14548 default:
14549 return true;
14553 /* Return true if expression X could evaluate to -0.0.
14554 This function returns true if uncertain. */
14556 bool
14557 tree_expr_maybe_real_minus_zero_p (const_tree x)
14559 if (!HONOR_SIGNED_ZEROS (x))
14560 return false;
14561 switch (TREE_CODE (x))
14563 case REAL_CST:
14564 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14565 case INTEGER_CST:
14566 case FLOAT_EXPR:
14567 case ABS_EXPR:
14568 return false;
14569 case NON_LVALUE_EXPR:
14570 case SAVE_EXPR:
14571 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14572 case COND_EXPR:
14573 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14574 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14575 case CALL_EXPR:
14576 switch (get_call_combined_fn (x))
14578 CASE_CFN_FABS:
14579 return false;
14580 default:
14581 break;
14583 default:
14584 break;
14586 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14587 but currently those predicates require tree and not const_tree. */
14588 return true;
14591 #define tree_expr_nonnegative_warnv_p(X, Y) \
14592 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14594 #define RECURSE(X) \
14595 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
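/* The #define above turns any direct call to tree_expr_nonnegative_warnv_p
   within this file into a compile-time error; recursive queries must go
   through RECURSE, which calls the function through a parenthesized name
   to bypass the macro and increments DEPTH so the recursion stays
   bounded. */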
14597 /* Return true if CODE or TYPE is known to be non-negative. */
14599 static bool
14600 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14602 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14603 && truth_value_p (code))
14604 /* Truth values evaluate to 0 or 1, both nonnegative, unless we
14605 have a signed:1 type (where the values are -1 and 0). */
14606 return true;
14607 return false;
14610 /* Return true if (CODE OP0) is known to be non-negative. If the return
14611 value is based on the assumption that signed overflow is undefined,
14612 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14613 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14615 bool
14616 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14617 bool *strict_overflow_p, int depth)
14619 if (TYPE_UNSIGNED (type))
14620 return true;
14622 switch (code)
14624 case ABS_EXPR:
14625 /* We can't return 1 if flag_wrapv is set because
14626 ABS_EXPR<INT_MIN> = INT_MIN. */
14627 if (!ANY_INTEGRAL_TYPE_P (type))
14628 return true;
14629 if (TYPE_OVERFLOW_UNDEFINED (type))
14631 *strict_overflow_p = true;
14632 return true;
14634 break;
14636 case NON_LVALUE_EXPR:
14637 case FLOAT_EXPR:
14638 case FIX_TRUNC_EXPR:
14639 return RECURSE (op0);
14641 CASE_CONVERT:
14643 tree inner_type = TREE_TYPE (op0);
14644 tree outer_type = type;
14646 if (TREE_CODE (outer_type) == REAL_TYPE)
14648 if (TREE_CODE (inner_type) == REAL_TYPE)
14649 return RECURSE (op0);
14650 if (INTEGRAL_TYPE_P (inner_type))
14652 if (TYPE_UNSIGNED (inner_type))
14653 return true;
14654 return RECURSE (op0);
14657 else if (INTEGRAL_TYPE_P (outer_type))
14659 if (TREE_CODE (inner_type) == REAL_TYPE)
14660 return RECURSE (op0);
14661 if (INTEGRAL_TYPE_P (inner_type))
14662 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14663 && TYPE_UNSIGNED (inner_type);
14666 break;
14668 default:
14669 return tree_simple_nonnegative_warnv_p (code, type);
14672 /* We don't know the sign of `t', so be conservative and return false. */
14673 return false;
14676 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14677 value is based on the assumption that signed overflow is undefined,
14678 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14679 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14681 bool
14682 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14683 tree op1, bool *strict_overflow_p,
14684 int depth)
14686 if (TYPE_UNSIGNED (type))
14687 return true;
14689 switch (code)
14691 case POINTER_PLUS_EXPR:
14692 case PLUS_EXPR:
14693 if (FLOAT_TYPE_P (type))
14694 return RECURSE (op0) && RECURSE (op1);
14696 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14697 both unsigned and at least 2 bits shorter than the result. */
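/* E.g. with 16-bit short and 32-bit int,
   (int) (unsigned short) a + (int) (unsigned short) b needs at most
   17 bits, so the sum can never reach the sign bit of the int. */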
14698 if (TREE_CODE (type) == INTEGER_TYPE
14699 && TREE_CODE (op0) == NOP_EXPR
14700 && TREE_CODE (op1) == NOP_EXPR)
14702 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14703 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14704 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14705 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14707 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14708 TYPE_PRECISION (inner2)) + 1;
14709 return prec < TYPE_PRECISION (type);
14712 break;
14714 case MULT_EXPR:
14715 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14717 /* x * x is always non-negative for floating point x,
14718 or for integer x when overflow is undefined. */
14719 if (operand_equal_p (op0, op1, 0)
14720 || (RECURSE (op0) && RECURSE (op1)))
14722 if (ANY_INTEGRAL_TYPE_P (type)
14723 && TYPE_OVERFLOW_UNDEFINED (type))
14724 *strict_overflow_p = true;
14725 return true;
14729 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14730 both unsigned and their combined precision is less than the result's. */
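/* E.g. with 8-bit char and 32-bit int,
   (int) (unsigned char) a * (int) (unsigned char) b needs at most
   16 bits, so the product can never reach the sign bit of the int. */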
14731 if (TREE_CODE (type) == INTEGER_TYPE
14732 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14733 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14735 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14736 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14737 : TREE_TYPE (op0);
14738 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14739 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14740 : TREE_TYPE (op1);
14742 bool unsigned0 = TYPE_UNSIGNED (inner0);
14743 bool unsigned1 = TYPE_UNSIGNED (inner1);
14745 if (TREE_CODE (op0) == INTEGER_CST)
14746 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14748 if (TREE_CODE (op1) == INTEGER_CST)
14749 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14751 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14752 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14754 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14755 ? tree_int_cst_min_precision (op0, UNSIGNED)
14756 : TYPE_PRECISION (inner0);
14758 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14759 ? tree_int_cst_min_precision (op1, UNSIGNED)
14760 : TYPE_PRECISION (inner1);
14762 return precision0 + precision1 < TYPE_PRECISION (type);
14765 return false;
14767 case BIT_AND_EXPR:
14768 return RECURSE (op0) || RECURSE (op1);
14770 case MAX_EXPR:
14771 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14772 things. */
14773 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14774 return RECURSE (op0) && RECURSE (op1);
14775 return RECURSE (op0) || RECURSE (op1);
14777 case BIT_IOR_EXPR:
14778 case BIT_XOR_EXPR:
14779 case MIN_EXPR:
14780 case RDIV_EXPR:
14781 case TRUNC_DIV_EXPR:
14782 case CEIL_DIV_EXPR:
14783 case FLOOR_DIV_EXPR:
14784 case ROUND_DIV_EXPR:
14785 return RECURSE (op0) && RECURSE (op1);
14787 case TRUNC_MOD_EXPR:
14788 return RECURSE (op0);
14790 case FLOOR_MOD_EXPR:
14791 return RECURSE (op1);
14793 case CEIL_MOD_EXPR:
14794 case ROUND_MOD_EXPR:
14795 default:
14796 return tree_simple_nonnegative_warnv_p (code, type);
14799 /* We don't know the sign of `t', so be conservative and return false. */
14800 return false;
14803 /* Return true if T is known to be non-negative. If the return
14804 value is based on the assumption that signed overflow is undefined,
14805 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14806 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14808 bool
14809 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14811 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14812 return true;
14814 switch (TREE_CODE (t))
14816 case INTEGER_CST:
14817 return tree_int_cst_sgn (t) >= 0;
14819 case REAL_CST:
14820 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14822 case FIXED_CST:
14823 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14825 case COND_EXPR:
14826 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14828 case SSA_NAME:
14829 /* Limit the depth of recursion to avoid quadratic behavior.
14830 This is expected to catch almost all occurrences in practice.
14831 If this code misses important cases that unbounded recursion
14832 would not, passes that need this information could be revised
14833 to provide it through dataflow propagation. */
14834 return (!name_registered_for_update_p (t)
14835 && depth < param_max_ssa_name_query_depth
14836 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14837 strict_overflow_p, depth));
14839 default:
14840 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14844 /* Return true if T is known to be non-negative. If the return
14845 value is based on the assumption that signed overflow is undefined,
14846 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14847 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14849 bool
14850 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14851 bool *strict_overflow_p, int depth)
14853 switch (fn)
14855 CASE_CFN_ACOS:
14856 CASE_CFN_ACOSH:
14857 CASE_CFN_CABS:
14858 CASE_CFN_COSH:
14859 CASE_CFN_ERFC:
14860 CASE_CFN_EXP:
14861 CASE_CFN_EXP10:
14862 CASE_CFN_EXP2:
14863 CASE_CFN_FABS:
14864 CASE_CFN_FDIM:
14865 CASE_CFN_HYPOT:
14866 CASE_CFN_POW10:
14867 CASE_CFN_FFS:
14868 CASE_CFN_PARITY:
14869 CASE_CFN_POPCOUNT:
14870 CASE_CFN_CLZ:
14871 CASE_CFN_CLRSB:
14872 case CFN_BUILT_IN_BSWAP16:
14873 case CFN_BUILT_IN_BSWAP32:
14874 case CFN_BUILT_IN_BSWAP64:
14875 case CFN_BUILT_IN_BSWAP128:
14876 /* Always true. */
14877 return true;
14879 CASE_CFN_SQRT:
14880 CASE_CFN_SQRT_FN:
14881 /* sqrt(-0.0) is -0.0. */
14882 if (!HONOR_SIGNED_ZEROS (type))
14883 return true;
14884 return RECURSE (arg0);
14886 CASE_CFN_ASINH:
14887 CASE_CFN_ATAN:
14888 CASE_CFN_ATANH:
14889 CASE_CFN_CBRT:
14890 CASE_CFN_CEIL:
14891 CASE_CFN_CEIL_FN:
14892 CASE_CFN_ERF:
14893 CASE_CFN_EXPM1:
14894 CASE_CFN_FLOOR:
14895 CASE_CFN_FLOOR_FN:
14896 CASE_CFN_FMOD:
14897 CASE_CFN_FREXP:
14898 CASE_CFN_ICEIL:
14899 CASE_CFN_IFLOOR:
14900 CASE_CFN_IRINT:
14901 CASE_CFN_IROUND:
14902 CASE_CFN_LCEIL:
14903 CASE_CFN_LDEXP:
14904 CASE_CFN_LFLOOR:
14905 CASE_CFN_LLCEIL:
14906 CASE_CFN_LLFLOOR:
14907 CASE_CFN_LLRINT:
14908 CASE_CFN_LLROUND:
14909 CASE_CFN_LRINT:
14910 CASE_CFN_LROUND:
14911 CASE_CFN_MODF:
14912 CASE_CFN_NEARBYINT:
14913 CASE_CFN_NEARBYINT_FN:
14914 CASE_CFN_RINT:
14915 CASE_CFN_RINT_FN:
14916 CASE_CFN_ROUND:
14917 CASE_CFN_ROUND_FN:
14918 CASE_CFN_ROUNDEVEN:
14919 CASE_CFN_ROUNDEVEN_FN:
14920 CASE_CFN_SCALB:
14921 CASE_CFN_SCALBLN:
14922 CASE_CFN_SCALBN:
14923 CASE_CFN_SIGNBIT:
14924 CASE_CFN_SIGNIFICAND:
14925 CASE_CFN_SINH:
14926 CASE_CFN_TANH:
14927 CASE_CFN_TRUNC:
14928 CASE_CFN_TRUNC_FN:
14929 /* True if the 1st argument is nonnegative. */
14930 return RECURSE (arg0);
14932 CASE_CFN_FMAX:
14933 CASE_CFN_FMAX_FN:
14934 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14935 things. In the presence of sNaNs, we're only guaranteed to be
14936 non-negative if both operands are non-negative. In the presence
14937 of qNaNs, we're non-negative if either operand is non-negative
14938 and can't be a qNaN, or if both operands are non-negative. */
14939 if (tree_expr_maybe_signaling_nan_p (arg0)
14940 || tree_expr_maybe_signaling_nan_p (arg1))
14941 return RECURSE (arg0) && RECURSE (arg1);
14942 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14943 || RECURSE (arg1))
14944 : (RECURSE (arg1)
14945 && !tree_expr_maybe_nan_p (arg1));
14947 CASE_CFN_FMIN:
14948 CASE_CFN_FMIN_FN:
14949 /* True if the 1st AND 2nd arguments are nonnegative. */
14950 return RECURSE (arg0) && RECURSE (arg1);
14952 CASE_CFN_COPYSIGN:
14953 CASE_CFN_COPYSIGN_FN:
14954 /* True if the 2nd argument is nonnegative. */
14955 return RECURSE (arg1);
14957 CASE_CFN_POWI:
14958 /* True if the 1st argument is nonnegative or the second
14959 argument is an even integer. */
14960 if (TREE_CODE (arg1) == INTEGER_CST
14961 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14962 return true;
14963 return RECURSE (arg0);
14965 CASE_CFN_POW:
14966 /* True if the 1st argument is nonnegative or the second
14967 argument is an even integer valued real. */
14968 if (TREE_CODE (arg1) == REAL_CST)
14970 REAL_VALUE_TYPE c;
14971 HOST_WIDE_INT n;
14973 c = TREE_REAL_CST (arg1);
14974 n = real_to_integer (&c);
14975 if ((n & 1) == 0)
14977 REAL_VALUE_TYPE cint;
14978 real_from_integer (&cint, VOIDmode, n, SIGNED);
14979 if (real_identical (&c, &cint))
14980 return true;
14983 return RECURSE (arg0);
14985 default:
14986 break;
14988 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14991 /* Return true if T is known to be non-negative. If the return
14992 value is based on the assumption that signed overflow is undefined,
14993 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14994 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14996 static bool
14997 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14999 enum tree_code code = TREE_CODE (t);
15000 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15001 return true;
15003 switch (code)
15005 case TARGET_EXPR:
15007 tree temp = TARGET_EXPR_SLOT (t);
15008 t = TARGET_EXPR_INITIAL (t);
15010 /* If the initializer is non-void, then it's a normal expression
15011 that will be assigned to the slot. */
15012 if (!VOID_TYPE_P (t))
15013 return RECURSE (t);
15015 /* Otherwise, the initializer sets the slot in some way. One common
15016 way is an assignment statement at the end of the initializer. */
15017 while (1)
15019 if (TREE_CODE (t) == BIND_EXPR)
15020 t = expr_last (BIND_EXPR_BODY (t));
15021 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15022 || TREE_CODE (t) == TRY_CATCH_EXPR)
15023 t = expr_last (TREE_OPERAND (t, 0));
15024 else if (TREE_CODE (t) == STATEMENT_LIST)
15025 t = expr_last (t);
15026 else
15027 break;
15029 if (TREE_CODE (t) == MODIFY_EXPR
15030 && TREE_OPERAND (t, 0) == temp)
15031 return RECURSE (TREE_OPERAND (t, 1));
15033 return false;
15036 case CALL_EXPR:
15038 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15039 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15041 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15042 get_call_combined_fn (t),
15043 arg0,
15044 arg1,
15045 strict_overflow_p, depth);
15047 case COMPOUND_EXPR:
15048 case MODIFY_EXPR:
15049 return RECURSE (TREE_OPERAND (t, 1));
15051 case BIND_EXPR:
15052 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15054 case SAVE_EXPR:
15055 return RECURSE (TREE_OPERAND (t, 0));
15057 default:
15058 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15062 #undef RECURSE
15063 #undef tree_expr_nonnegative_warnv_p
15065 /* Return true if T is known to be non-negative. If the return
15066 value is based on the assumption that signed overflow is undefined,
15067 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15068 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15070 bool
15071 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15073 enum tree_code code;
15074 if (t == error_mark_node)
15075 return false;
15077 code = TREE_CODE (t);
15078 switch (TREE_CODE_CLASS (code))
15080 case tcc_binary:
15081 case tcc_comparison:
15082 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15083 TREE_TYPE (t),
15084 TREE_OPERAND (t, 0),
15085 TREE_OPERAND (t, 1),
15086 strict_overflow_p, depth);
15088 case tcc_unary:
15089 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15090 TREE_TYPE (t),
15091 TREE_OPERAND (t, 0),
15092 strict_overflow_p, depth);
15094 case tcc_constant:
15095 case tcc_declaration:
15096 case tcc_reference:
15097 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15099 default:
15100 break;
15103 switch (code)
15105 case TRUTH_AND_EXPR:
15106 case TRUTH_OR_EXPR:
15107 case TRUTH_XOR_EXPR:
15108 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15109 TREE_TYPE (t),
15110 TREE_OPERAND (t, 0),
15111 TREE_OPERAND (t, 1),
15112 strict_overflow_p, depth);
15113 case TRUTH_NOT_EXPR:
15114 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15115 TREE_TYPE (t),
15116 TREE_OPERAND (t, 0),
15117 strict_overflow_p, depth);
15119 case COND_EXPR:
15120 case CONSTRUCTOR:
15121 case OBJ_TYPE_REF:
15122 case ASSERT_EXPR:
15123 case ADDR_EXPR:
15124 case WITH_SIZE_EXPR:
15125 case SSA_NAME:
15126 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15128 default:
15129 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15133 /* Return true if `t' is known to be non-negative. Handle warnings
15134 about undefined signed overflow. */
15136 bool
15137 tree_expr_nonnegative_p (tree t)
15139 bool ret, strict_overflow_p;
15141 strict_overflow_p = false;
15142 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15143 if (strict_overflow_p)
15144 fold_overflow_warning (("assuming signed overflow does not occur when "
15145 "determining that expression is always "
15146 "non-negative"),
15147 WARN_STRICT_OVERFLOW_MISC);
15148 return ret;
15152 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15153 For floating point we further ensure that the value is not denormal.
15154 Similar logic is present in nonzero_address in rtlanal.cc.
15156 If the return value is based on the assumption that signed overflow
15157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15158 change *STRICT_OVERFLOW_P. */
15160 bool
15161 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15162 bool *strict_overflow_p)
15164 switch (code)
15166 case ABS_EXPR:
15167 return tree_expr_nonzero_warnv_p (op0,
15168 strict_overflow_p);
15170 case NOP_EXPR:
15172 tree inner_type = TREE_TYPE (op0);
15173 tree outer_type = type;
15175 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15176 && tree_expr_nonzero_warnv_p (op0,
15177 strict_overflow_p));
15179 break;
15181 case NON_LVALUE_EXPR:
15182 return tree_expr_nonzero_warnv_p (op0,
15183 strict_overflow_p);
15185 default:
15186 break;
15189 return false;
15192 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15193 For floating point we further ensure that the value is not denormal.
15194 Similar logic is present in nonzero_address in rtlanal.cc.
15196 If the return value is based on the assumption that signed overflow
15197 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15198 change *STRICT_OVERFLOW_P. */
15200 bool
15201 tree_binary_nonzero_warnv_p (enum tree_code code,
15202 tree type,
15203 tree op0,
15204 tree op1, bool *strict_overflow_p)
15206 bool sub_strict_overflow_p;
15207 switch (code)
15209 case POINTER_PLUS_EXPR:
15210 case PLUS_EXPR:
15211 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15213 /* In the presence of negative values it is hard
15214 to say anything definite. */
15215 sub_strict_overflow_p = false;
15216 if (!tree_expr_nonnegative_warnv_p (op0,
15217 &sub_strict_overflow_p)
15218 || !tree_expr_nonnegative_warnv_p (op1,
15219 &sub_strict_overflow_p))
15220 return false;
15221 /* One of the operands must be positive and the other non-negative. */
15222 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15223 overflows, on a twos-complement machine the sum of two
15224 nonnegative numbers can never be zero. */
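/* (With 32-bit int the largest possible sum is
   0x7fffffff + 0x7fffffff = 0xfffffffe, which wraps to -2: nonzero,
   as is every such sum where at least one operand is nonzero.) */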
15225 return (tree_expr_nonzero_warnv_p (op0,
15226 strict_overflow_p)
15227 || tree_expr_nonzero_warnv_p (op1,
15228 strict_overflow_p));
15230 break;
15232 case MULT_EXPR:
15233 if (TYPE_OVERFLOW_UNDEFINED (type))
15235 if (tree_expr_nonzero_warnv_p (op0,
15236 strict_overflow_p)
15237 && tree_expr_nonzero_warnv_p (op1,
15238 strict_overflow_p))
15240 *strict_overflow_p = true;
15241 return true;
15244 break;
15246 case MIN_EXPR:
15247 sub_strict_overflow_p = false;
15248 if (tree_expr_nonzero_warnv_p (op0,
15249 &sub_strict_overflow_p)
15250 && tree_expr_nonzero_warnv_p (op1,
15251 &sub_strict_overflow_p))
15253 if (sub_strict_overflow_p)
15254 *strict_overflow_p = true;
15256 break;
15258 case MAX_EXPR:
15259 sub_strict_overflow_p = false;
15260 if (tree_expr_nonzero_warnv_p (op0,
15261 &sub_strict_overflow_p))
15263 if (sub_strict_overflow_p)
15264 *strict_overflow_p = true;
15266 /* When both operands are nonzero, MAX must be too. */
15267 if (tree_expr_nonzero_warnv_p (op1,
15268 strict_overflow_p))
15269 return true;
15271 /* MAX where operand 0 is positive is positive. */
15272 return tree_expr_nonnegative_warnv_p (op0,
15273 strict_overflow_p);
15275 /* MAX where operand 1 is positive is positive. */
15276 else if (tree_expr_nonzero_warnv_p (op1,
15277 &sub_strict_overflow_p)
15278 && tree_expr_nonnegative_warnv_p (op1,
15279 &sub_strict_overflow_p))
15281 if (sub_strict_overflow_p)
15282 *strict_overflow_p = true;
15283 return true;
15285 break;
15287 case BIT_IOR_EXPR:
15288 return (tree_expr_nonzero_warnv_p (op1,
15289 strict_overflow_p)
15290 || tree_expr_nonzero_warnv_p (op0,
15291 strict_overflow_p));
15293 default:
15294 break;
15297 return false;
15300 /* Return true when T is an address and is known to be nonzero.
15301 For floating point we further ensure that T is not denormal.
15302 Similar logic is present in nonzero_address in rtlanal.cc.
15304 If the return value is based on the assumption that signed overflow
15305 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15306 change *STRICT_OVERFLOW_P. */
15308 bool
15309 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15311 bool sub_strict_overflow_p;
15312 switch (TREE_CODE (t))
15314 case INTEGER_CST:
15315 return !integer_zerop (t);
15317 case ADDR_EXPR:
15319 tree base = TREE_OPERAND (t, 0);
15321 if (!DECL_P (base))
15322 base = get_base_address (base);
15324 if (base && TREE_CODE (base) == TARGET_EXPR)
15325 base = TARGET_EXPR_SLOT (base);
15327 if (!base)
15328 return false;
15330 /* For objects in the symbol table, check if we know they are non-zero.
15331 Don't do anything for variables and functions before the symtab is
15332 built; it is quite possible that they will be declared weak later. */
15333 int nonzero_addr = maybe_nonzero_address (base);
15334 if (nonzero_addr >= 0)
15335 return nonzero_addr;
15337 /* Constants are never weak. */
15338 if (CONSTANT_CLASS_P (base))
15339 return true;
15341 return false;
15344 case COND_EXPR:
15345 sub_strict_overflow_p = false;
15346 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15347 &sub_strict_overflow_p)
15348 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15349 &sub_strict_overflow_p))
15351 if (sub_strict_overflow_p)
15352 *strict_overflow_p = true;
15353 return true;
15355 break;
15357 case SSA_NAME:
15358 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15359 break;
15360 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15362 default:
15363 break;
15365 return false;
15368 #define integer_valued_real_p(X) \
15369 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15371 #define RECURSE(X) \
15372 ((integer_valued_real_p) (X, depth + 1))
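/* As for tree_expr_nonnegative_warnv_p above, direct calls to
   integer_valued_real_p within this file are turned into a compile-time
   error so that recursion goes through RECURSE and DEPTH stays
   accurate. */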
15374 /* Return true if the floating point result of (CODE OP0) has an
15375 integer value. We also allow +Inf, -Inf and NaN to be considered
15376 integer values. Return false for signaling NaN.
15378 DEPTH is the current nesting depth of the query. */
15380 bool
15381 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15383 switch (code)
15385 case FLOAT_EXPR:
15386 return true;
15388 case ABS_EXPR:
15389 return RECURSE (op0);
15391 CASE_CONVERT:
15393 tree type = TREE_TYPE (op0);
15394 if (TREE_CODE (type) == INTEGER_TYPE)
15395 return true;
15396 if (TREE_CODE (type) == REAL_TYPE)
15397 return RECURSE (op0);
15398 break;
15401 default:
15402 break;
15404 return false;
15407 /* Return true if the floating point result of (CODE OP0 OP1) has an
15408 integer value. We also allow +Inf, -Inf and NaN to be considered
15409 integer values. Return false for signaling NaN.
15411 DEPTH is the current nesting depth of the query. */
15413 bool
15414 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15416 switch (code)
15418 case PLUS_EXPR:
15419 case MINUS_EXPR:
15420 case MULT_EXPR:
15421 case MIN_EXPR:
15422 case MAX_EXPR:
15423 return RECURSE (op0) && RECURSE (op1);
15425 default:
15426 break;
15428 return false;
15431 /* Return true if the floating point result of calling FNDECL with arguments
15432 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15433 considered integer values. Return false for signaling NaN. If FNDECL
15434 takes fewer than 2 arguments, the remaining ARGn are null.
15436 DEPTH is the current nesting depth of the query. */
15438 bool
15439 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15441 switch (fn)
15443 CASE_CFN_CEIL:
15444 CASE_CFN_CEIL_FN:
15445 CASE_CFN_FLOOR:
15446 CASE_CFN_FLOOR_FN:
15447 CASE_CFN_NEARBYINT:
15448 CASE_CFN_NEARBYINT_FN:
15449 CASE_CFN_RINT:
15450 CASE_CFN_RINT_FN:
15451 CASE_CFN_ROUND:
15452 CASE_CFN_ROUND_FN:
15453 CASE_CFN_ROUNDEVEN:
15454 CASE_CFN_ROUNDEVEN_FN:
15455 CASE_CFN_TRUNC:
15456 CASE_CFN_TRUNC_FN:
15457 return true;
15459 CASE_CFN_FMIN:
15460 CASE_CFN_FMIN_FN:
15461 CASE_CFN_FMAX:
15462 CASE_CFN_FMAX_FN:
15463 return RECURSE (arg0) && RECURSE (arg1);
15465 default:
15466 break;
15468 return false;
15471 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15472 has an integer value. We also allow +Inf, -Inf and NaN to be
15473 considered integer values. Return false for signaling NaN.
15475 DEPTH is the current nesting depth of the query. */
15477 bool
15478 integer_valued_real_single_p (tree t, int depth)
15480 switch (TREE_CODE (t))
15482 case REAL_CST:
15483 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15485 case COND_EXPR:
15486 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15488 case SSA_NAME:
15489 /* Limit the depth of recursion to avoid quadratic behavior.
15490 This is expected to catch almost all occurrences in practice.
15491 If this code misses important cases that unbounded recursion
15492 would not, passes that need this information could be revised
15493 to provide it through dataflow propagation. */
15494 return (!name_registered_for_update_p (t)
15495 && depth < param_max_ssa_name_query_depth
15496 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15497 depth));
15499 default:
15500 break;
15502 return false;
15505 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15506 has an integer value. We also allow +Inf, -Inf and NaN to be
15507 considered integer values. Return false for signaling NaN.
15509 DEPTH is the current nesting depth of the query. */
15511 static bool
15512 integer_valued_real_invalid_p (tree t, int depth)
15514 switch (TREE_CODE (t))
15516 case COMPOUND_EXPR:
15517 case MODIFY_EXPR:
15518 case BIND_EXPR:
15519 return RECURSE (TREE_OPERAND (t, 1));
15521 case SAVE_EXPR:
15522 return RECURSE (TREE_OPERAND (t, 0));
15524 default:
15525 break;
15527 return false;
15530 #undef RECURSE
15531 #undef integer_valued_real_p
15533 /* Return true if the floating point expression T has an integer value.
15534 We also allow +Inf, -Inf and NaN to be considered integer values.
15535 Return false for signaling NaN.
15537 DEPTH is the current nesting depth of the query. */
15539 bool
15540 integer_valued_real_p (tree t, int depth)
15542 if (t == error_mark_node)
15543 return false;
15545 STRIP_ANY_LOCATION_WRAPPER (t);
15547 tree_code code = TREE_CODE (t);
15548 switch (TREE_CODE_CLASS (code))
15550 case tcc_binary:
15551 case tcc_comparison:
15552 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15553 TREE_OPERAND (t, 1), depth);
15555 case tcc_unary:
15556 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15558 case tcc_constant:
15559 case tcc_declaration:
15560 case tcc_reference:
15561 return integer_valued_real_single_p (t, depth);
15563 default:
15564 break;
15567 switch (code)
15569 case COND_EXPR:
15570 case SSA_NAME:
15571 return integer_valued_real_single_p (t, depth);
15573 case CALL_EXPR:
15575 tree arg0 = (call_expr_nargs (t) > 0
15576 ? CALL_EXPR_ARG (t, 0)
15577 : NULL_TREE);
15578 tree arg1 = (call_expr_nargs (t) > 1
15579 ? CALL_EXPR_ARG (t, 1)
15580 : NULL_TREE);
15581 return integer_valued_real_call_p (get_call_combined_fn (t),
15582 arg0, arg1, depth);
15585 default:
15586 return integer_valued_real_invalid_p (t, depth);
15590 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15591 attempt to fold the expression to a constant without modifying TYPE,
15592 OP0 or OP1.
15594 If the expression can be simplified to a constant, then return
15595 the constant. If the expression cannot be simplified to a
15596 constant, then return NULL_TREE. */
15598 tree
15599 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15601 tree tem = fold_binary (code, type, op0, op1);
15602 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15605 /* Given the components of a unary expression CODE, TYPE and OP0,
15606 attempt to fold the expression to a constant without modifying
15607 TYPE or OP0.
15609 If the expression can be simplified to a constant, then return
15610 the constant. If the expression cannot be simplified to a
15611 constant, then return NULL_TREE. */
15613 tree
15614 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15616 tree tem = fold_unary (code, type, op0);
15617 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15620 /* If EXP represents referencing an element in a constant string
15621 (either via pointer arithmetic or array indexing), return the
15622 tree representing the value accessed, otherwise return NULL. */
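/* E.g. "abc"[1] and *("abc" + 2) fold to the character constants
   'b' and 'c' respectively. */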
15624 tree
15625 fold_read_from_constant_string (tree exp)
15627 if ((TREE_CODE (exp) == INDIRECT_REF
15628 || TREE_CODE (exp) == ARRAY_REF)
15629 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15631 tree exp1 = TREE_OPERAND (exp, 0);
15632 tree index;
15633 tree string;
15634 location_t loc = EXPR_LOCATION (exp);
15636 if (TREE_CODE (exp) == INDIRECT_REF)
15637 string = string_constant (exp1, &index, NULL, NULL);
15638 else
15640 tree low_bound = array_ref_low_bound (exp);
15641 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15643 /* Optimize the special case of a zero lower bound.
15645 We convert the low_bound to sizetype to avoid some problems
15646 with constant folding. (E.g. suppose the lower bound is 1,
15647 and its mode is QI. Without the conversion, (ARRAY
15648 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15649 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15650 if (! integer_zerop (low_bound))
15651 index = size_diffop_loc (loc, index,
15652 fold_convert_loc (loc, sizetype, low_bound));
15654 string = exp1;
15657 scalar_int_mode char_mode;
15658 if (string
15659 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15660 && TREE_CODE (string) == STRING_CST
15661 && tree_fits_uhwi_p (index)
15662 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15663 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15664 &char_mode)
15665 && GET_MODE_SIZE (char_mode) == 1)
15666 return build_int_cst_type (TREE_TYPE (exp),
15667 (TREE_STRING_POINTER (string)
15668 [TREE_INT_CST_LOW (index)]));
15670 return NULL;
15673 /* Folds a read from vector element at IDX of vector ARG. */
15675 tree
15676 fold_read_from_vector (tree arg, poly_uint64 idx)
15678 unsigned HOST_WIDE_INT i;
15679 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15680 && known_ge (idx, 0u)
15681 && idx.is_constant (&i))
15683 if (TREE_CODE (arg) == VECTOR_CST)
15684 return VECTOR_CST_ELT (arg, i);
15685 else if (TREE_CODE (arg) == CONSTRUCTOR)
15687 if (CONSTRUCTOR_NELTS (arg)
15688 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15689 return NULL_TREE;
15690 if (i >= CONSTRUCTOR_NELTS (arg))
15691 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15692 return CONSTRUCTOR_ELT (arg, i)->value;
15695 return NULL_TREE;
15698 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15699 an integer constant, real, or fixed-point constant.
15701 TYPE is the type of the result. */
15703 static tree
15704 fold_negate_const (tree arg0, tree type)
15706 tree t = NULL_TREE;
15708 switch (TREE_CODE (arg0))
15710 case REAL_CST:
15711 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15712 break;
15714 case FIXED_CST:
15716 FIXED_VALUE_TYPE f;
15717 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15718 &(TREE_FIXED_CST (arg0)), NULL,
15719 TYPE_SATURATING (type));
15720 t = build_fixed (type, f);
15721 /* Propagate overflow flags. */
15722 if (overflow_p | TREE_OVERFLOW (arg0))
15723 TREE_OVERFLOW (t) = 1;
15724 break;
15727 default:
15728 if (poly_int_tree_p (arg0))
15730 wi::overflow_type overflow;
15731 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15732 t = force_fit_type (type, res, 1,
15733 (overflow && ! TYPE_UNSIGNED (type))
15734 || TREE_OVERFLOW (arg0));
15735 break;
15738 gcc_unreachable ();
15741 return t;
15744 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15745 an integer constant or real constant.
15747 TYPE is the type of the result. */
15749 tree
15750 fold_abs_const (tree arg0, tree type)
15752 tree t = NULL_TREE;
15754 switch (TREE_CODE (arg0))
15756 case INTEGER_CST:
15758 /* If the value is unsigned or non-negative, then the absolute value
15759 is the same as the ordinary value. */
15760 wide_int val = wi::to_wide (arg0);
15761 wi::overflow_type overflow = wi::OVF_NONE;
15762 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15763 ;
15765 /* If the value is negative, then the absolute value is
15766 its negation. */
15767 else
15768 val = wi::neg (val, &overflow);
15770 /* Force to the destination type, set TREE_OVERFLOW for signed
15771 TYPE only. */
15772 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15774 break;
15776 case REAL_CST:
15777 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15778 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15779 else
15780 t = arg0;
15781 break;
15783 default:
15784 gcc_unreachable ();
15787 return t;
15790 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15791 constant. TYPE is the type of the result. */
15793 static tree
15794 fold_not_const (const_tree arg0, tree type)
15796 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15798 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15801 /* Given CODE, a relational operator, the target type, TYPE and two
15802 constant operands OP0 and OP1, return the result of the
15803 relational operation. If the result is not a compile time
15804 constant, then return NULL_TREE. */
15806 static tree
15807 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15809 int result, invert;
15811 /* From here on, the only cases we handle are when the result is
15812 known to be a constant. */
15814 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15816 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15817 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15819 /* Handle the cases where either operand is a NaN. */
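/* EQ and ORDERED are false and NE and the unordered comparisons are
   true when either operand is a NaN; LT/LE/GT/GE/LTGT also evaluate
   to false, but they raise an invalid-operand exception on NaNs, so
   they may only be folded when -ftrapping-math is disabled. */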
15820 if (real_isnan (c0) || real_isnan (c1))
15822 switch (code)
15824 case EQ_EXPR:
15825 case ORDERED_EXPR:
15826 result = 0;
15827 break;
15829 case NE_EXPR:
15830 case UNORDERED_EXPR:
15831 case UNLT_EXPR:
15832 case UNLE_EXPR:
15833 case UNGT_EXPR:
15834 case UNGE_EXPR:
15835 case UNEQ_EXPR:
15836 result = 1;
15837 break;
15839 case LT_EXPR:
15840 case LE_EXPR:
15841 case GT_EXPR:
15842 case GE_EXPR:
15843 case LTGT_EXPR:
15844 if (flag_trapping_math)
15845 return NULL_TREE;
15846 result = 0;
15847 break;
15849 default:
15850 gcc_unreachable ();
15853 return constant_boolean_node (result, type);
15856 return constant_boolean_node (real_compare (code, c0, c1), type);
15859 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15861 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15862 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15863 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15866 /* Handle equality/inequality of complex constants. */
15867 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15869 tree rcond = fold_relational_const (code, type,
15870 TREE_REALPART (op0),
15871 TREE_REALPART (op1));
15872 tree icond = fold_relational_const (code, type,
15873 TREE_IMAGPART (op0),
15874 TREE_IMAGPART (op1));
15875 if (code == EQ_EXPR)
15876 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15877 else if (code == NE_EXPR)
15878 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15879 else
15880 return NULL_TREE;
15883 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15885 if (!VECTOR_TYPE_P (type))
15887 /* Have vector comparison with scalar boolean result. */
15888 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15889 && known_eq (VECTOR_CST_NELTS (op0),
15890 VECTOR_CST_NELTS (op1)));
15891 unsigned HOST_WIDE_INT nunits;
15892 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15893 return NULL_TREE;
15894 for (unsigned i = 0; i < nunits; i++)
15896 tree elem0 = VECTOR_CST_ELT (op0, i);
15897 tree elem1 = VECTOR_CST_ELT (op1, i);
15898 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15899 if (tmp == NULL_TREE)
15900 return NULL_TREE;
15901 if (integer_zerop (tmp))
15902 return constant_boolean_node (code == NE_EXPR, type);
15904 return constant_boolean_node (code == EQ_EXPR, type);
15906 tree_vector_builder elts;
15907 if (!elts.new_binary_operation (type, op0, op1, false))
15908 return NULL_TREE;
15909 unsigned int count = elts.encoded_nelts ();
15910 for (unsigned i = 0; i < count; i++)
15912 tree elem_type = TREE_TYPE (type);
15913 tree elem0 = VECTOR_CST_ELT (op0, i);
15914 tree elem1 = VECTOR_CST_ELT (op1, i);
15916 tree tem = fold_relational_const (code, elem_type,
15917 elem0, elem1);
15919 if (tem == NULL_TREE)
15920 return NULL_TREE;
15922 elts.quick_push (build_int_cst (elem_type,
15923 integer_zerop (tem) ? 0 : -1));
15926 return elts.build ();
15929 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15931 To compute GT, swap the arguments and do LT.
15932 To compute GE, do LT and invert the result.
15933 To compute LE, swap the arguments, do LT and invert the result.
15934 To compute NE, do EQ and invert the result.
15936 Therefore, the code below must handle only EQ and LT. */
15938 if (code == LE_EXPR || code == GT_EXPR)
15940 std::swap (op0, op1);
15941 code = swap_tree_comparison (code);
15944 /* Note that it is safe to invert for real values here because we
15945 have already handled the one case where it matters. */
15947 invert = 0;
15948 if (code == NE_EXPR || code == GE_EXPR)
15950 invert = 1;
15951 code = invert_tree_comparison (code, false);
15954 /* Compute a result for LT or EQ if args permit;
15955 otherwise return NULL_TREE. */
15956 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15958 if (code == EQ_EXPR)
15959 result = tree_int_cst_equal (op0, op1);
15960 else
15961 result = tree_int_cst_lt (op0, op1);
15963 else
15964 return NULL_TREE;
15966 if (invert)
15967 result ^= 1;
15968 return constant_boolean_node (result, type);
15971 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15972 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15973 itself. */
15975 tree
15976 fold_build_cleanup_point_expr (tree type, tree expr)
15978 /* If the expression does not have side effects then we don't have to wrap
15979 it with a cleanup point expression. */
15980 if (!TREE_SIDE_EFFECTS (expr))
15981 return expr;
15983 /* If the expression is a return, check whether the expression inside
15984 the return, or the right-hand side of the modify expression inside
15985 the return, has side effects. If either has none, we don't need to
15986 wrap the expression in a cleanup point expression. Note we don't check
15987 the left-hand side of the modify because it should always be the return decl. */
15988 if (TREE_CODE (expr) == RETURN_EXPR)
15990 tree op = TREE_OPERAND (expr, 0);
15991 if (!op || !TREE_SIDE_EFFECTS (op))
15992 return expr;
15993 op = TREE_OPERAND (op, 1);
15994 if (!TREE_SIDE_EFFECTS (op))
15995 return expr;
15998 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16001 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16002 of an indirection through OP0, or NULL_TREE if no simplification is
16003 possible. */
16005 tree
16006 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16008 tree sub = op0;
16009 tree subtype;
16010 poly_uint64 const_op01;
16012 STRIP_NOPS (sub);
16013 subtype = TREE_TYPE (sub);
16014 if (!POINTER_TYPE_P (subtype)
16015 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16016 return NULL_TREE;
16018 if (TREE_CODE (sub) == ADDR_EXPR)
16020 tree op = TREE_OPERAND (sub, 0);
16021 tree optype = TREE_TYPE (op);
16023 /* *&CONST_DECL -> the value of the const decl. */
16024 if (TREE_CODE (op) == CONST_DECL)
16025 return DECL_INITIAL (op);
16026 /* *&p => p; make sure to handle *&"str"[cst] here. */
16027 if (type == optype)
16029 tree fop = fold_read_from_constant_string (op);
16030 if (fop)
16031 return fop;
16032 else
16033 return op;
16035 /* *(foo *)&fooarray => fooarray[0] */
16036 else if (TREE_CODE (optype) == ARRAY_TYPE
16037 && type == TREE_TYPE (optype)
16038 && (!in_gimple_form
16039 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16041 tree type_domain = TYPE_DOMAIN (optype);
16042 tree min_val = size_zero_node;
16043 if (type_domain && TYPE_MIN_VALUE (type_domain))
16044 min_val = TYPE_MIN_VALUE (type_domain);
16045 if (in_gimple_form
16046 && TREE_CODE (min_val) != INTEGER_CST)
16047 return NULL_TREE;
16048 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16049 NULL_TREE, NULL_TREE);
16051 /* *(foo *)&complexfoo => __real__ complexfoo */
16052 else if (TREE_CODE (optype) == COMPLEX_TYPE
16053 && type == TREE_TYPE (optype))
16054 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16055 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16056 else if (VECTOR_TYPE_P (optype)
16057 && type == TREE_TYPE (optype))
16059 tree part_width = TYPE_SIZE (type);
16060 tree index = bitsize_int (0);
16061 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16062 index);
16066 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16067 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16069 tree op00 = TREE_OPERAND (sub, 0);
16070 tree op01 = TREE_OPERAND (sub, 1);
16072 STRIP_NOPS (op00);
16073 if (TREE_CODE (op00) == ADDR_EXPR)
16075 tree op00type;
16076 op00 = TREE_OPERAND (op00, 0);
16077 op00type = TREE_TYPE (op00);
16079 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16080 if (VECTOR_TYPE_P (op00type)
16081 && type == TREE_TYPE (op00type)
16082 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16083 but we want to treat offsets with MSB set as negative.
16084 For the code below negative offsets are invalid and
16085 TYPE_SIZE of the element is something unsigned, so
16086 check whether op01 fits into poly_int64, which implies
16087 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16088 then just use poly_uint64 because we want to treat the
16089 value as unsigned. */
16090 && tree_fits_poly_int64_p (op01))
16092 tree part_width = TYPE_SIZE (type);
16093 poly_uint64 max_offset
16094 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16095 * TYPE_VECTOR_SUBPARTS (op00type));
16096 if (known_lt (const_op01, max_offset))
16098 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16099 return fold_build3_loc (loc,
16100 BIT_FIELD_REF, type, op00,
16101 part_width, index);
16104 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16105 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16106 && type == TREE_TYPE (op00type))
16108 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16109 const_op01))
16110 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16112 /* ((foo *)&fooarray)[1] => fooarray[1] */
16113 else if (TREE_CODE (op00type) == ARRAY_TYPE
16114 && type == TREE_TYPE (op00type))
16116 tree type_domain = TYPE_DOMAIN (op00type);
16117 tree min_val = size_zero_node;
16118 if (type_domain && TYPE_MIN_VALUE (type_domain))
16119 min_val = TYPE_MIN_VALUE (type_domain);
16120 poly_uint64 type_size, index;
16121 if (poly_int_tree_p (min_val)
16122 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16123 && multiple_p (const_op01, type_size, &index))
16125 poly_offset_int off = index + wi::to_poly_offset (min_val);
16126 op01 = wide_int_to_tree (sizetype, off);
16127 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16128 NULL_TREE, NULL_TREE);
16134 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16135 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16136 && type == TREE_TYPE (TREE_TYPE (subtype))
16137 && (!in_gimple_form
16138 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16140 tree type_domain;
16141 tree min_val = size_zero_node;
16142 sub = build_fold_indirect_ref_loc (loc, sub);
16143 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16144 if (type_domain && TYPE_MIN_VALUE (type_domain))
16145 min_val = TYPE_MIN_VALUE (type_domain);
16146 if (in_gimple_form
16147 && TREE_CODE (min_val) != INTEGER_CST)
16148 return NULL_TREE;
16149 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16150 NULL_TREE);
16153 return NULL_TREE;
16156 /* Builds an expression for an indirection through T, simplifying some
16157 cases. */
16159 tree
16160 build_fold_indirect_ref_loc (location_t loc, tree t)
16162 tree type = TREE_TYPE (TREE_TYPE (t));
16163 tree sub = fold_indirect_ref_1 (loc, type, t);
16165 if (sub)
16166 return sub;
16168 return build1_loc (loc, INDIRECT_REF, type, t);
16171 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16173 tree
16174 fold_indirect_ref_loc (location_t loc, tree t)
16176 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16178 if (sub)
16179 return sub;
16180 else
16181 return t;
16184 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16185 whose result is ignored. The type of the returned tree need not be
16186 the same as the original expression. */
16188 tree
16189 fold_ignored_result (tree t)
16191 if (!TREE_SIDE_EFFECTS (t))
16192 return integer_zero_node;
16194 for (;;)
16195 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16197 case tcc_unary:
16198 t = TREE_OPERAND (t, 0);
16199 break;
16201 case tcc_binary:
16202 case tcc_comparison:
16203 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16204 t = TREE_OPERAND (t, 0);
16205 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16206 t = TREE_OPERAND (t, 1);
16207 else
16208 return t;
16209 break;
16211 case tcc_expression:
16212 switch (TREE_CODE (t))
16214 case COMPOUND_EXPR:
16215 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16216 return t;
16217 t = TREE_OPERAND (t, 0);
16218 break;
16220 case COND_EXPR:
16221 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16222 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16223 return t;
16224 t = TREE_OPERAND (t, 0);
16225 break;
16227 default:
16228 return t;
16230 break;
16232 default:
16233 return t;
16237 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16239 tree
16240 round_up_loc (location_t loc, tree value, unsigned int divisor)
16242 tree div = NULL_TREE;
16244 if (divisor == 1)
16245 return value;
16247 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16248 have to do anything. Only do this when VALUE is not a constant,
16249 because for a constant this check is more expensive than just
16250 doing the rounding. */
16251 if (TREE_CODE (value) != INTEGER_CST)
16253 div = build_int_cst (TREE_TYPE (value), divisor);
16255 if (multiple_of_p (TREE_TYPE (value), value, div))
16256 return value;
16259 /* If divisor is a power of two, simplify this to bit manipulation. */
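/* E.g. rounding VALUE up to a multiple of 8 becomes (VALUE + 7) & -8. */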
16260 if (pow2_or_zerop (divisor))
16262 if (TREE_CODE (value) == INTEGER_CST)
16264 wide_int val = wi::to_wide (value);
16265 bool overflow_p;
16267 if ((val & (divisor - 1)) == 0)
16268 return value;
16270 overflow_p = TREE_OVERFLOW (value);
16271 val += divisor - 1;
16272 val &= (int) -divisor;
16273 if (val == 0)
16274 overflow_p = true;
16276 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16278 else
16280 tree t;
16282 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16283 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16284 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16285 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16288 else
16290 if (!div)
16291 div = build_int_cst (TREE_TYPE (value), divisor);
16292 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16293 value = size_binop_loc (loc, MULT_EXPR, value, div);
16296 return value;
16299 /* Likewise, but round down. */
16301 tree
16302 round_down_loc (location_t loc, tree value, int divisor)
16304 tree div = NULL_TREE;
16306 gcc_assert (divisor > 0);
16307 if (divisor == 1)
16308 return value;
16310 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16311 have to do anything. Only do this when VALUE is not a constant,
16312 because for a constant this check is more expensive than just
16313 doing the rounding. */
16314 if (TREE_CODE (value) != INTEGER_CST)
16316 div = build_int_cst (TREE_TYPE (value), divisor);
16318 if (multiple_of_p (TREE_TYPE (value), value, div))
16319 return value;
16322 /* If divisor is a power of two, simplify this to bit manipulation. */
16323 if (pow2_or_zerop (divisor))
16325 tree t;
16327 t = build_int_cst (TREE_TYPE (value), -divisor);
16328 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16330 else
16332 if (!div)
16333 div = build_int_cst (TREE_TYPE (value), divisor);
16334 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16335 value = size_binop_loc (loc, MULT_EXPR, value, div);
16338 return value;
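/* Editor's sketch (not part of the upstream sources): rounding down
   to a power of two needs only the single mask built above:

     unsigned round_down_pow2 (unsigned v, unsigned d)
     {
       return v & -d;   // round_down_pow2 (13, 8) == 8
     }

   Other divisors use FLOOR_DIV_EXPR followed by MULT_EXPR.  */
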
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
        {
          poly_offset_int tem
            = wi::sext (wi::to_poly_offset (*poffset),
                        TYPE_PRECISION (TREE_TYPE (*poffset)));
          tem <<= LOG2_BITS_PER_UNIT;
          if (tem.to_shwi (pbitpos))
            *poffset = NULL_TREE;
        }
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

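/* Editor's example (hypothetical trees): for EXP == &s.f, where field
   F lives at byte 4 of S, the ADDR_EXPR arm returns &s and sets
   *PBITPOS to 32 and *POFFSET to NULL_TREE; for EXP == p + n (a
   POINTER_PLUS_EXPR) with a non-constant N, the second arm returns P
   with *PBITPOS 0 and *POFFSET N.  */
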
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}

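/* Editor's example (hypothetical trees E1 and E2): given int a[10],
   let E1 = &a[3] and E2 = &a[1].  Both split to the common core &a,
   so on a target with 4-byte int:

     poly_int64 diff;
     bool ok = ptr_difference_const (e1, e2, &diff);
     // ok == true, diff == 8 (the difference in bytes)

   If the cores differ, or only one side has a non-constant offset,
   the function returns false.  */
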
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}

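/* Editor's usage note (hypothetical LOC and PTR): both helpers build
   the canonical POINTER_PLUS_EXPR form of "ptr + 4", converting the
   offset to sizetype first as POINTER_PLUS_EXPR requires:

     tree q1 = fold_build_pointer_plus_loc (loc, ptr, ssize_int (4));
     tree q2 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);  */
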
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
        return NULL;
      else
        offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
         of the substring at OFFSET to the end, including the terminating
         nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
        *strsize = init_bytes - offset;
      else
        *strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
        return NULL;
      if (string[init_bytes - 1] != '\0')
        return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}

/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}

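/* Editor's example, reusing the array from the getbyterep comment
   above (the address trees are hypothetical):

     const char a[7] = "abc\0d";

   getbyterep (&a[1], &n) returns a pointer to "bc\0d" and sets N to 5
   (the remaining initialized bytes, including the terminating NUL),
   while c_getstr (&a[0]) returns a pointer that reads as "abc".
   A non-constant reference, or (for c_getstr) an unterminated one,
   yields NULL.  */
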
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                          tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                         tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
                         tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                             TYPE_PRECISION (TREE_TYPE (t)),
                             TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
          if (wi::bit_and (nzbits1, nzbits2) == 0)
            return wi::bit_or (nzbits1, nzbits2);
        }
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
                 : wi::lshift (nzbits, arg1);
        }
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::lshift (nzbits, -arg1)
                 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
        }
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}

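/* Editor's worked example (hypothetical SSA names X and Y): if
   get_nonzero_bits reports 0x0f for X, then X << 4 yields nonzero
   bits 0xf0 via the LSHIFT_EXPR case.  If Y has nonzero bits 0xf0,
   then X + Y falls into the PLUS_EXPR case: the masks are disjoint,
   so no bit position can carry, and the result is 0x0f | 0xf0 == 0xff.
   Any unhandled code conservatively reports all bits possibly
   nonzero.  */
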
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
                 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
                 bool generic)
{
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
          || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
            || TREE_CODE (base0) == SSA_NAME
            || TREE_CODE (base0) == STRING_CST)
           && (DECL_P (base1)
               || TREE_CODE (base1) == SSA_NAME
               || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
                 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
          || code == NE_EXPR
          /* If the offsets are equal we can ignore overflow.  */
          || known_eq (off0, off1)
          || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
          /* Or if we compare using pointers to decls or strings.  */
          || (POINTER_TYPE_P (type)
              && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
        return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
        equal = 2;
      else
        size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
        equal = 2;
      else
        size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
        equal = 2;
      else
        size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
        equal = 2;
      else
        size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
         object and the other is pointing to one past the last byte of the
         other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
        equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
        equal = 2;
      /* If both offsets are the same, there are some cases we know that are
         ok.  Either if we know they aren't zero, or if we know both sizes
         are not zero.  */
      if (equal == 2
          && known_eq (off0, off1)
          && (known_ne (off0, 0)
              || (known_ne (size0, 0) && known_ne (size1, 0))))
        equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to start of its object and the
     other points to end of its object.  This is unspecified behavior
     e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
          || ioff1 < 0
          || ioff0 > TREE_STRING_LENGTH (base0)
          || ioff1 > TREE_STRING_LENGTH (base1))
        return 2;

      /* If the bytes in the string literals starting at the pointers
         differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
                  TREE_STRING_POINTER (base1) + ioff1,
                  MIN (TREE_STRING_LENGTH (base0) - ioff0,
                       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
        {
          HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
          if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
                      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
                      ioffmin) == 0)
            /* If even the bytes in the string literal before the
               pointers are the same, the string literals could be
               tail merged.  */
            return 2;
        }
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}

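/* Editor's example (hypothetical globals): when folding &a == &b + 1
   for distinct variables "int a, b;", the bases differ so EQUAL
   becomes 0, but OFF1 equals the size of B, i.e. &b + 1 is a
   one-past-the-end address that could coincide with &a.  EQUAL is
   therefore reset to 2; C++ constexpr folding then punts, while an
   ordinary pointer comparison still folds to "unequal" under the
   !INTEGRAL_TYPE_P (type) early-out above.  */
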
/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */
tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
        if (elt)
          return NULL_TREE;
        elt = ce->value;
      }
  return elt;
}

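/* Editor's example (hypothetical constructor): for the vector
   initializer { 0, 42, 0, 0 } the loop above finds exactly one
   element that is neither integer_zerop nor real_zerop and returns
   the tree for 42; for { 1, 2 } or an all-zero constructor it
   returns NULL_TREE.  */
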
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
                             tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
                                 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
                               one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
                               one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
                                   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
                               zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
                                   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
                               zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
                                   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
                                               index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
                                               index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
                                              index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */